code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def _score(cluster):
x, y = zip(*cluster)[:2]
return min(len(set(x)), len(set(y))) | score of the cluster, in this case, is the number of non-repetitive matches |
def sadd(self, name, values, *args):
with self.pipe as pipe:
values = [self.valueparse.encode(v) for v in
self._parse_values(values, args)]
return pipe.sadd(self.redis_key(name), *values) | Add the specified members to the Set.
:param name: str the name of the redis key
:param values: a list of values or a simple value.
:return: Future() |
def encode_signature(sig_r, sig_s):
if sig_s * 2 >= SECP256k1_order:
log.debug("High-S to low-S")
sig_s = SECP256k1_order - sig_s
sig_bin = '{:064x}{:064x}'.format(sig_r, sig_s).decode('hex')
assert len(sig_bin) == 64
sig_b64 = base64.b64encode(sig_bin)
return sig_b64 | Encode an ECDSA signature, with low-s |
def _ScanEncryptedVolumeNode(self, scan_context, scan_node):
if scan_node.type_indicator == definitions.TYPE_INDICATOR_APFS_CONTAINER:
container_file_entry = resolver.Resolver.OpenFileEntry(
scan_node.path_spec, resolver_context=self._resolver_context)
fsapfs_volume = container_file_entry.GetA... | Scans an encrypted volume node for supported formats.
Args:
scan_context (SourceScannerContext): source scanner context.
scan_node (SourceScanNode): source scan node.
Raises:
BackEndError: if the scan node cannot be unlocked.
ValueError: if the scan context or scan node is invalid. |
def assert_not_in(first, second, msg_fmt="{msg}"):
if first in second:
msg = "{!r} is in {!r}".format(first, second)
fail(msg_fmt.format(msg=msg, first=first, second=second)) | Fail if first is in a collection second.
>>> assert_not_in("bar", [4, "foo", {}])
>>> assert_not_in("foo", [4, "foo", {}])
Traceback (most recent call last):
...
AssertionError: 'foo' is in [4, 'foo', {}]
The following msg_fmt arguments are supported:
* msg - the default error message
... |
def find_newline(source):
assert not isinstance(source, unicode)
counter = collections.defaultdict(int)
for line in source:
if line.endswith(CRLF):
counter[CRLF] += 1
elif line.endswith(CR):
counter[CR] += 1
elif line.endswith(LF):
counter[LF] += 1... | Return type of newline used in source.
Input is a list of lines. |
def _apply_sort(cursor, sort_by, sort_direction):
if sort_direction is not None and sort_direction.lower() == "desc":
sort = pymongo.DESCENDING
else:
sort = pymongo.ASCENDING
return cursor.sort(sort_by, sort) | Apply sort to a cursor.
:param cursor: The cursor to apply sort on.
:param sort_by: The field name to sort by.
:param sort_direction: The direction to sort, "asc" or "desc".
:return: |
def _encode_payload(data, headers=None):
"Wrap data in an SCGI request."
prolog = "CONTENT_LENGTH\0%d\0SCGI\x001\0" % len(data)
if headers:
prolog += _encode_headers(headers)
return _encode_netstring(prolog) + data | Wrap data in an SCGI request. |
def moist_static_energy(heights, temperature, specific_humidity):
r
return (dry_static_energy(heights, temperature)
+ mpconsts.Lv * specific_humidity.to('dimensionless')).to('kJ/kg') | r"""Calculate the moist static energy of parcels.
This function will calculate the moist static energy following
equation 3.72 in [Hobbs2006]_.
Notes
-----
.. math::\text{moist static energy} = c_{pd} * T + gz + L_v q
* :math:`T` is temperature
* :math:`z` is height
* :math:`q` is spec... |
def add(self, command):
self.add_command(command.config)
command.set_application(self)
return self | Adds a command object. |
def satellite(isochrone, kernel, stellar_mass, distance_modulus,**kwargs):
mag_1, mag_2 = isochrone.simulate(stellar_mass, distance_modulus)
lon, lat = kernel.simulate(len(mag_1))
return mag_1, mag_2, lon, lat | Wrapping the isochrone and kernel simulate functions. |
def get_translated_items(fapi, file_uri, use_cache, cache_dir=None):
items = None
cache_file = os.path.join(cache_dir, sha1(file_uri)) if use_cache else None
if use_cache and os.path.exists(cache_file):
print("Using cache file %s for translated items for: %s" % (cache_file, file_uri))
items ... | Returns the last modified from smarterling |
def reset_coords(self, names=None, drop=False, inplace=None):
inplace = _check_inplace(inplace)
if inplace and not drop:
raise ValueError('cannot reset coordinates in-place on a '
'DataArray without ``drop == True``')
if names is None:
names =... | Given names of coordinates, reset them to become variables.
Parameters
----------
names : str or list of str, optional
Name(s) of non-index coordinates in this dataset to reset into
variables. By default, all non-index coordinates are reset.
drop : bool, optional... |
def getMaxWidth(self, rows):
'Return the maximum length of any cell in column or its header.'
w = 0
if len(rows) > 0:
w = max(max(len(self.getDisplayValue(r)) for r in rows), len(self.name))+2
return max(w, len(self.name)) | Return the maximum length of any cell in column or its header. |
def _load_prefix_binding(self):
pymux = self.pymux
if self._prefix_binding:
self.custom_key_bindings.remove_binding(self._prefix_binding)
@self.custom_key_bindings.add(*self._prefix, filter=
~(HasPrefix(pymux) | has_focus(COMMAND) | has_focus(PROMPT) |
Waits... | Load the prefix key binding. |
def session(self, auth=None):
url = '{server}{auth_url}'.format(**self._options)
if isinstance(self._session.auth, tuple) or auth:
if not auth:
auth = self._session.auth
username, password = auth
authentication_data = {'username': username, 'password':... | Get a dict of the current authenticated user's session information.
:param auth: Tuple of username and password.
:type auth: Optional[Tuple[str,str]]
:rtype: User |
def find_models(self, constructor, constraints=None, *, columns=None, order_by=None,
limiting=None, table_name=None):
return self._find_models(
constructor, table_name or constructor.table_name, constraints, columns=columns,
order_by=order_by, limiting=limiting) | Specialization of DataAccess.find_all that returns models instead of cursor objects. |
def bandpass_filter_matrix( matrix,
tr=1, lowf=0.01, highf=0.1, order = 3):
from scipy.signal import butter, filtfilt
def butter_bandpass(lowcut, highcut, fs, order ):
nyq = 0.5 * fs
low = lowcut / nyq
high = highcut / nyq
b, a = butter(order, [low, high], btype='band')
... | Bandpass filter the input time series image
ANTsR function: `frequencyFilterfMRI`
Arguments
---------
image: input time series image
tr: sampling time interval (inverse of sampling rate)
lowf: low frequency cutoff
highf: high frequency cutoff
order: order of the butterworth fi... |
def build_swagger12_handler(schema):
if schema:
return SwaggerHandler(
op_for_request=schema.validators_for_request,
handle_request=handle_request,
handle_response=validate_response,
) | Builds a swagger12 handler or returns None if no schema is present.
:type schema: :class:`pyramid_swagger.model.SwaggerSchema`
:rtype: :class:`SwaggerHandler` or None |
def param_help_download(self):
files = []
for vehicle in ['APMrover2', 'ArduCopter', 'ArduPlane', 'ArduSub', 'AntennaTracker']:
url = 'http://autotest.ardupilot.org/Parameters/%s/apm.pdef.xml' % vehicle
path = mp_util.dot_mavproxy("%s.xml" % vehicle)
files.append((url... | download XML files for parameters |
def total_misses(self, filename=None):
if filename is not None:
return len(self.missed_statements(filename))
total = 0
for filename in self.files():
total += len(self.missed_statements(filename))
return total | Return the total number of uncovered statements for the file
`filename`. If `filename` is not given, return the total
number of uncovered statements for all files. |
def _filter(self, text):
self.markdown.reset()
return self.markdown.convert(text) | Filter markdown. |
def load_global_catalog():
cat_dir = global_data_dir()
if not os.path.isdir(cat_dir):
return Catalog()
else:
return YAMLFilesCatalog(cat_dir) | Return a catalog for the environment-specific Intake directory |
def format_kwargs(attrs, params):
attrs_mapping = {'cell_methods': {'YS': 'years', 'MS': 'months'},
'long_name': {'YS': 'Annual', 'MS': 'Monthly'}}
for key, val in attrs.items():
mba = {}
for k, v in params.items():
if isinstance(v, six.string_types) and v in att... | Modify attribute with argument values.
Parameters
----------
attrs : dict
Attributes to be assigned to function output. The values of the attributes in braces will be replaced the
the corresponding args values.
params : dict
A BoundArguments.arguments dictionary storing a function's a... |
def main():
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
dat=[]
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
else:
file = sys.stdin
ofile=""
if '-F' in sys.argv:
ind = sys.argv.index('-F')
ofile= sys.argv[in... | NAME
vector_mean.py
DESCRIPTION
calculates vector mean of vector data
INPUT FORMAT
takes dec, inc, int from an input file
SYNTAX
vector_mean.py [command line options] [< filename]
OPTIONS
-h prints help message and quits
-f FILE, specify input file
... |
def request_middleware(api=None):
def decorator(middleware_method):
apply_to_api = hug.API(api) if api else hug.api.from_object(middleware_method)
class MiddlewareRouter(object):
__slots__ = ()
def process_request(self, request, response):
return middleware_me... | Registers a middleware function that will be called on every request |
def consolidate_args(args):
if not hasattr(args, 'hex_limit'):
return
active_plugins = {}
is_using_default_value = {}
for plugin in PluginOptions.all_plugins:
arg_name = PluginOptions._convert_flag_text_to_argument_name(
plugin.disable_flag_text,
... | There are many argument fields related to configuring plugins.
This function consolidates all of them, and saves the consolidated
information in args.plugins.
Note that we're deferring initialization of those plugins, because
plugins may have various initialization values, referenced in... |
def get_sqlite_core(connection_string, *, cursor_factory=None, edit_connection=None):
import sqlite3 as sqlite
def opener():
cn = sqlite.connect(connection_string)
if cursor_factory:
cn.row_factory = cursor_factory
if edit_connection:
edit_connection(cn)
return cn
return InjectedDataAc... | Creates a simple SQLite3 core. |
def generate_data_for_create_page(self):
if not self.can_create:
return {}
if self.create_form:
return self.create_form.to_dict()
return self.generate_simple_data_page() | Generate a custom representation of table's fields in dictionary type
if exist create form else use default representation.
:return: dict |
def read_data(filename, data_format=None):
if not os.path.exists(filename):
raise ValueError('Filename {} does not exist'.format(filename))
if not isinstance(data_format, MimeType):
data_format = get_data_format(filename)
if data_format.is_tiff_format():
return read_tiff_image(filena... | Read image data from file
This function reads input data from file. The format of the file
can be specified in ``data_format``. If not specified, the format is
guessed from the extension of the filename.
:param filename: filename to read data from
:type filename: str
:param data_format: format... |
def absolutify(url):
site_url = getattr(settings, 'SITE_URL', False)
if not site_url:
protocol = settings.PROTOCOL
hostname = settings.DOMAIN
port = settings.PORT
if (protocol, port) in (('https://', 443), ('http://', 80)):
site_url = ''.join(map(str, (protocol, hostn... | Takes a URL and prepends the SITE_URL |
def detectBlackBerry(self):
return UAgentInfo.deviceBB in self.__userAgent \
or UAgentInfo.vndRIM in self.__httpAccept | Return detection of Blackberry
Detects if the current browser is any BlackBerry.
Includes the PlayBook. |
def get_config(self):
config = {
'location': self.location,
'language': self.language,
'topic': self.topic,
}
return config | function to get current configuration |
def get_raw_token(self, header):
parts = header.split()
if len(parts) == 0:
return None
if parts[0] not in AUTH_HEADER_TYPE_BYTES:
return None
if len(parts) != 2:
raise AuthenticationFailed(
_('Authorization header must contain two spac... | Extracts an unvalidated JSON web token from the given "Authorization"
header value. |
def tensor(self, field_name, tensor_ind):
if tensor_ind == self._tensor_cache_file_num[field_name]:
return self._tensors[field_name]
filename = self.generate_tensor_filename(field_name, tensor_ind, compressed=True)
Tensor.load(filename, compressed=True,
prealloc=s... | Returns the tensor for a given field and tensor index.
Parameters
----------
field_name : str
the name of the field to load
tensor_index : int
the index of the tensor
Returns
-------
:obj:`Tensor`
the desired tensor |
def _detect_gamepads(self):
state = XinputState()
for device_number in range(4):
res = self.xinput.XInputGetState(
device_number, ctypes.byref(state))
if res == XINPUT_ERROR_SUCCESS:
device_path = (
"/dev/input/by_id/" +
... | Find gamepads. |
def render_check_and_set_platforms(self):
phase = 'prebuild_plugins'
plugin = 'check_and_set_platforms'
if not self.pt.has_plugin_conf(phase, plugin):
return
if self.user_params.koji_target.value:
self.pt.set_plugin_arg(phase, plugin, "koji_target",
... | If the check_and_set_platforms plugin is present, configure it |
def manage_mep(self, mep_json):
responses = representative_pre_import.send(sender=self,
representative_data=mep_json)
for receiver, response in responses:
if response is False:
logger.debug(
'Skipping MEP %s', mep_json['Name']['full'])
... | Import a mep as a representative from the json dict fetched from
parltrack |
def long_form_multiple_formats(jupytext_formats, metadata=None):
if not jupytext_formats:
return []
if not isinstance(jupytext_formats, list):
jupytext_formats = [fmt for fmt in jupytext_formats.split(',') if fmt]
jupytext_formats = [long_form_one_format(fmt, metadata) for fmt in jupytext_fo... | Convert a concise encoding of jupytext.formats to a list of formats, encoded as dictionaries |
def _generate_initial_model(self):
initial_parameters = [p.current_value for p in self.current_parameters]
try:
initial_model = self.specification(*initial_parameters)
except TypeError:
raise TypeError(
'Failed to build initial model. Make sure that the in... | Creates the initial model for the optimistation.
Raises
------
TypeError
Raised if the model failed to build. This could be due to
parameters being passed to the specification in the wrong
format. |
def GetNetworks(alias=None,location=None):
if alias is None: alias = clc.v1.Account.GetAlias()
if location is None: location = clc.v1.Account.GetLocation()
r = clc.v1.API.Call('post','Network/GetAccountNetworks', { 'AccountAlias': alias, 'Location': location })
if int(r['StatusCode']) == 0: return(r['Network... | Gets the list of Networks mapped to the account in the specified datacenter.
https://t3n.zendesk.com/entries/21024721-Get-Networks
:param alias: short code for a particular account. If none will use account's default alias
:param location: datacenter where group resides. If none will use account's primary dat... |
def construct_s3_location_object(location_uri, logical_id, property_name):
if isinstance(location_uri, dict):
if not location_uri.get("Bucket") or not location_uri.get("Key"):
raise InvalidResourceException(logical_id,
"'{}' requires Bucket and Key prop... | Constructs a Lambda `Code` or `Content` property, from the SAM `CodeUri` or `ContentUri` property.
This follows the current scheme for Lambda Functions and LayerVersions.
:param dict or string location_uri: s3 location dict or string
:param string logical_id: logical_id of the resource calling this functio... |
def _pys_assert_version(self, line):
if float(line.strip()) > 1.0:
msg = _("File version {version} unsupported (>1.0).").format(
version=line.strip())
raise ValueError(msg) | Asserts pys file version |
def split_result_of_axis_func_pandas(axis, num_splits, result, length_list=None):
if num_splits == 1:
return result
if length_list is not None:
length_list.insert(0, 0)
sums = np.cumsum(length_list)
if axis == 0:
return [result.iloc[sums[i] : sums[i + 1]] for i in ran... | Split the Pandas result evenly based on the provided number of splits.
Args:
axis: The axis to split across.
num_splits: The number of even splits to create.
result: The result of the computation. This should be a Pandas
DataFrame.
length_list: The list of lengths to spl... |
def load_from_file(self, path):
with open(path) as inf:
data = inf.read()
if data:
items = json.loads(data)
else:
items = {}
for item in items:
extra = dict((x, y) for x, y in item.items()
if x not i... | Load cookies from the file.
Content of file should be a JSON-serialized list of dicts. |
def pdf(self, resource_id):
self.resource_id(str(resource_id))
self._request_uri = '{}/pdf'.format(self._request_uri) | Update the request URI to get the pdf for this resource.
Args:
resource_id (integer): The group id. |
def removeAllChildrenAtIndex(self, parentIndex):
if not parentIndex.isValid():
logger.debug("No valid item selected for deletion (ignored).")
return
parentItem = self.getItem(parentIndex, None)
logger.debug("Removing children of {!r}".format(parentItem))
assert pa... | Removes all children of the item at the parentIndex.
The children's finalize method is called before removing them to give them a
chance to close their resources |
def _init_ws(n_items, comparisons, prior_inv, tau, nu):
prec = np.zeros((n_items, n_items))
xs = np.zeros(n_items)
for i, (a, b) in enumerate(comparisons):
prec[(a, a, b, b), (a, b, a, b)] += tau[i] * MAT_ONE_FLAT
xs[a] += nu[i]
xs[b] -= nu[i]
cov = inv_posdef(prior_inv + prec)
... | Initialize parameters in the weight space. |
def sign(self, h):
if not self.is_private():
raise RuntimeError("Key must be private to be able to sign")
val = from_bytes_32(h)
r, s = self._generator.sign(self.secret_exponent(), val)
return sigencode_der(r, s) | Return a der-encoded signature for a hash h.
Will throw a RuntimeError if this key is not a private key |
def store_checksums(dataset_name, sizes_checksums):
path = _get_path(dataset_name)
original_data = _get_sizes_checksums(path)
new_data = original_data.copy()
new_data.update(sizes_checksums)
if original_data == new_data:
return
with tf.io.gfile.GFile(path, 'w') as f:
for url, (size, checksum) in sor... | Store given checksums and sizes for specific dataset.
Content of file is never disgarded, only updated. This is to ensure that if
process is killed right after first download finishes, checksums registered
during previous runs aren't lost.
It is the responsibility of the caller not to call function multiple t... |
def prepare_release(ver=None):
write_changelog(True)
if ver is None:
ver = next_release()
print('saving updates to ChangeLog')
run('git commit ChangeLog -m "[RELEASE] Update to version v{}"'.format(ver), hide=True)
sha = run('git log -1 --pretty=format:"%h"', hide=True).stdout
run('git t... | Prepare release artifacts |
def pop_key(self, arg, key, *args, **kwargs):
return self.unfinished_arguments[arg].pop(key, *args, **kwargs) | Delete a previously defined key for the `add_argument` |
def dump(self, output, close_after_write=True):
try:
output.write
self.stream = output
except AttributeError:
self.stream = io.open(output, "w", encoding="utf-8")
try:
self.write_table()
finally:
if close_after_write:
... | Write data to the output with tabular format.
Args:
output (file descriptor or str):
file descriptor or path to the output file.
close_after_write (bool, optional):
Close the output after write.
Defaults to |True|. |
def _create_batches(self, instances: Iterable[Instance], shuffle: bool) -> Iterable[Batch]:
raise NotImplementedError | This method should return one epoch worth of batches. |
def reload(self, index):
finfo = self.data[index]
txt, finfo.encoding = encoding.read(finfo.filename)
finfo.lastmodified = QFileInfo(finfo.filename).lastModified()
position = finfo.editor.get_position('cursor')
finfo.editor.set_text(txt)
finfo.editor.document().setM... | Reload file from disk |
def is_client_ip_address_blacklisted(request: AxesHttpRequest) -> bool:
if is_ip_address_in_blacklist(request.axes_ip_address):
return True
if settings.AXES_ONLY_WHITELIST and not is_ip_address_in_whitelist(request.axes_ip_address):
return True
return False | Check if the given request refers to a blacklisted IP. |
def bind(self, **config):
while self.unbound_types:
typedef = self.unbound_types.pop()
try:
load, dump = typedef.bind(self, **config)
self.bound_types[typedef] = {
"load": load, "dump": dump
}
except Exceptio... | Bind all unbound types to the engine.
Bind each unbound typedef to the engine, passing in the engine and
:attr:`config`. The resulting ``load`` and ``dump`` functions can
be found under ``self.bound_types[typedef]["load"]`` and
``self.bound_types[typedef]["dump"], respectively.
... |
def ping(self):
if not self.conn:
self.connect()
self.conn.send('PING', time.time())
cmd, payload = self.conn.recv()
recv_ts = time.time()
if cmd != 'PONG':
raise Exception("Invalid response from server")
return recv_ts - payload[0] | Ping the server. Returns the time interval, in seconds,
required for the server to respond to the PING message. |
def load_from_json(db_file, language=DEFAULT_LANG):
raw = json.loads(file(db_file).read())
data = {
'_id': raw['id'],
'title': raw['title'],
'description': DBVuln.handle_ref(raw['description'], language=language),
'severity': raw['severity'],
'... | Parses the JSON data and returns it
:param db_file: File and path pointing to the JSON file to parse
:param language: The user's language (en, es, etc.)
:raises: All kind of exceptions if the file doesn't exist or JSON is
invalid.
:return: None |
def out(self):
out = ""
if self.use_sentinel:
out += sentinel_var + " = _coconut.object()\n"
closes = 0
for checks, defs in self.checkdefs:
if checks:
out += "if " + paren_join(checks, "and") + ":\n" + openindent
closes += 1
... | Return pattern-matching code. |
def _configure_logger_handler(cls, log_dest, log_filename):
if log_dest is None:
return None
msg_format = '%(asctime)s-%(name)s-%(message)s'
if log_dest == 'stderr':
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(msg_format))
... | Return a logging handler for the specified `log_dest`, or `None` if
`log_dest` is `None`. |
def log(self, level, msg, *args, **kwargs):
if level >= logging.FATAL:
extra = kwargs.setdefault('extra', {})
extra[_ABSL_LOG_FATAL] = True
super(ABSLLogger, self).log(level, msg, *args, **kwargs) | Logs a message at a cetain level substituting in the supplied arguments.
This method behaves differently in python and c++ modes.
Args:
level: int, the standard logging level at which to log the message.
msg: str, the text of the message to log.
*args: The arguments to substitute in the mess... |
def find_vm_by_name(self, si, path, name):
return self.find_obj_by_path(si, path, name, self.VM) | Finds vm in the vCenter or returns "None"
:param si: pyvmomi 'ServiceInstance'
:param path: the path to find the object ('dc' or 'dc/folder' or 'dc/folder/folder/etc...')
:param name: the vm name to return |
def add_net(self, net):
self.sanity_check_net(net)
self.logic.add(net) | Add a net to the logic of the block.
The passed net, which must be of type LogicNet, is checked and then
added to the block. No wires are added by this member, they must be
added seperately with add_wirevector. |
def _media(self):
css = ['markymark/css/markdown-editor.css']
iconlibrary_css = getattr(
settings,
'MARKYMARK_FONTAWESOME_CSS',
'markymark/fontawesome/fontawesome.min.css'
)
if iconlibrary_css:
css.append(iconlibrary_css)
media = fo... | Returns a forms.Media instance with the basic editor media and media
from all registered extensions. |
def initrepo(repopath, bare, shared):
ag = activegit.ActiveGit(repopath, bare=bare, shared=shared) | Initialize an activegit repo.
Default makes base shared repo that should be cloned for users |
def sysmeta_add_preferred(sysmeta_pyxb, node_urn):
if not has_replication_policy(sysmeta_pyxb):
sysmeta_set_default_rp(sysmeta_pyxb)
rp_pyxb = sysmeta_pyxb.replicationPolicy
_add_node(rp_pyxb, 'pref', node_urn)
_remove_node(rp_pyxb, 'block', node_urn) | Add a remote Member Node to the list of preferred replication targets to this
System Metadata object.
Also remove the target MN from the list of blocked Member Nodes if present.
If the target MN is already in the preferred list and not in the blocked list, this
function is a no-op.
Args:
sy... |
def output_json(gandi, format, value):
if format == 'json':
gandi.echo(json.dumps(value, default=date_handler, sort_keys=True))
elif format == 'pretty-json':
gandi.echo(json.dumps(value, default=date_handler, sort_keys=True,
indent=2, separators=(',', ': '))) | Helper to show json output |
def convert_field_to_html(cr, table, field_name, html_field_name):
if version_info[0] < 7:
logger.error("You cannot use this method in an OpenUpgrade version "
"prior to 7.0.")
return
cr.execute(
"SELECT id, %(field)s FROM %(table)s WHERE %(field)s IS NOT NULL" % {
... | Convert field value to HTML value.
.. versionadded:: 7.0 |
def check_tweet(tweet, validation_checking=False):
if "id" not in tweet:
raise NotATweetError("This text has no 'id' key")
original_format = is_original_format(tweet)
if original_format:
_check_original_format_tweet(tweet, validation_checking=validation_checking)
else:
_check_act... | Ensures a tweet is valid and determines the type of format for the tweet.
Args:
tweet (dict/Tweet): the tweet payload
validation_checking (bool): check for valid key structure in a tweet. |
def adopt(self):
valid_relationships = set(Relationship._instances.keys())
relationships = [
(parent, relation.complement(), term.id)
for term in six.itervalues(self.terms)
for relation in term.relations
for parent in term.relations... | Make terms aware of their children.
This is done automatically when using the `~Ontology.merge` and
`~Ontology.include` methods as well as the `~Ontology.__init__`
method, but it should be called in case of manual editing of the
parents or children of a `Term`. |
def ring_coding(array):
n = len(array)
codes = np.ones(n, dtype=Path.code_type) * Path.LINETO
codes[0] = Path.MOVETO
codes[-1] = Path.CLOSEPOLY
return codes | Produces matplotlib Path codes for exterior and interior rings
of a polygon geometry. |
def next(self):
if not self.cursor:
self.cursor = self.coll_handle.find().sort([("ts", ASCENDING)])
doc = self.cursor.next()
doc['thread'] = self.name
le = LogEvent(doc)
return le | Make iterators. |
def _ConvertBool(value, require_str):
if require_str:
if value == 'true':
return True
elif value == 'false':
return False
else:
raise ParseError('Expected "true" or "false", not {0}.'.format(value))
if not isinstance(value, bool):
raise ParseError('Expected true or false without qu... | Convert a boolean value.
Args:
value: A scalar value to convert.
require_str: If True, value must be a str.
Returns:
The bool parsed.
Raises:
ParseError: If a boolean value couldn't be consumed. |
def stop_app(self, callback_function_param=False):
self.logger.info("Receiver:Stopping current app '%s'", self.app_id)
return self.send_message(
{MESSAGE_TYPE: 'STOP'},
inc_session_id=True, callback_function=callback_function_param) | Stops the current running app on the Chromecast. |
def getBehavior(name, id=None):
name = name.upper()
if name in __behaviorRegistry:
if id:
for n, behavior in __behaviorRegistry[name]:
if n == id:
return behavior
return __behaviorRegistry[name][0][1]
return None | Return a matching behavior if it exists, or None.
If id is None, return the default for name. |
def check_predefined_conditions():
try:
node_info = current_k8s_corev1_api_client.list_node()
for node in node_info.items:
for condition in node.status.conditions:
if not condition.status:
return False
except ApiException as e:
log.error('S... | Check k8s predefined conditions for the nodes. |
def check():
dist_path = Path(DIST_PATH)
if not dist_path.exists() or not list(dist_path.glob('*')):
print("No distribution files found. Please run 'build' command first")
return
subprocess.check_call(['twine', 'check', 'dist/*']) | Checks the long description. |
def get_previous_character(self):
cursor = self.textCursor()
cursor.movePosition(QTextCursor.PreviousCharacter, QTextCursor.KeepAnchor)
return cursor.selectedText() | Returns the character before the cursor.
:return: Previous cursor character.
:rtype: QString |
def dispatch(argdict):
cmd = argdict['command']
ftc = getattr(THIS_MODULE, 'do_'+cmd)
ftc(argdict) | Call the command-specific function, depending on the command. |
def _reset_errors(self, msg=None):
if msg is not None and msg in self._errors:
del self._errors[msg]
else:
self._errors = {} | Resets the logging throttle cache, so the next error is emitted
regardless of the value in `self.server_error_interval`
:param msg: if present, only this key is reset. Otherwise, the whole
cache is cleaned. |
def _cli_main(args=None):
arguments = _parse_arguments(args)
_remove_none_values(arguments)
verbosity = min(arguments.pop('verbose'), 4)
levels = [logging.ERROR,
logging.WARNING,
logging.INFO,
logging.DEBUG,
TRACE_LEVEL]
arguments.setdefault('d... | Pass input arguments to open_tunnel
Mandatory: ssh_address, -R (remote bind address list)
Optional:
-U (username) we may gather it from SSH_CONFIG_FILE or current username
-p (server_port), defaults to 22
-P (password)
-L (local_bind_address), default to 0.0.0.0:22
... |
def straight_line_show(title, length=100, linestyle="=", pad=0):
print(StrTemplate.straight_line(
title=title, length=length, linestyle=linestyle, pad=pad)) | Print a formatted straight line. |
def __set_rate_type(self, value):
if value not in [RATE_TYPE_FIXED, RATE_TYPE_PERCENTAGE]:
raise ValueError("Invalid rate type.")
self.__rate_type = value | Sets the rate type.
@param value:str |
async def run_action(self, action_name, **params):
action_facade = client.ActionFacade.from_connection(self.connection)
log.debug('Starting action `%s` on %s', action_name, self.name)
res = await action_facade.Enqueue([client.Action(
name=action_name,
parameters=params,
... | Run an action on this unit.
:param str action_name: Name of action to run
:param **params: Action parameters
:returns: A :class:`juju.action.Action` instance.
Note that this only enqueues the action. You will need to call
``action.wait()`` on the resulting `Action` instance if... |
def update(self):
ring = self._fetch()
n_replicas = len(ring)
replica_set = set([r[1] for r in self.replicas])
self.ranges = []
for n, (start, replica) in enumerate(ring):
if replica in replica_set:
end = ring[(n+1) % n_replicas][0] % RING_SIZE
... | Fetches the updated ring from Redis and updates the current ranges. |
def validate(self, graph):
if not nx.is_directed_acyclic_graph(graph):
raise DirectedAcyclicGraphInvalid(graph_name=self._name) | Validate the graph by checking whether it is a directed acyclic graph.
Args:
graph (DiGraph): Reference to a DiGraph object from NetworkX.
Raises:
DirectedAcyclicGraphInvalid: If the graph is not a valid dag. |
def validate_request(self,
data: Any,
*additional: AnyMapping,
merged_class: Type[dict] = dict) -> Any:
r
request_schema = getattr(self.module, 'request', None)
if request_schema is None:
logger.error(
... | r"""Validate request data against request schema from module.
:param data: Request data.
:param \*additional:
Additional data dicts to be merged with base request data.
:param merged_class:
When additional data dicts supplied method by default will return
mer... |
def glyph_metrics_stats(ttFont):
glyph_metrics = ttFont['hmtx'].metrics
ascii_glyph_names = [ttFont.getBestCmap()[c] for c in range(32, 128)
if c in ttFont.getBestCmap()]
ascii_widths = [adv for name, (adv, lsb) in glyph_metrics.items()
if name in ascii_glyph_names]
asci... | Returns a dict containing whether the font seems_monospaced,
what's the maximum glyph width and what's the most common width.
For a font to be considered monospaced, at least 80% of
the ascii glyphs must have the same width. |
def print_version(self):
    """Print the program version.

    Nothing is printed when no version is set. The output includes the
    title when one is available. Returns ``self`` for chaining.
    """
    # No version recorded -> stay silent.
    if not self._version:
        return self
    if self._title:
        print(' %s (%s %s)' % (self._title, self._name, self._version))
    else:
        print(' %s %s' % (self._name, self._version))
    return self
def find_tag_by_name(repo, tag_name, safe=True):
    """Find tag by name in a github Repository.

    Parameters
    ----------
    repo: :class:`github.Repository` instance
    tag_name: str
        Short name of tag (not a fully qualified ref).
    safe: bool, optional
        Defaults to `True`. When `True`, `None` is returned on failure.
        When `False`, the underlying lookup exception is re-raised.
    """
    qualified = 'tags/{ref}'.format(ref=tag_name)
    try:
        candidate = repo.get_git_ref(qualified)
    except github.UnknownObjectException:
        # Unknown tag: swallow in safe mode, propagate otherwise.
        if not safe:
            raise
    else:
        if candidate and candidate.ref:
            return candidate
    return None
Parameters
----------
repo: :class:`github.Repository` instance
tag_name: str
Short name of tag (not a fully qualified ref).
safe: bool, optional
Defaults to `True`. When `True`, `None` is returned on failure. When
`False`, an except... |
def init_attachment_cache(self):
if self.request.method == 'GET':
attachments_cache.delete(self.get_attachments_cache_key(self.request))
return
attachments_cache_key = self.get_attachments_cache_key(self.request)
restored_attachments_dict = attachments_cache.get(attachmen... | Initializes the attachment cache for the current view. |
def recycle():
    """Recycle cache entries stored under version=2 into the main cache.

    Every key matching ``th_*`` is read from the version-2 cache, written
    back under the default cache version, and the version-2 copy is then
    removed. Entries that raise ``ValueError`` are skipped best-effort.
    """
    for key in cache.iter_keys('th_*'):
        try:
            payload = cache.get(key, version=2)
            cache.set(key, payload)
            cache.delete_pattern(key, version=2)
        except ValueError:
            # A bad entry must not abort the whole recycle run.
            pass
    logger.info('recycle of cache done!')
with version=2 in the main cache |
def cmd(self, argv):
assert isinstance(argv, (list, tuple)), \
"'argv' is not a sequence: %r" % argv
retval = None
try:
argv = self.precmd(argv)
retval = self.onecmd(argv)
self.postcmd(argv)
except:
if not self.cmdexc(argv):... | Run one command and exit.
"argv" is the arglist for the command to run. argv[0] is the
command to run. If argv is an empty list then the
'emptyline' handler is run.
Returns the return value from the command handler. |
def add_nodes(self, nodes, attr_dict=None, **attr):
attr_dict = self._combine_attribute_arguments(attr_dict, attr)
for node in nodes:
if type(node) is tuple:
new_node, node_attr_dict = node
new_dict = attr_dict.copy()
new_dict.update(node_attr_... | Adds multiple nodes to the graph, along with any related attributes
of the nodes.
:param nodes: iterable container to either references of the nodes
OR tuples of (node reference, attribute dictionary);
if an attribute dictionary is provided in the tuple,
... |
def _printable_id_code(self):
    """Returns the code in a printable form, separating it into groups
    of three characters joined by points.

    :return: the ID code in a printable form
    """
    raw = super(ISWCCode, self)._printable_id_code()
    # First three, middle three, last three characters, dot-separated.
    return '.'.join((raw[:3], raw[3:6], raw[-3:]))
three characters using a point between them.
:return: the ID code in a printable form |
def can_add_new_content(self, block, file_info):
return ((self._max_files_per_container == 0 or self._max_files_per_container > len(block.content_file_infos))
and (self.does_content_fit(file_info, block)
or
(block.content_size < self._max_container_conte... | new content from file_info can be added into block iff
- file count limit hasn't been reached for the block
- there is enough space to completely fit the info into the block
- OR the info can be split and some info can fit into the block |
def as_dict(self, use_preliminary=False):
config = dict()
for key in self.config.keys:
if use_preliminary and key in self.preliminary_config:
value = self.preliminary_config[key]
else:
value = self.config.get_config_value(key)
config[ke... | Create a copy of the config in form of a dict
:param bool use_preliminary: Whether to include the preliminary config
:return: A dict with the copy of the config
:rtype: dict |
def info(verbose):
if _get_mongopatcher().manifest.is_initialized():
print('Datamodel version: %s' % _get_mongopatcher().manifest.version)
if verbose:
print('\nUpdate history:')
for update in reversed(_get_mongopatcher().manifest.history):
reason = update.get(... | Show version of the datamodel |
def get_es(self, default_builder=get_es):
    """Returns the elasticsearch Elasticsearch object to use.

    Delegates to the parent implementation while substituting the
    module-level ``get_es`` builder as the default, which takes Django
    ``settings.py`` configuration into account.

    :param default_builder: callable used to build the client when none
        is set; defaults to the module-level ``get_es`` builder.
    """
    return super(S, self).get_es(default_builder=default_builder)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.