code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def extract_field(self, field):
if not isinstance(field, basestring):
err_msg = u"Invalid extractor! => {}\n".format(field)
logger.log_error(err_msg)
raise exceptions.ParamsError(err_msg)
msg = "extract: {}".format(field)
if text_extractor_regexp_compile.match... | extract value from requests.Response. |
def invoke(self):
    """Invoke the captured function with the captured arguments.

    Deserializes the pickled callable and its argument tuples via dill,
    then calls the function and returns its result.
    """
    logger.debug('Running deferred function %s.', self)
    self.module.makeLoadable()
    # Deserialize the captured callable and both argument containers.
    function = dill.loads(self.function)
    args = dill.loads(self.args)
    kwargs = dill.loads(self.kwargs)
    return function(*args, **kwargs)
def identify_denonavr_receivers():
    """Identify DenonAVR devices using SSDP and SCPD queries.

    Returns a list of dictionaries, one per discovered Denon AVR device,
    with keys "host", "modelName", "friendlyName", "presentationURL".
    """
    receivers = []
    for device in send_ssdp_broadcast():
        try:
            receiver = evaluate_scpd_xml(device["URL"])
        except ConnectionError:
            # Unreachable device: skip it instead of aborting discovery.
            continue
        if receiver:
            receivers.append(receiver)
    return receivers
def n2s(self, offset, length):
    """Serialize ``offset`` into a ``length``-character string.

    Byte order follows ``self.endian``: 'I' emits the least-significant
    byte first (little endian); any other value emits the
    most-significant byte first.
    """
    chars = []
    for _ in range(length):
        chars.append(chr(offset & 0xFF))
        offset >>= 8
    # chars is little-endian; flip it for big-endian output.
    if self.endian != 'I':
        chars.reverse()
    return ''.join(chars)
def unregister(self, name):
    """Unregister a function by name.

    ``name`` may be either a plain key or an object carrying a ``name``
    attribute (the attribute wins when present).

    Returns the removed entry, or None when nothing was registered.
    """
    key = getattr(name, 'name', name)
    return self.pop(key, None)
def get_parent_objective_bank_ids(self, objective_bank_id):
    """Gets the parent ``Ids`` of the given objective bank.

    arg:    objective_bank_id (osid.id.Id): the ``Id`` of an
            objective bank
    return: (osid.id.IdList) - the parent ``Ids`` of the objective bank
    raise:  NotFound - ``objective_bank_id`` is not found
    """
    # Prefer the catalog session when one has been configured.
    if self._catalog_session is not None:
        return self._catalog_session.get_parent_catalog_ids(
            catalog_id=objective_bank_id)
    return self._hierarchy_session.get_parents(id_=objective_bank_id)
def remove_product_version_from_build_configuration(id=None, name=None, product_version_id=None):
    """Remove a ProductVersion from association with a BuildConfiguration.

    Returns the formatted JSON response, or None when the underlying
    call produced no data.
    """
    response = remove_product_version_from_build_configuration_raw(
        id, name, product_version_id)
    return utils.format_json_list(response) if response else None
def append(self, state, symbol, action, destinationstate, production = None):
if action not in (None, "Accept", "Shift", "Reduce"):
raise TypeError
rule = {"action":action, "dest":destinationstate}
if action == "Reduce":
if rule is None:
raise TypeError("E... | Appends a new rule |
def get_status(video_id, _connection=None):
    """Get the status of a video given the ``video_id`` parameter.

    Uses ``_connection`` when supplied; otherwise a default
    APIConnection is created for the call.
    """
    conn = _connection or connection.APIConnection()
    return conn.post('get_upload_status', video_id=video_id)
def compress(data,
mode=DEFAULT_MODE,
quality=lib.BROTLI_DEFAULT_QUALITY,
lgwin=lib.BROTLI_DEFAULT_WINDOW,
lgblock=0,
dictionary=b''):
compressor = Compressor(
mode=mode,
quality=quality,
lgwin=lgwin,
lgblock=lgblock,
... | Compress a string using Brotli.
.. versionchanged:: 0.5.0
Added ``mode``, ``quality``, `lgwin``, ``lgblock``, and ``dictionary``
parameters.
:param data: A bytestring containing the data to compress.
:type data: ``bytes``
:param mode: The encoder mode.
:type mode: :class:`BrotliEnco... |
def host_context(func):
    """Sets the context of the setting to the current host."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        # Look up this host's settings and run the wrapped call inside them.
        host_settings = get_hosts_settings()[env.host]
        with settings(**host_settings):
            return func(*args, **kwargs)
    return wrapper
def usages(self):
row, col = self.editor.cursor()
self.log.debug('usages: in')
self.call_options[self.call_id] = {
"word_under_cursor": self.editor.current_word(),
"false_resp_msg": "Not a valid symbol under the cursor"}
self.send_at_point("UsesOfSymbol", ... | Request usages of whatever at cursor. |
def get(cls, bucket, key):
    """Get the tag object for ``bucket``/``key``, or None when absent."""
    query = cls.query.filter_by(bucket_id=as_bucket_id(bucket), key=key)
    return query.one_or_none()
def code(self, text, lang=None):
    """Add a code block.

    Writes ``text`` into a single 'code'-styled paragraph, emitting an
    explicit line break between source lines. ``lang`` is accepted for
    interface compatibility but not used here.
    """
    with self.paragraph(stylename='code'):
        lines = text.splitlines()
        if not lines:
            # Empty text previously raised IndexError on lines[-1];
            # render an empty code paragraph instead.
            return
        for line in lines[:-1]:
            self._code_line(line)
            self.linebreak()
        self._code_line(lines[-1])
def set_setting(key, val, env=None):
    """Change the value of ``key`` in the current environment, or in
    another environment when ``env`` is given.
    """
    return settings.set(key, val, env=env)
def apply_depth_first(nodes, func, depth=0, as_dict=False, parents=None):
if as_dict:
items = OrderedDict()
else:
items = []
if parents is None:
parents = []
node_count = len(nodes)
for i, node in enumerate(nodes):
first = (i == 0)
last = (i == (node_count - 1... | Given a structure such as the application menu layout described above, we
may want to apply an operation to each entry to create a transformed
version of the structure.
For example, let's convert all entries in the application menu layout from
above to upper-case:
>>> pprint(apply_depth_first(menu... |
def chunk_size(self, value):
    """Set the blob's default chunk size.

    :type value: int
    :param value: (Optional) The new chunk size; a positive value must
        be a multiple of ``self._CHUNK_SIZE_MULTIPLE``.
    :raises: :class:`ValueError` if ``value`` is not ``None`` and is not
        a multiple of 256 KB.
    """
    is_invalid = (
        value is not None
        and value > 0
        and value % self._CHUNK_SIZE_MULTIPLE != 0
    )
    if is_invalid:
        raise ValueError(
            "Chunk size must be a multiple of %d." % (self._CHUNK_SIZE_MULTIPLE,)
        )
    self._chunk_size = value
def main():
"Send some test strings"
actions =
SendKeys(actions, pause = .1)
keys = parse_keys(actions)
for k in keys:
print(k)
k.Run()
time.sleep(.1)
test_strings = [
"\n"
"(aa)some text\n",
"(a)some{ }text\n",
"(b)some{{}text\n",
... | Send some test strings |
def prepend(self, key, val, time=0, min_compress_len=0):
    """Prepend ``val`` to the beginning of the existing key's value.

    Only stores in memcache if the key already exists.
    Also see L{append}.

    @return: Nonzero on success.
    @rtype: int
    """
    return self._set("prepend", key, val, time, min_compress_len)
def addsshkey(self, title, key):
data = {'title': title, 'key': key}
request = requests.post(
self.keys_url, headers=self.headers, data=data,
verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 201:
return True
else:... | Add a new ssh key for the current user
:param title: title of the new key
:param key: the key itself
:return: true if added, false if it didn't add it (it could be because the name or key already exists) |
def deploy(self, id_networkv4):
    """Deploy network in equipments and set column 'active = 1' in table redeipv4.

    :param id_networkv4: ID for NetworkIPv4
    :return: Equipments configuration output
    """
    uri = 'api/networkv4/%s/equipments/' % id_networkv4
    return super(ApiNetworkIPv4, self).post(uri, data={})
def read_local_manifest(output_path):
local_manifest_path = get_local_manifest_path(output_path)
try:
with open(local_manifest_path, 'r') as f:
manifest = dict(get_files_from_textfile(f))
logging.debug('Retrieving %s elements from manifest', len(manifest))
return mani... | Return the contents of the local manifest, as a dictionary. |
def delete_shell(self, pid):
try:
os.kill(pid, signal.SIGHUP)
except OSError:
pass
num_tries = 30
while num_tries > 0:
try:
if os.waitpid(pid, os.WNOHANG)[0] != 0:
break
except OSError:
br... | This function will kill the shell on a tab, trying to send
a sigterm and if it doesn't work, a sigkill. Between these two
signals, we have a timeout of 3 seconds, so is recommended to
call this in another thread. This doesn't change any thing in
UI, so you can use python's start_new_thre... |
def avail_images(conn=None, call=None):
if call == 'action':
raise SaltCloudSystemExit(
'The avail_images function must be called with '
'-f or --function, or with the --list-images option'
)
if not conn:
conn = get_conn()
ret = {}
for item in conn.list_os... | List available images for Azure |
def register_functions(lib, ignore_errors):
    """Register function prototypes with a libclang library instance.

    This must be called as part of library instantiation so Python knows
    how to call out to the shared library.
    """
    for prototype in functionList:
        register_function(lib, prototype, ignore_errors)
def collection_choices():
    """Return collection choices, prefixed with a 'none' sentinel entry."""
    from invenio_collections.models import Collection
    choices = [(collection.id, collection.name)
               for collection in Collection.query.all()]
    return [(0, _('-None-'))] + choices
def iterable_source(iterable, target):
    """Convert an iterable into a stream of events.

    Args:
        iterable: A series of items which will be sent to the target one by one.
        target: The target coroutine or sink.

    Returns:
        An iterator over any remaining items.
    """
    iterator = iter(iterable)
    for item in iterator:
        try:
            target.send(item)
        except StopIteration:
            # Target is done: hand back the unconsumed remainder,
            # including the item it refused.
            return prepend(item, iterator)
    return empty_iter()
def nodes_ali(c_obj):
    """Get node objects from AliCloud."""
    raw_nodes = []
    try:
        raw_nodes = c_obj.list_nodes()
    except BaseHTTPError as e:
        abort_err("\r HTTP Error with AliCloud: {}".format(e))
    return adj_nodes_ali(raw_nodes)
def get_client(self, client_id):
self.assert_has_permission('clients.read')
uri = self.uri + '/oauth/clients/' + client_id
headers = self.get_authorization_headers()
response = requests.get(uri, headers=headers)
if response.status_code == 200:
return response.json()
... | Returns details about a specific client by the client_id. |
def destroy(self):
    """Tear down the syndic minion."""
    super(Syndic, self).destroy()
    # Release the local client and stop event forwarding when present.
    if hasattr(self, 'local'):
        del self.local
    if hasattr(self, 'forward_events'):
        self.forward_events.stop()
def C(w, Xs):
    """Calculate the cylinder center given the cylinder direction ``w``
    and a list of data points ``Xs``.
    """
    P = projection_matrix(w)
    projected = [np.dot(P, X) for X in Xs]
    A = calc_A(projected)
    A_hat = calc_A_hat(A, skew_matrix(w))
    numerator = sum(np.dot(Y, Y) * Y for Y in projected)
    denominator = np.trace(np.dot(A_hat, A))
    return np.dot(A_hat, numerator) / denominator
def define_mask_borders(image2d, sought_value, nadditional=0):
naxis2, naxis1 = image2d.shape
mask2d = np.zeros((naxis2, naxis1), dtype=bool)
borders = []
for i in range(naxis2):
jborder_min, jborder_max = find_pix_borders(
image2d[i, :],
sought_value=sought_value
... | Generate mask avoiding undesired values at the borders.
Set to True image borders with values equal to 'sought_value'
Parameters
----------
image2d : numpy array
Initial 2D image.
sought_value : int, float, bool
Pixel value that indicates missing data in the spectrum.
naddition... |
def hz2cents(freq_hz, base_frequency=10.0):
    """Convert an array of frequency values in Hz to cents.

    0 values are left in place.

    Parameters
    ----------
    freq_hz : np.ndarray
        Array of frequencies in Hz.
    base_frequency : float
        Base frequency for conversion. (Default value = 10.0)

    Returns
    -------
    cent : np.ndarray
        Frequencies in cents relative to ``base_frequency``.
    """
    cents = np.zeros(freq_hz.shape[0])
    nonzero = np.flatnonzero(freq_hz)
    cents[nonzero] = 1200.0 * np.log2(
        np.abs(freq_hz[nonzero]) / base_frequency)
    return cents
def decompose(self):
self.extract()
if len(self.contents) == 0:
return
current = self.contents[0]
while current is not None:
next = current.next
if isinstance(current, Tag):
del current.contents[:]
current.parent = None
... | Recursively destroys the contents of this tree. |
def requires(*params):
def requires(f, self, *args, **kwargs):
missing = filter(lambda x: kwargs.get(x) is None, params)
if missing:
msgs = ", ".join([PARAMETERS[x]['msg'] for x in missing])
raise ValueError("Missing the following parameters: %s" % msgs)
return f(self... | Raise ValueError if any ``params`` are omitted from the decorated kwargs.
None values are considered omissions.
Example usage on an AWS() method:
@requires('zone', 'security_groups')
def my_aws_method(self, custom_args, **kwargs):
# We'll only get here if 'kwargs' contained non-No... |
def set_scope(self, value):
    """Narrows the scopes of the commands.

    Appends ``value`` to the accumulated default command (space
    separated once a command already exists) and returns ``value``.
    """
    separator = ' ' if self.default_command else ''
    self.default_command += separator + value
    return value
def _parse(self, text):
text = str(text).strip()
if not text.startswith(DELIMITER):
return {}, text
try:
_, fm, content = BOUNDARY.split(text, 2)
except ValueError:
return {}, text
metadata = yaml.load(fm, Loader=self.loader_class)
meta... | Parse text with frontmatter, return metadata and content.
If frontmatter is not found, returns an empty metadata dictionary and original text content. |
def from_translation_key(
cls,
translation_key,
translations,
overlapping_reads,
ref_reads,
alt_reads,
alt_reads_supporting_protein_sequence,
transcripts_overlapping_variant,
transcripts_supporting_protein_sequen... | Create a ProteinSequence object from a TranslationKey, along with
all the extra fields a ProteinSequence requires. |
def create_entity_type(self,
parent,
entity_type,
language_code=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
... | Creates an entity type in the specified agent.
Example:
>>> import dialogflow_v2
>>>
>>> client = dialogflow_v2.EntityTypesClient()
>>>
>>> parent = client.project_agent_path('[PROJECT]')
>>>
>>> # TODO: Initialize ``entity_typ... |
def parse_filename(filename):
_patterns = patterns.get_expressions()
result = {}
for cmatcher in _patterns:
match = cmatcher.match(filename)
if match:
namedgroups = match.groupdict().keys()
result['pattern'] = cmatcher.pattern
result['series_name'] = match... | Parse media filename for metadata.
:param str filename: the name of media file
:returns: dict of metadata attributes found in filename
or None if no matching expression.
:rtype: dict |
def puts(s='', newline=True, stream=STDOUT):
max_width_ctx = _get_max_width_context()
if max_width_ctx:
cols, separator = max_width_ctx[-1]
s = max_width(s, cols, separator)
if newline:
s = tsplit(s, NEWLINES)
s = map(str, s)
indent = ''.join(INDENT_STRINGS)
s... | Prints given string to stdout. |
def Find(self, node_type, item_type):
if node_type == OtherNodes.DirectionNode:
child = self.GetChild(len(self.children) - 1)
while child is not None and not isinstance(
child.GetItem(),
item_type):
if child.GetItem().__class__.__na... | method for finding specific types of notation from nodes.
will currently return the first one it encounters because this method's only really intended
for some types of notation for which the exact value doesn't really
matter.
:param node_type: the type of node to look under
:... |
def get_name(self):
    """Accessor to service_description attribute or name if first not defined.

    :return: service name
    :rtype: str
    """
    for attr in ('service_description', 'name'):
        if hasattr(self, attr):
            return getattr(self, attr)
    return 'SERVICE-DESCRIPTION-MISSING'
def read_time_range(cls, *args, **kwargs):
criteria = list(args)
start = kwargs.get('start_timestamp')
end = kwargs.get('end_timestamp')
if start is not None:
criteria.append(cls.time_order <= -start)
if end is not None:
criteria.append(cls.time_order >= -... | Get all timezones set within a given time. Uses time_dsc_index
SELECT *
FROM <table>
WHERE time_order <= -<start_timestamp>
AND time_order >= -<end_timestamp>
:param args: SQLAlchemy filter criteria, (e.g., uid == uid, type == 1)
:param kwargs: start_timestamp and end_t... |
def add_channel(channel: EFBChannel):
    """Register the channel with the coordinator.

    Args:
        channel (EFBChannel): Channel to register
    """
    global master, slaves
    # Guard clause: reject anything that is not an EFBChannel.
    if not isinstance(channel, EFBChannel):
        raise TypeError("Channel instance is expected")
    if channel.channel_type == ChannelType.Slave:
        slaves[channel.channel_id] = channel
    else:
        master = channel
def visit_Stmt(self, node):
    """Add new variable definitions before the statement.

    Collects the definitions produced while visiting children into a
    fresh list, restores the previous list afterwards, and returns the
    new definitions followed by the original node.
    """
    previous_defs = self.defs or list()
    self.defs = list()
    self.generic_visit(node)
    collected = self.defs
    self.defs = previous_defs
    return collected + [node]
def was_run_code(self, get_all=True):
    """Get all the code that was run.

    With ``get_all`` the stored chunks are collapsed (in place) into one
    newline-joined string; otherwise only the most recent chunk is
    returned. Returns "" when nothing was stored.
    """
    if self.stored is None:
        return ""
    if get_all:
        self.stored = ["\n".join(self.stored)]
    return self.stored[-1]
def analyze(self, text):
logger.debug('Sending %r to LUIS app %s', text, self._url)
r = requests.get(self._url, {'q': text})
logger.debug('Request sent to LUIS URL: %s', r.url)
logger.debug(
'LUIS returned status %s with text: %s', r.status_code, r.text)
r.raise_for_s... | Sends text to LUIS for analysis.
Returns a LuisResult. |
def find_by_id(self, tag, params=None, **options):
    """Returns the complete tag record for a single tag.

    Parameters
    ----------
    tag : {Id} The tag to get.
    [params] : {Object} Parameters for the request
    """
    # Avoid the shared mutable default argument; None means "no params".
    if params is None:
        params = {}
    path = "/tags/%s" % (tag)
    return self.client.get(path, params, **options)
def create_bucket(self, bucket_name, headers=None,
location=Location.DEFAULT, policy=None):
check_lowercase_bucketname(bucket_name)
if policy:
if headers:
headers[self.provider.acl_header] = policy
else:
headers = {self.provid... | Creates a new located bucket. By default it's in the USA. You can pass
Location.EU to create an European bucket.
:type bucket_name: string
:param bucket_name: The name of the new bucket
:type headers: dict
:param headers: Additional headers to pass along with the reques... |
def get_snapshot(name, config_path=_DEFAULT_CONFIG_PATH, with_packages=False):
_validate_config(config_path)
sources = list()
cmd = ['snapshot', 'show', '-config={}'.format(config_path),
'-with-packages={}'.format(str(with_packages).lower()),
name]
cmd_ret = _cmd_run(cmd)
ret =... | Get detailed information about a snapshot.
:param str name: The name of the snapshot given during snapshot creation.
:param str config_path: The path to the configuration file for the aptly instance.
:param bool with_packages: Return a list of packages in the snapshot.
:return: A dictionary containing... |
def line(
    xo: int, yo: int, xd: int, yd: int, py_callback: Callable[[int, int], bool]
) -> bool:
    """Iterate over a line using a callback function.

    ``py_callback`` receives each (x, y) point — including both
    endpoints — and returns True to continue or False to stop.

    Returns True when the whole line was visited, False when the
    callback stopped iteration early.
    """
    for x, y in line_iter(xo, yo, xd, yd):
        if not py_callback(x, y):
            return False
    return True
def cross_validation(scheme_class, num_examples, num_folds, strict=True,
**kwargs):
if strict and num_examples % num_folds != 0:
raise ValueError(("{} examples are not divisible in {} evenly-sized " +
"folds. To allow this, have a look at the " +
... | Return pairs of schemes to be used for cross-validation.
Parameters
----------
scheme_class : subclass of :class:`IndexScheme` or :class:`BatchScheme`
The type of the returned schemes. The constructor is called with an
iterator and `**kwargs` as arguments.
num_examples : int
The... |
def GetRootKey(self):
root_registry_key = virtual.VirtualWinRegistryKey('')
for mapped_key in self._MAPPED_KEYS:
key_path_segments = key_paths.SplitKeyPath(mapped_key)
if not key_path_segments:
continue
registry_key = root_registry_key
for name in key_path_segments[:-1]:
... | Retrieves the Windows Registry root key.
Returns:
WinRegistryKey: Windows Registry root key.
Raises:
RuntimeError: if there are multiple matching mappings and
the correct mapping cannot be resolved. |
def lookupEncoding(encoding):
if isinstance(encoding, binary_type):
try:
encoding = encoding.decode("ascii")
except UnicodeDecodeError:
return None
if encoding is not None:
try:
return webencodings.lookup(encoding)
except AttributeError:
... | Return the python codec name corresponding to an encoding or None if the
string doesn't correspond to a valid encoding. |
def GetNewEventId(self, event_time=None):
    """Return a unique Event ID string.

    The ID has the form ``<time_usec>:<hostname>:<pid>``; when
    ``event_time`` is omitted the current time in microseconds is used.
    """
    if event_time is None:
        event_time = int(time.time() * 1e6)
    return "{}:{}:{}".format(event_time, socket.gethostname(), os.getpid())
def create_server(self, server_name, *args, **kwargs):
server = ServerConnection(name=server_name, reactor=self)
if args or kwargs:
server.set_connect_info(*args, **kwargs)
for verb, infos in self._event_handlers.items():
for info in infos:
server.register... | Create an IRC server connection slot.
The server will actually be connected to when
:meth:`girc.client.ServerConnection.connect` is called later.
Args:
server_name (str): Name of the server, to be used for functions and accessing the
server later through the reactor... |
def round(arg, digits=None):
    """Round values either to integer or indicated number of decimal places.

    Returns
    -------
    rounded : type depending on digits argument
        digits None or 0: decimal for decimal types, bigint otherwise;
        digits nonzero: decimal for decimal types, double otherwise.
    """
    return ops.Round(arg, digits).to_expr()
def publish_command_start(self, command, database_name,
request_id, connection_id, op_id=None):
if op_id is None:
op_id = request_id
event = CommandStartedEvent(
command, database_name, request_id, connection_id, op_id)
for subscriber in self... | Publish a CommandStartedEvent to all command listeners.
:Parameters:
- `command`: The command document.
- `database_name`: The name of the database this command was run
against.
- `request_id`: The request id for this operation.
- `connection_id`: The address... |
def _has_x(self, kwargs):
return (('x' in kwargs) or (self._element_x in kwargs) or
(self._type == 3 and self._element_1mx in kwargs)) | Returns True if x is explicitly defined in kwargs |
def add_section(self, section):
    """Create a new section in the configuration.

    Extends RawConfigParser.add_section by validating that the section
    name is a string.
    """
    validated, _, _ = self._validate_value_types(section=section)
    super(ConfigParser, self).add_section(validated)
def get_by(self, name):
    """Find a todo list element by name; returns None when not found."""
    element = self.controlled_list.get_by(name)
    if element:
        return TodoElementUX(parent=self, controlled_element=element)
    return None
def save_json(obj, filename, **kwargs):
    """Save an object as a JSON file.

    Args:
        obj: The object to save. Must be JSON-serializable.
        filename: Path to the output file.
        **kwargs: Additional arguments to `json.dump`.
    """
    with open(filename, mode='w', encoding='utf-8') as fp:
        json.dump(obj, fp, **kwargs)
def as_text(str_or_bytes, encoding='utf-8', errors='strict'):
    """Return input string as a text string.

    Should work for input string that's unicode or bytes,
    given proper encoding.

    >>> print(as_text(b'foo'))
    foo
    """
    if not isinstance(str_or_bytes, text):
        return str_or_bytes.decode(encoding, errors)
    return str_or_bytes
def create_basic_op_node(op_name, node, kwargs):
    """Helper function to create a basic operator node that doesn't
    contain op-specific attrs.
    """
    name, input_nodes, _ = get_inputs(node, kwargs)
    basic_node = onnx.helper.make_node(op_name, input_nodes, [name], name=name)
    return [basic_node]
def _build_credentials(self, nexus_switches):
    """Build credential table for Rest API Client.

    :param nexus_switches: switch config keyed by switch IP
    :returns credentials: dict mapping switch IP to a credentials tuple
    """
    return {
        switch_ip: (
            attrs[const.USERNAME],
            attrs[const.PASSWORD],
            attrs[const.HTTPS_VERIFY],
            attrs[const.HTTPS_CERT],
            None,
        )
        for switch_ip, attrs in nexus_switches.items()
    }
def global_include(self, pattern):
    """Include all files anywhere in the current directory that match the
    pattern. This is very inefficient on large file trees.
    """
    if self.allfiles is None:
        self.findall()
    matcher = translate_pattern(os.path.join('**', pattern))
    matches = [path for path in self.allfiles if matcher.match(path)]
    self.extend(matches)
    return bool(matches)
def _aux_type(self, i):
    """Data-type of the array's ith aux data.

    Returns
    -------
    numpy.dtype
        This BaseSparseNDArray's aux data type.
    """
    dtype_holder = ctypes.c_int()
    check_call(_LIB.MXNDArrayGetAuxType(
        self.handle, i, ctypes.byref(dtype_holder)))
    return _DTYPE_MX_TO_NP[dtype_holder.value]
def angle2xyz(azi, zen):
    """Convert azimuth and zenith to cartesian (x, y, z).

    Inputs are in degrees (converted via deg2rad before use).
    """
    azi_rad = xu.deg2rad(azi)
    zen_rad = xu.deg2rad(zen)
    sin_zen = xu.sin(zen_rad)
    x = sin_zen * xu.sin(azi_rad)
    y = sin_zen * xu.cos(azi_rad)
    z = xu.cos(zen_rad)
    return x, y, z
def calc_mass_from_fit_and_conv_factor(A, Damping, ConvFactor):
    """Calculates mass from the A parameter from fitting, the damping from
    fitting in angular units and the conversion factor calculated from
    comparing the ratio of the z signal and first harmonic of z.

    Parameters
    ----------
    A : float
        A factor calculated from fitting
    Damping : float
        Damping calculated from fitting (angular units)
    ConvFactor : float
        Conversion factor between z signal and first harmonic

    Returns
    -------
    float
        Mass estimated from the fit parameters.
    """
    T0 = 300  # assumed bath temperature in kelvin — TODO confirm
    return 2 * Boltzmann * T0 / (pi * A) * ConvFactor**2 * Damping
def progress_bar(name, maxval, prefix='Converting'):
widgets = ['{} {}: '.format(prefix, name), Percentage(), ' ',
Bar(marker='=', left='[', right=']'), ' ', ETA()]
bar = ProgressBar(widgets=widgets, max_value=maxval, fd=sys.stdout).start()
try:
yield bar
finally:
bar.upda... | Manages a progress bar for a conversion.
Parameters
----------
name : str
Name of the file being converted.
maxval : int
Total number of steps for the conversion. |
def set_flowcontrol_receive(self, name, value=None, default=False,
                            disable=False):
    """Configures the interface flowcontrol receive value.

    Args:
        name (string): The interface identifier. It must be a full
            interface name (ie Ethernet, not Et).
        value (boolean): True if the interface should enable receiving
            flow control packets, otherwise False.
        default (boolean): Revert to the default configuration.
        disable (boolean): Negate the configuration.
    """
    return self.set_flowcontrol(name, 'receive', value, default, disable)
def connect(self, agent='Python'):
    """Context manager for HTTP connection state; sends a GET request to
    ``self.url`` with the given User-Agent and ensures the socket is
    closed afterwards.

    Exception is raised at the yield statement.

    :yield request: FileIO<Socket>
    """
    request = urlopen(Request(self.url, headers={'User-Agent': agent}))
    try:
        yield request
    finally:
        request.close()
def _JsonDecodeDict(self, data):
rv = {}
for key, value in data.iteritems():
if isinstance(key, unicode):
key = self._TryStr(key)
if isinstance(value, unicode):
value = self._TryStr(value)
elif isinstance(value, list):
value = self._JsonDecodeList(value)
rv[key] =... | Json object decode hook that automatically converts unicode objects. |
def material_advantage(self, input_color, val_scheme):
if self.get_king(input_color).in_check(self) and self.no_moves(input_color):
return -100
if self.get_king(-input_color).in_check(self) and self.no_moves(-input_color):
return 100
return sum([val_scheme.val(piece, inpu... | Finds the advantage a particular side possesses given a value scheme.
:type: input_color: Color
:type: val_scheme: PieceValues
:rtype: double |
def _convert_to_array(array_like, dtype):
if isinstance(array_like, bytes):
return np.frombuffer(array_like, dtype=dtype)
return np.asarray(array_like, dtype=dtype) | Convert Matrix attributes which are array-like or buffer to array. |
def on_add_vrf_conf(self, evt):
vrf_conf = evt.value
route_family = vrf_conf.route_family
assert route_family in vrfs.SUPPORTED_VRF_RF
vrf_table = self._table_manager.create_and_link_vrf_table(vrf_conf)
vrf_conf.add_listener(ConfWithStats.UPDATE_STATS_LOG_ENABLED_EVT,
... | Event handler for new VrfConf.
Creates a VrfTable to store routing information related to new Vrf.
Also arranges for related paths to be imported to this VrfTable. |
async def info(self) -> Optional[JobDef]:
info = await self.result_info()
if not info:
v = await self._redis.get(job_key_prefix + self.job_id, encoding=None)
if v:
info = unpickle_job(v)
if info:
info.score = await self._redis.zscore(queue_name... | All information on a job, including its result if it's available, does not wait for the result. |
async def add_user(self, username, password=None, display_name=None):
if not display_name:
display_name = username
user_facade = client.UserManagerFacade.from_connection(
self.connection())
users = [client.AddUser(display_name=display_name,
... | Add a user to this controller.
:param str username: Username
:param str password: Password
:param str display_name: Display name
:returns: A :class:`~juju.user.User` instance |
def pattern_to_regex(pattern: str) -> str:
    """Convert a URL pattern to a regex string.

    A trailing '*' makes the match open-ended; otherwise the regex is
    anchored at both ends.
    """
    if pattern.endswith("*"):
        pattern = pattern[:-1]
        tail = ""
    else:
        tail = "$"
    # Escape literal metacharacters before substituting the variables.
    for meta_char in META_CHARS:
        pattern = pattern.replace(meta_char, "\\" + meta_char)
    return "^" + VARS_PT.sub(regex_replacer, pattern) + tail
def draw_status(self, writer, idx):
if self.term.is_a_tty:
writer(self.term.hide_cursor())
style = self.screen.style
writer(self.term.move(self.term.height - 1))
if idx == self.last_page:
last_end = u'(END)'
else:
last_e... | Conditionally draw status bar when output terminal is a tty.
:param writer: callable writes to output stream, receiving unicode.
:param idx: current page position index.
:type idx: int |
def _uniform_phi(M):
return np.random.uniform(-np.pi, np.pi, M) | Generate M random numbers in [-pi, pi). |
def pagination_links(paginator_page, show_pages, url_params=None,
first_page_label=None, last_page_label=None,
page_url=''):
return {
'items': paginator_page,
'show_pages': show_pages,
'url_params': url_params,
'first_page_label': first_page_... | Django template tag to display pagination links for a paginated
list of items.
Expects the following variables:
* the current :class:`~django.core.paginator.Page` of a
:class:`~django.core.paginator.Paginator` object
* a dictionary of the pages to be displayed, in the format
generated b... |
def Matches(self, registry_key, search_depth):
if self._key_path_segments is None:
key_path_match = None
else:
key_path_match = self._CheckKeyPath(registry_key, search_depth)
if not key_path_match:
return False, key_path_match
if search_depth != self._number_of_key_path_segments:... | Determines if the Windows Registry key matches the find specification.
Args:
registry_key (WinRegistryKey): Windows Registry key.
search_depth (int): number of key path segments to compare.
Returns:
tuple: contains:
bool: True if the Windows Registry key matches the find specificati... |
def check_folders(name):
if os.getcwd().endswith('analyses'):
correct = input('You are in an analyses folder. This will create '
'another analyses folder inside this one. Do '
'you want to continue? (y/N)')
if correct != 'y':
return False
... | Only checks and asks questions. Nothing is written to disk. |
def render_chart_to_file(self, template_name: str, chart: Any, path: str):
    """Render a chart or page to local html files.

    :param template_name: The name of template file.
    :param chart: A Chart or Page object
    :param path: The destination file which the html code write to
    """
    template = self.env.get_template(template_name)
    rendered = template.render(chart=self.generate_js_link(chart))
    write_utf8_html_file(path, self._reg_replace(rendered))
def _get_sample_generator(samples):
if isinstance(samples, Mapping):
def samples_generator():
for ind in range(samples[list(samples.keys())[0]].shape[0]):
yield np.array([samples[s][ind, :] for s in sorted(samples)])
elif isinstance(samples, np.ndarray):
def samples_g... | Get a sample generator from the given polymorphic input.
Args:
samples (ndarray, dict or generator): either an matrix of shape (d, p, n) with d problems, p parameters and
n samples, or a dictionary with for every parameter a matrix with shape (d, n) or, finally,
a generator function... |
def linear_set_layer(layer_size,
inputs,
context=None,
activation_fn=tf.nn.relu,
dropout=0.0,
name=None):
with tf.variable_scope(
name, default_name="linear_set_layer", values=[inputs]):
outputs = conv1d... | Basic layer type for doing funky things with sets.
Applies a linear transformation to each element in the input set.
If a context is supplied, it is concatenated with the inputs.
e.g. One can use global_pool_1d to get a representation of the set which
can then be used as the context for the next layer.
... |
def _nan_minmax_object(func, fill_value, value, axis=None, **kwargs):
valid_count = count(value, axis=axis)
filled_value = fillna(value, fill_value)
data = getattr(np, func)(filled_value, axis=axis, **kwargs)
if not hasattr(data, 'dtype'):
data = dtypes.fill_value(value.dtype) if valid_count == ... | In house nanmin and nanmax for object array |
def encode_eternal_jwt_token(self, user, **custom_claims):
    """Encode a jwt token that never expires.

    .. note:: This should be used sparingly since the token could become
        a security concern if it is ever lost; make sure the application
        also implements a blacklist for such tokens.
    """
    return self.encode_jwt_token(
        user,
        override_access_lifespan=VITAM_AETERNUM,
        override_refresh_lifespan=VITAM_AETERNUM,
        **custom_claims
    )
def combine_relevance_tables(relevance_tables):
    """Create a combined relevance table out of a list of relevance tables,
    aggregating the p-values and the relevances.

    :param relevance_tables: A list of relevance tables
    :type relevance_tables: List[pd.DataFrame]
    :return: The combined relevance table
    :rtype: pandas.DataFrame
    """
    def _merge(left, right):
        # A feature is relevant in the combination if it is relevant in any
        # table, and it keeps the smallest p-value seen (missing values -> 1).
        left.relevant |= right.relevant
        left.p_value = left.p_value.combine(right.p_value, min, 1)
        return left

    # NOTE: the first table in the list is mutated in place and returned.
    return reduce(_merge, relevance_tables)
def build_plans(self):
    """Gets the Build Plans API client.

    Returns:
        BuildPlans: the lazily-created, cached client instance.
    """
    # Reuse the cached client when one already exists; otherwise build it
    # once from this object's connection and remember it.
    if self.__build_plans:
        return self.__build_plans
    self.__build_plans = BuildPlans(self.__connection)
    return self.__build_plans
def _gst_available():
try:
import gi
except ImportError:
return False
try:
gi.require_version('Gst', '1.0')
except (ValueError, AttributeError):
return False
try:
from gi.repository import Gst
except ImportError:
return False
return True | Determine whether Gstreamer and the Python GObject bindings are
installed. |
def query_param(self, key, value=None, default=None, as_list=False):
parse_result = self.query_params()
if value is not None:
if isinstance(value, (list, tuple)):
value = list(map(to_unicode, value))
else:
value = to_unicode(value)
pars... | Return or set a query parameter for the given key
The value can be a list.
:param string key: key to look for
:param string default: value to return if ``key`` isn't found
:param boolean as_list: whether to return the values as a list
:param string value: the new query paramete... |
def istring(self, in_string=''):
    """Return a string that uses this server's IRC casemapping.

    This string's equality with other strings, ``lower()``, and ``upper()``
    take this server's casemapping into account. Use it for things such as
    nicks and channel names, where correct casemapping matters.

    :param in_string: plain string to wrap
    :return: an ``IString`` bound to this server's casemapping
    """
    mapped = IString(in_string)
    mapped.set_std(self.features.get('casemapping'))
    # While the casemapping is not yet fixed, the string is remembered in
    # _imaps -- presumably so it can be re-mapped later; verify against
    # wherever _casemap_set is flipped.
    if not self._casemap_set:
        self._imaps.append(mapped)
    return mapped
def copy_file(self, file_id, dest_folder_id):
    """Copy file to new destination.

    Args:
        file_id (int): ID of the file to copy.
        dest_folder_id (int): ID of parent folder you are copying to.

    Returns:
        dict. Response from Box.

    Raises:
        BoxError: An error response is returned from Box (status_code >= 400).
    """
    target = "/files/" + unicode(file_id) + "/copy"
    payload = {"parent": {"id": unicode(dest_folder_id)}}
    return self.__request("POST", target, data=payload)
def getBWTRange(self, start, end):
    """Mask the complexity of retrieving a chunk of the BWT from the
    compressed format.

    @param start - the beginning of the range to retrieve
    @param end - the end of the range in normal python notation
                 (bwt[end] is not part of the return)
    @return - the decompressed values in bwt[start:end]
    """
    # Translate absolute positions into compressed-block coordinates.
    # assumes binSize == 1 << bitPower (shift and division agree) -- TODO confirm
    firstBlock = start >> self.bitPower
    lastBlock = int(math.floor(float(end) / self.binSize))
    blockStart = firstBlock * self.binSize
    decompressed = self.decompressBlocks(firstBlock, lastBlock)
    return decompressed[start - blockStart:end - blockStart]
def _centroids(self, verts):
r
value = sp.zeros([len(verts), 3])
for i, i_verts in enumerate(verts):
value[i] = np.mean(i_verts, axis=0)
return value | r'''
Function to calculate the centroid as the mean of a set of vertices.
Used for pore and throat. |
def clean(self):
    """Validates the current instance.

    The permission must target exactly one of a regular user or an
    anonymous user -- never both and never neither.
    """
    super().clean()
    targets_nobody = self.user is None and not self.anonymous_user
    targets_both = bool(self.user and self.anonymous_user)
    if targets_nobody or targets_both:
        raise ValidationError(
            _('A permission should target either a user or an anonymous user'),
        )
def debug_variable_node_render(self, context):
try:
output = self.filter_expression.resolve(context)
output = template_localtime(output, use_tz=context.use_tz)
output = localize(output, use_l10n=context.use_l10n)
output = force_text(output)
except Exception as e:
if not h... | Like DebugVariableNode.render, but doesn't catch UnicodeDecodeError. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.