code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def acknowledge_gnome_notification():
x0, y0 = mouse_pos()
mouse_move(10000, 10000)
x, y = mouse_pos()
mouse_rmove(-x/2, 0)
mouse_click(LEFT)
time.sleep(.2)
mouse_move(x0, y0) | Moves mouse pointer to the bottom center of the screen and clicks on it. |
def _expandGLQ(self, zeros, lmax, lmax_calc):
if self.normalization == '4pi':
norm = 1
elif self.normalization == 'schmidt':
norm = 2
elif self.normalization == 'unnorm':
norm = 3
elif self.normalization == 'ortho':
norm = 4
else:
... | Evaluate the coefficients on a Gauss-Legendre quadrature grid. |
def _traverse_command(self, name, *args, **kwargs):
args = list(args)
args.insert(0, self.name)
return super(InstanceHashField, self)._traverse_command(name, *args, **kwargs) | Add key AND the hash field to the args, and call the Redis command. |
def add(self, snapshot, component='main'):
try:
self.components[component].append(snapshot)
except KeyError:
self.components[component] = [snapshot] | Add snapshot of component to publish |
def reset(self):
for key in list(self.keys()): self.iterators[key] = _itertools.cycle(self[key])
return self | Resets the style cycle. |
def parse(self, instrs):
instrs_reil = []
try:
for instr in instrs:
instr_lower = instr.lower()
if instr_lower not in self._cache:
self._cache[instr_lower] = instruction.parseString(
instr_lower)[0]
i... | Parse an IR instruction. |
def _extract_yaml_block(self, indent, fh):
raw_yaml = []
indent_match = re.compile(r"^{}".format(indent))
try:
fh.next()
while indent_match.match(fh.peek()):
raw_yaml.append(fh.next().replace(indent, "", 1))
if self.yaml_block_end.match(fh.... | Extract a raw yaml block from a file handler |
def _BuildAuthenticatorResponse(self, app_id, client_data, plugin_response):
encoded_client_data = self._Base64Encode(client_data)
signature_data = str(plugin_response['signatureData'])
key_handle = str(plugin_response['keyHandle'])
response = {
'clientData': encoded_client_data,
'signat... | Builds the response to return to the caller. |
def _SignedVarintEncoder():
local_chr = chr
def EncodeSignedVarint(write, value):
if value < 0:
value += (1 << 64)
bits = value & 0x7f
value >>= 7
while value:
write(0x80|bits)
bits = value & 0x7f
value >>= 7
return write(bits)
return EncodeSignedVarint | Return an encoder for a basic signed varint value. |
def all_minutes(self):
opens_in_ns = self._opens.values.astype(
'datetime64[ns]',
).view('int64')
closes_in_ns = self._closes.values.astype(
'datetime64[ns]',
).view('int64')
return DatetimeIndex(
compute_all_minutes(opens_in_ns, closes_in_ns),... | Returns a DatetimeIndex representing all the minutes in this calendar. |
def search_line(line, search, searchtype):
if searchtype == 're' or searchtype == 'word':
return re.search(search, line)
elif searchtype == 'pos':
return searcher.search_out(line, search)
elif searchtype == 'hyper':
return searcher.hypernym_search(line, search) | Return True if search term is found in given line, False otherwise. |
def _pickle_load(path):
_, ext = os.path.splitext(path)
topology = None
if sys.version_info.major == 2:
if ext == '.pickle2':
with open(path, 'rb') as f:
topology = pickle.load(f)
elif ext in ('.pickle3', '.pickle'):
wit... | Loads pickled topology. Careful with Python versions though! |
def make_call_with_cb(self, fun, *args):
cid, event = self.handler.register_callback()
argscp = list(args)
argscp.append(cid)
self.make_call(fun, *argscp)
return event | Makes an API call with a callback to wait for |
def _connect(self):
try:
while True:
_LOGGER.info('Trying to connect to %s', self.server_address)
try:
yield from asyncio.wait_for(
self.loop.create_connection(
lambda: self.protocol, *self.server... | Connect to the socket. |
def savePkeyPem(self, pkey, path):
with s_common.genfile(path) as fd:
fd.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey)) | Save a private key in PEM format to a file outside the certdir. |
def _firmware_update(firmwarefile='', host='',
directory=''):
dest = os.path.join(directory, firmwarefile[7:])
__salt__['cp.get_file'](firmwarefile, dest)
username = __pillar__['proxy']['admin_user']
password = __pillar__['proxy']['admin_password']
__salt__['dracr.update_firmwar... | Update firmware for a single host |
def rgetattr(obj, attr, *args):
def _getattr(obj, attr):
return getattr(obj, attr, *args)
return functools.reduce(_getattr, [obj] + attr.split(".")) | Get attr that handles dots in attr name. |
def custom_object_prefix_strict(instance):
if (instance['type'] not in enums.TYPES and
instance['type'] not in enums.RESERVED_OBJECTS and
not CUSTOM_TYPE_PREFIX_RE.match(instance['type'])):
yield JSONError("Custom object type '%s' should start with 'x-' "
"fol... | Ensure custom objects follow strict naming style conventions. |
def open(cls, sock, chunk_type, isatty, chunk_eof_type=None, buf_size=None, select_timeout=None):
with cls.open_multi(sock,
(chunk_type,),
(isatty,),
chunk_eof_type,
buf_size,
select_timeout) as c... | Yields the write side of a pipe that will copy appropriately chunked values to a socket. |
def fraction_correct_fuzzy_linear_create_vector(z, z_cutoff, z_fuzzy_range):
assert(z_fuzzy_range * 2 < z_cutoff)
if (z == None or numpy.isnan(z)):
return None
elif (z >= z_cutoff + z_fuzzy_range):
return [0, 0, 1]
elif (z <= -z_cutoff - z_fuzzy_range):
return [1, 0, 0]
elif ... | A helper function for fraction_correct_fuzzy_linear. |
def _stop_remote_console(self):
if self._telnet_server:
self._telnet_server.close()
yield from self._telnet_server.wait_closed()
self._remote_pipe.close()
self._telnet_server = None | Stops remote console support for this VM. |
def preview(self, when=timezone.now(), **kwargs):
return self.operate_on(when=when, apply=False, **kwargs) | Preview transactions, but don't actually save changes to list. |
def create_bandwidth_limit_rule(self, policy, body=None):
return self.post(self.qos_bandwidth_limit_rules_path % policy,
body=body) | Creates a new bandwidth limit rule. |
def _match_lhs(cp, rules):
rule_matches = []
for rule in rules:
reactant_pattern = rule.rule_expression.reactant_pattern
for rule_cp in reactant_pattern.complex_patterns:
if _cp_embeds_into(rule_cp, cp):
rule_matches.append(rule)
break
return rule_... | Get rules with a left-hand side matching the given ComplexPattern. |
def getblock(self, hash: str) -> dict:
return cast(dict, self.api_fetch('getblock?hash=' + hash)) | Returns information about the block with the given hash. |
def copytree(src, dst, symlinks=False, ignore=None):
if not osp.exists(dst):
os.makedirs(dst)
for item in os.listdir(src):
s = osp.join(src, item)
d = osp.join(dst, item)
if osp.isdir(s):
shutil.copytree(s, d, symlinks, ignore)
else:
shutil.copy2(s... | Copy from source directory to destination |
def separate_reach_logs(log_str):
log_lines = log_str.splitlines()
reach_logs = []
reach_lines = []
adding_reach_lines = False
for l in log_lines[:]:
if not adding_reach_lines and 'Beginning reach' in l:
adding_reach_lines = True
elif adding_reach_lines and 'Reach finishe... | Get the list of reach logs from the overall logs. |
def for_json(self):
value = super(DatetimeField, self).for_json()
if isinstance(value, pendulum.Interval):
return value.in_seconds() * 1000
if isinstance(value, datetime):
return self.format_datetime(value)
if isinstance(value, pendulum.Time):
return s... | Return date ISO8601 string formats for datetime, date, and time values, milliseconds for intervals |
def getVersion():
print('epochs version:', str(CDFepoch.version) + '.' +
str(CDFepoch.release) + '.'+str(CDFepoch.increment)) | Shows the code version. |
def refresh_context(self):
User = self.model('res.user')
self.context = User.get_preferences(True)
return self.context | Get the default context of the user and save it |
def parse_eep(self, rorg_func=None, rorg_type=None, direction=None, command=None):
if rorg_func is not None and rorg_type is not None:
self.select_eep(rorg_func, rorg_type, direction, command)
provides, values = self.eep.get_values(self._profile, self._bit_data, self._bit_status)
sel... | Parse EEP based on FUNC and TYPE |
def cross_entropy_error(self, input_data, targets, average=True,
cache=None, prediction=False,
sum_errors=True):
loss = []
if cache is None:
cache = self.n_tasks * [None]
for targets_task, cache_task, task in \
izip(... | Computes the cross-entropy error for all tasks. |
def first(self):
page = self.get_page(num_elements=1)
data = self.extract_data(page)
if data:
return data[0] | Extract the first AppNexus object present in the response |
def config_babel(app):
" Init application with babel. "
babel.init_app(app)
def get_locale():
return request.accept_languages.best_match(app.config['BABEL_LANGUAGES'])
babel.localeselector(get_locale) | Init application with babel. |
def _get_session(server):
if server in _sessions:
return _sessions[server]
config = _get_spacewalk_configuration(server)
if not config:
raise Exception('No config for \'{0}\' found on master'.format(server))
session = _get_client_and_key(config['api_url'], config['username'], config['pas... | Get session and key |
def to_array(self, channels=2):
return np.linspace(self.volume, self.volume,
self.duration * channels).reshape(self.duration, channels) | Generate the array of multipliers for the dynamic |
def execd_submodule_paths(command, execd_dir=None):
for module_path in execd_module_paths(execd_dir):
path = os.path.join(module_path, command)
if os.access(path, os.X_OK) and os.path.isfile(path):
yield path | Generate a list of full paths to the specified command within exec_dir. |
def mgmt_nw_id(cls):
if cls._mgmt_nw_uuid is None:
tenant_id = cls.l3_tenant_id()
if not tenant_id:
return
net = bc.get_plugin().get_networks(
bc.context.get_admin_context(),
{'tenant_id': [tenant_id],
'name': [... | Returns id of the management network. |
def print_trace(self):
traceback.print_exc()
for tb in self.tracebacks:
print tb,
print '' | Prints stack trace for current exceptions chain. |
def room_members(self, stream_id):
req_hook = 'pod/v2/room/' + str(stream_id) + '/membership/list'
req_args = None
status_code, response = self.__rest__.GET_query(req_hook, req_args)
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response | get list of room members |
def _mp_run_check(tasks, results, options):
try:
for index, change in iter(tasks.get, None):
change.check()
squashed = squash(change, options=options)
change.clear()
results.put((index, squashed))
except KeyboardInterrupt:
return | a helper function for multiprocessing with DistReport. |
def spin_py(self):
return conversions.primary_spin(self.mass1, self.mass2, self.spin1y,
self.spin2y) | Returns the y-component of the spin of the primary mass. |
def rbetabin(alpha, beta, n, size=None):
phi = np.random.beta(alpha, beta, size)
return np.random.binomial(n, phi) | Random beta-binomial variates. |
def moveToPoint(self, xxx_todo_changeme):
(x,y) = xxx_todo_changeme
self.set_x(float(self.get_x()) + float(x))
self.set_y(float(self.get_y()) + float(y)) | Moves the rect to the point x,y |
def printInspectors():
from argos.application import ArgosApplication
argosApp = ArgosApplication()
argosApp.loadOrInitRegistries()
for regItem in argosApp.inspectorRegistry.items:
print(regItem.fullName) | Prints a list of inspectors |
def _get_paths(self, fullname):
real_path = os.path.join(*fullname[len(self.package_prefix):].split('.'))
for base_path in sys.path:
if base_path == '':
base_path = os.getcwd()
path = os.path.join(base_path, real_path)
yield path + '.ipynb'
... | Generate ordered list of paths we should look for fullname module in |
def can_fetch(self, useragent, url):
if self.disallow_all:
return False
if self.allow_all:
return True
parsed_url = urllib.parse.urlparse(urllib.parse.unquote(url))
url = urllib.parse.urlunparse(('','',parsed_url.path,
parsed_url.params,parsed_url.quer... | using the parsed robots.txt decide if useragent can fetch url |
def source_roots(self, document_path):
files = _utils.find_parents(self._root_path, document_path, ['setup.py']) or []
return [os.path.dirname(setup_py) for setup_py in files] | Return the source roots for the given document. |
def _get_number_of_slices(self, slice_type):
if slice_type == SliceType.AXIAL:
return self.dimensions[self.axial_orientation.normal_component]
elif slice_type == SliceType.SAGITTAL:
return self.dimensions[self.sagittal_orientation.normal_component]
elif slice_type == Slic... | Get the number of slices in a certain direction |
def stop_workers(self, _join_arbiter=True):
self._must_stop.set()
self._workers.stop()
self._result_notifier.join()
self._broker.stop()
if _join_arbiter:
self._arbiter.join()
self._reset() | Stop the workers and wait for them to terminate. |
def _handle_sigint(self, signum: int, frame: Any) -> None:
logger.warning("Catched SIGINT")
self.shutdown() | Shutdown after processing current task. |
def make_request(self, method, path, headers=None, data='', host=None,
auth_path=None, sender=None, override_num_retries=None):
http_request = self.build_base_http_request(method, path, auth_path,
{}, headers, data, host)
return se... | Makes a request to the server, with stock multiple-retry logic. |
def bind(self, callback):
handlers = self._handlers
if self._self is None:
raise RuntimeError('%s already fired, cannot add callbacks' % self)
if handlers is None:
handlers = []
self._handlers = handlers
handlers.append(callback) | Bind a ``callback`` to this event. |
def external_metadata(self, datasource_type=None, datasource_id=None):
if datasource_type == 'druid':
datasource = ConnectorRegistry.get_datasource(
datasource_type, datasource_id, db.session)
elif datasource_type == 'table':
database = (
db.sessio... | Gets column info from the source system |
def trial(log_dir=None,
upload_dir=None,
sync_period=None,
trial_prefix="",
param_map=None,
init_logging=True):
global _trial
if _trial:
raise ValueError("A trial already exists in the current context")
local_trial = Trial(
log_dir=log_dir,
... | Generates a trial within a with context. |
def length(self):
return np.sqrt(np.sum(self**2, axis=1)).view(np.ndarray) | Array of vector lengths |
def delete_flag(self):
self.__status = Flag.NotFlagged
self.__start = None
self.__due_date = None
self.__completed = None
self._track_changes() | Sets this message as un flagged |
def _serialize(
self,
element,
value,
state
):
xml_value = _hooks_apply_before_serialize(self._hooks, state, value)
if xml_value is None:
if self._default is None:
serialized_value = Text('')
else:
... | Serialize the value to the element. |
def path(path_name=None, override=None, *, root=None, name=None, ext=None,
inject=None, relpath=None, reduce=False):
path_name, identity, root = _initialize(path_name, override, root, inject)
new_name = _process_name(path_name, identity, name, ext)
new_directory = _process_directory(path_name, iden... | Path manipulation black magic |
def create(self, product_id, period, csr, software_id, organization_handle,
approver_email=None, signature_hash_algorithm=None, domain_validation_methods=None,
hostnames=None, technical_handle=None):
response = self.request(E.createSslCertRequest(
E.productId(product_id... | Order a new SSL certificate. |
def dim_upper_extent_dict(self):
return { d.name: d.upper_extent for d in self._dims.itervalues()} | Returns a mapping of dimension name to upper_extent |
def _get_registry_names(self, registry):
return ', '.join(
f.__name__ if not isinstance(f, tuple) else f[0].__name__
for f in getattr(self, registry, [])) | Returns functions names for a registry |
def cast_like(x, y):
x = tf.convert_to_tensor(x)
y = tf.convert_to_tensor(y)
if x.dtype.base_dtype == y.dtype.base_dtype:
return x
cast_x = tf.cast(x, y.dtype)
if cast_x.device != x.device:
x_name = "(eager Tensor)"
try:
x_name = x.name
except AttributeError:
pass
tf.logging.wa... | Cast x to y's dtype, if necessary. |
def _make_stream_transport(self):
msg_queue = queue.Queue()
with self._stream_transport_map_lock:
self._last_id_used = (self._last_id_used % STREAM_ID_LIMIT) + 1
for local_id in itertools.islice(
itertools.chain(
range(self._last_id_used, STREAM_ID_LIMIT),
range... | Create an AdbStreamTransport with a newly allocated local_id. |
def package_path(self, team, user, package):
return os.path.join(self.user_path(team, user), package) | Returns the path to a package repository. |
def _preflight_check(desired, fromrepo, **kwargs):
if 'pkg.check_db' not in __salt__:
return {}
ret = {'suggest': {}, 'no_suggest': []}
pkginfo = __salt__['pkg.check_db'](
*list(desired.keys()), fromrepo=fromrepo, **kwargs
)
for pkgname in pkginfo:
if pkginfo[pkgname]['found'... | Perform platform-specific checks on desired packages |
def seven_zip(archive, items, self_extracting=False):
if not isinstance(items, (list, tuple)):
items = [items]
if self_extracting:
return er(_get_sz(), "a", "-ssw", "-sfx", archive, *items)
else:
return er(_get_sz(), "a", "-ssw", archive, *items) | Create a 7z archive. |
def init(self, access_key=None, secret_key=None):
if not access_key and not secret_key:
self._router.post_init(org_id=self.organizationId, data='{"initCloudAccount": true}')
else:
self._router.post_init(org_id=self.organizationId, data='{}')
ca_data = dict(accessKey=a... | Mimics wizard's environment preparation |
def translate(self, mo):
attrs = {}
groupdict = mo.groupdict()
for name, value in compat_iteritems(groupdict):
if value is None:
value = None
elif self._int_re.match(value):
value = int(value)
elif self._float_re.match(value):
... | Extract a structure from a match object, while translating the types in the process. |
def name(self):
with self._bt_interface.connect(self._mac) as connection:
name = connection.read_handle(_HANDLE_READ_NAME)
if not name:
raise BluetoothBackendException("Could not read NAME using handle %s"
" from Mi Temp sensor %s" % (h... | Return the name of the sensor. |
def build_histogram(data, colorscale=None, nbins=10):
if colorscale is None:
colorscale = colorscale_default
colorscale = _colors_to_rgb(colorscale)
h_min, h_max = 0, 1
hist, bin_edges = np.histogram(data, range=(h_min, h_max), bins=nbins)
bin_mids = np.mean(np.array(list(zip(bin_edges, bin_... | Build histogram of data based on values of color_function |
def save_initial_state(self):
paths = self.paths
self.initial_widget = self.get_widget()
self.initial_cursors = {}
for i, editor in enumerate(self.widgets):
if editor is self.initial_widget:
self.initial_path = paths[i]
try:
self.in... | Save initial cursors and initial active widget. |
def othertype(self, othertype):
if othertype is not None:
self._el.set('TYPE', 'OTHER')
self._el.set('OTHERTYPE', othertype) | Set the ``OTHERTYPE`` attribute value. |
def _package_conf_file_to_dir(file_name):
if file_name in SUPPORTED_CONFS:
path = BASE_PATH.format(file_name)
if os.path.exists(path):
if os.path.isdir(path):
return False
else:
os.rename(path, path + '.tmpbak')
os.mkdir(path, 0... | Convert a config file to a config directory. |
def picard_sort(picard, align_bam, sort_order="coordinate",
out_file=None, compression_level=None, pipe=False):
base, ext = os.path.splitext(align_bam)
if out_file is None:
out_file = "%s-sort%s" % (base, ext)
if not file_exists(out_file):
with tx_tmpdir(picard._config) as tm... | Sort a BAM file by coordinates. |
def header2dict(self, names, struct_format, data):
unpacked_data = struct.unpack(struct_format, data)
return dict(zip(names, unpacked_data)) | Unpack the raw received IP and ICMP header information to a dict. |
def remove_variable(self, name):
index = self._get_var_index(name)
self._A = np.delete(self.A, index, 1)
del self.bounds[name]
del self._variables[name]
self._update_variable_indices()
self._reset_solution() | Remove a variable from the problem. |
def _create_handler_directory(self, handler_name, api_version, server_type):
directory = "{}/api/v{}/{}/{}".format(self._directory, api_version, server_type, handler_name)
os.makedirs(directory, exist_ok=True)
with open("{}/api/v{}/{}/{}.rst".format(self._directory, api_version, server_type, han... | Create a directory for the handler and add an index inside |
def get(cls, key):
if isinstance(key, Enum) and not isinstance(key, cls):
raise TypeError("Cannot type cast between enums")
if isinstance(key, int):
if not int(key) in cls._values:
raise KeyError("There is no enum with key %d" % key)
return cls._values... | str, int or Enum => Enum |
def ushort(filename):
import pyfits
f=pyfits.open(filename,mode='update')
f[0].scale('int16','',bzero=32768)
f.flush()
f.close() | Ushort a the pixels |
def room_temperature(self):
tmp = None
tmp2 = None
for user in self.users:
obj = self.users[user]
if obj.current_values['processing']:
if tmp is None:
tmp = obj.current_values['room_temp']
else:
tmp =... | Return room temperature for both sides of bed. |
def BuildTemplate(self,
context=None,
output=None,
fleetspeak_service_config=None):
context = context or []
context.append("Arch:%s" % self.GetArch())
self.platform = platform.system()
context.append("Platform:%s" % self.platform)
context.a... | Find template builder and call it. |
def security(self):
return {k: v for i in self.pdf.resolvedObjects.items() for k, v in i[1].items()} | Print security object information for a pdf document |
def create_tables(self, tables):
cursor = self.get_cursor()
for table in tables:
columns = mslookup_tables[table]
try:
cursor.execute('CREATE TABLE {0}({1})'.format(
table, ', '.join(columns)))
except sqlite3.OperationalError as err... | Creates database tables in sqlite lookup db |
def _serialize_data(self, data):
if self.request and self.request.query_params.get('hydrate_data', False):
serializer = DataSerializer(data, many=True, read_only=True)
serializer.bind('data', self)
return serializer.data
else:
return [d.id for d in data] | Return serialized data or list of ids, depending on `hydrate_data` query param. |
def as_tree(self):
"Return the nodes as a list of lists."
if self._nodes is None:
return [self]
ret = [self]
for node in self._nodes:
ret.append(node.as_tree())
return ret | Return the nodes as a list of lists. |
def _deep_merge_dict(a, b):
for k, v in b.items():
if k in a and isinstance(a[k], dict) and isinstance(v, dict):
_deep_merge_dict(a[k], v)
else:
a[k] = v | Additively merge right side dict into left side dict. |
def load_writer(writer, ppp_config_dir=None, **writer_kwargs):
if ppp_config_dir is None:
ppp_config_dir = get_environ_config_dir()
config_fn = writer + ".yaml" if "." not in writer else writer
config_files = config_search_paths(
os.path.join("writers", config_fn), ppp_config_dir)
writer... | Find and load writer `writer` in the available configuration files. |
def create_migration(self, app, fixture_path):
self.monkey_patch_migration_template(app, fixture_path)
out = StringIO()
management.call_command('makemigrations', app.label, empty=True, stdout=out)
self.restore_migration_template()
self.stdout.write(out.getvalue()) | Create a data migration for app that uses fixture_path. |
def copy(self, target, timeout=500):
if self.metadata and 'encoding' in self.metadata:
with io.open(target,'w', encoding=self.metadata['encoding']) as f:
for line in self:
f.write(line)
else:
with io.open(target,'wb') as f:
for ... | Copy or download this file to a new local file |
def _entryChanged(self, entry):
self.purrer.save()
if entry.tw_item:
number = entry.tw_item._ientry
entry.tw_item = None
self.etw.takeTopLevelItem(number)
if number:
after = self.etw.topLevelItem(number - 1)
else:
... | This is called when a log entry is changed |
def pretty_str(p, decimal_places=2, print_zero=True, label_columns=False):
if len(p.shape) == 1:
return vector_str(p, decimal_places, print_zero)
if len(p.shape) == 2:
return matrix_str(p, decimal_places, print_zero, label_columns)
raise Exception('Invalid array with shape {0}'.format(p.shap... | Pretty-print a matrix or vector. |
def _parse_precinct_size(spcod):
spcod = np.frombuffer(spcod, dtype=np.uint8)
precinct_size = []
for item in spcod:
ep2 = (item & 0xF0) >> 4
ep1 = item & 0x0F
precinct_size.append((2 ** ep1, 2 ** ep2))
return tuple(precinct_size) | Compute precinct size from SPcod or SPcoc. |
def follow_hand(poppy, delta):
right_arm_position = poppy.l_arm_chain.end_effector + delta
poppy.r_arm_chain.goto(right_arm_position, 0.5, wait=True) | Tell the right hand to follow the left hand |
def refresh(self):
if not self._client:
return
current_images = self._client.images()
self.clear()
self._update(current_images)
for image in current_images:
tags = image.get('RepoTags')
if tags:
self.update({tag: image['Id'] for... | Fetches image and their ids from the client. |
def _arguments_repr(self):
document_class_repr = (
'dict' if self.document_class is dict
else repr(self.document_class))
uuid_rep_repr = UUID_REPRESENTATION_NAMES.get(self.uuid_representation,
self.uuid_representation)
... | Representation of the arguments used to create this object. |
def generate_key_data_from_nonce(server_nonce, new_nonce):
server_nonce = server_nonce.to_bytes(16, 'little', signed=True)
new_nonce = new_nonce.to_bytes(32, 'little', signed=True)
hash1 = sha1(new_nonce + server_nonce).digest()
hash2 = sha1(server_nonce + new_nonce).digest()
hash3 = sha1(new_nonce ... | Generates the key data corresponding to the given nonce |
def parse(self):
expr = self.expression()
if not isinstance(self.current_token, EndToken):
raise InvalidTokenError("Unconsumed trailing tokens.")
return expr | Parse the flow of tokens, and return their evaluation. |
def fill(h1: Histogram1D, ax: Axes, **kwargs):
show_stats = kwargs.pop("show_stats", False)
density = kwargs.pop("density", False)
cumulative = kwargs.pop("cumulative", False)
kwargs["label"] = kwargs.get("label", h1.name)
data = get_data(h1, cumulative=cumulative, density=density)
_apply_xy_lim... | Fill plot of 1D histogram. |
def _validate_type(self):
if not isinstance(self._value, self._type):
title = '{} has an invalid type'.format(self._key_name())
description = '{} must be a {}'.format(self._key_name(), self._type.__name__)
self._add_error(title=title, description=description) | Validation to ensure value is the correct type |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.