code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def stop(self):
    """Stop the running Docker Selenium container and forget its id.

    Raises:
        Exception: if no container is currently tracked.
    """
    container = self.container_id
    if container is None:
        raise Exception('No Docker Selenium container was running')
    check_call(['docker', 'stop', container])
    self.container_id = None
def preTranslate(self, tx, ty):
    """Apply a pre-translation by (tx, ty) to this matrix in place.

    Returns self so calls can be chained.
    """
    self.e = self.e + tx * self.a + ty * self.c
    self.f = self.f + tx * self.b + ty * self.d
    return self
def _resolve_children(self, ldap_user, groups):
    """Yield the query result for each child of this node.

    Nested LDAPGroupQuery children are resolved recursively; plain
    children are tested for group membership.
    """
    for node in self.children:
        if isinstance(node, LDAPGroupQuery):
            yield node.resolve(ldap_user, groups)
        else:
            yield groups.is_member_of(node)
def r_hat(self):
    """Yield (y, rhat, color) triples for the variable.

    rhat is None for values that are not 2-D with at least two chains.
    """
    _, ys, vals, cols = self.labels_ticks_and_vals()
    for y, val, col in zip(ys, vals, cols):
        splittable = val.ndim == 2 and val.shape[0] >= 2
        yield y, (_get_split_rhat(val) if splittable else None), col
def hasDefault(self, param):
    """Return True if the (resolved) param has a default value."""
    resolved = self._resolveParam(param)
    return resolved in self._defaultParamMap
def gen_bsh(src1, src2, dst):
    """Return a BSH (bitwise shift) ReIL instruction for the operands."""
    assert src1.size == src2.size
    mnemonic = ReilMnemonic.BSH
    return ReilBuilder.build(mnemonic, src1, src2, dst)
def _do_save_as(self, filename):
    """Save the spectrum back to a FITS file, replacing any existing file.

    Raises:
        RuntimeError: if the spectrum has fewer than two points.
    """
    if len(self.spectrum.x) < 2:
        raise RuntimeError("Spectrum must have at least two points")
    # Remove a stale file first so the write starts clean.
    if os.path.isfile(filename):
        os.unlink(filename)
    overwrite_fits(self.spectrum.to_hdu(), filename)
def read_uic_image_property(fh):
size = struct.unpack('B', fh.read(1))[0]
name = struct.unpack('%is' % size, fh.read(size))[0][:-1]
flags, prop = struct.unpack('<IB', fh.read(5))
if prop == 1:
value = struct.unpack('II', fh.read(8))
value = value[0] / value[1]
else:
size = st... | Read UIC ImagePropertyEx tag from file and return as dict. |
async def _run_state(self, responder, state, trigger, request) \
-> BaseState:
user_trigger = trigger
try:
if trigger:
await state.handle()
else:
await state.confused()
for i in range(0, settings.MAX_INTERNAL_JUMPS + 1):
... | Execute the state, or if execution fails handle it. |
def extend (name, values):
assert isinstance(name, basestring)
assert is_iterable_typed(values, basestring)
name = add_grist (name)
__validate_feature (name)
feature = __all_features [name]
if feature.implicit:
for v in values:
if v in __implicit_features:
rai... | Adds the given values to the given feature. |
def steadystate(A, max_iter=100):
    """Empirically estimate steady-state probabilities of stochastic matrix A.

    Raises A to a high power, collects the distinct resulting rows, and
    normalizes their sum.
    """
    P = np.linalg.matrix_power(A, max_iter)
    distinct = []
    for row in P:
        if not any(np.allclose(row, seen) for seen in distinct):
            distinct.append(row)
    return normalize(np.sum(distinct, axis=0))
def reconstruct_cdm(dm, absolute_angles, all_points, W=None):
from pylocus.point_set import dmi_from_V, sdm_from_dmi, get_V
from pylocus.mds import signedMDS
N = all_points.shape[0]
V = get_V(absolute_angles, dm)
dmx = dmi_from_V(V, 0)
dmy = dmi_from_V(V, 1)
sdmx = sdm_from_dmi(dmx, N)
s... | Reconstruct point set from angle and distance measurements, using coordinate difference matrices. |
def csv_line_items(self):
    """Return (and lazily cache) this invoice's csv-line-items.

    Invoices fetched from list endpoints omit csv-line-items, so the
    detail endpoint is queried once and the result memoized on self.
    """
    if not hasattr(self, '_csv_line_items'):
        url = '{}/{}'.format(self.base_url, self.id)
        # next() works on both Python 2 and 3; iterator.next() is Python-2 only.
        values = self.harvest._get_element_values(url, self.element_name)
        self._csv_line_items = next(values).get('csv-line-items', '')
    return self._csv_line_items
def user_info(self, kv):
    """Set one user_info dict entry from a (key, value) tuple."""
    k, v = kv
    self.__user_info[k] = v
def create_onvif_service(self, name, from_template=True, portType=None):
name = name.lower()
xaddr, wsdl_file = self.get_definition(name)
with self.services_lock:
svt = self.services_template.get(name)
if svt and from_template and self.use_services_template.get(name):
... | Create ONVIF service client |
def _tempfilepager(generator, cmd, color):
import tempfile
filename = tempfile.mktemp()
text = "".join(generator)
if not color:
text = strip_ansi(text)
encoding = get_best_encoding(sys.stdout)
with open_stream(filename, 'wb')[0] as f:
f.write(text.encode(encoding))
try:
... | Page through text by invoking a program on a temporary file. |
def migrateProvPre010(self, newslab):
    """Migrate any pre-010 prov/provs stacks into the new slab.

    The 'provs' db is only migrated if the 'prov' migration ran.
    """
    if not self._migrate_db_pre010('prov', newslab):
        return
    self._migrate_db_pre010('provs', newslab)
def data_in_label(intvl_in, dtype_in_time, dtype_in_vert=False):
intvl_lbl = intvl_in
time_lbl = dtype_in_time
lbl = '_'.join(['from', intvl_lbl, time_lbl]).replace('__', '_')
vert_lbl = dtype_in_vert if dtype_in_vert else False
if vert_lbl:
lbl = '_'.join([lbl, vert_lbl]).replace('__', '_')... | Create string label specifying the input data of a calculation. |
def crc7(data):
    """Compute the CRC-7 of a whole message via the precomputed table."""
    crc = 0
    for byte in data:
        crc = CRC7_TABLE[crc ^ byte]
    return crc
def picard_formatconverter(picard, align_sam):
out_bam = "%s.bam" % os.path.splitext(align_sam)[0]
if not file_exists(out_bam):
with tx_tmpdir(picard._config) as tmp_dir:
with file_transaction(picard._config, out_bam) as tx_out_bam:
opts = [("INPUT", align_sam),
... | Convert aligned SAM file to BAM format. |
def available_state(self, state: State) -> Tuple[State, ...]:
    """Return the tuple of states reachable from the given state.

    The source state itself is removed from the result unless it is
    the only reachable state.
    """
    reachable = []
    for gene in self.genes:
        reachable.extend(self.available_state_for_gene(gene, state))
    if state in reachable and len(reachable) > 1:
        reachable.remove(state)
    return tuple(reachable)
def path(self):
    """Return the dotted path from the root down to this node."""
    if not self.parent:
        # Root node: fall back to the class name when no identifier is set.
        return self.identifier if self.identifier else self.__class__.__name__
    return '.'.join([self.parent.path, str(self.identifier)])
def lookup(self, tmp):
    """Return the type of temporary `tmp`.

    Raises:
        IndexError: when `tmp` is outside [0, types_used].
    """
    if not 0 <= tmp <= self.types_used:
        l.debug("Invalid temporary number %d", tmp)
        raise IndexError(tmp)
    return self.types[tmp]
def _get_domain_event_detail(event, detail):
event_name = _get_libvirt_enum_string('VIR_DOMAIN_EVENT_', event)
if event_name == 'unknown':
return event_name, 'unknown'
prefix = 'VIR_DOMAIN_EVENT_{0}_'.format(event_name.upper())
detail_name = _get_libvirt_enum_string(prefix, detail)
return ev... | Convert event and detail numeric values into a tuple of human readable strings |
def adaptive_universal_transformer_multilayer_hard():
hparams = adaptive_universal_transformer_multilayer_tpu()
hparams.batch_size = 256
hparams.hard_attention_k = 8
hparams.add_step_timing_signal = True
hparams.self_attention_type = "dot_product_relative_v2"
hparams.max_relative_position = 256
return hpa... | Multi-layer config for adaptive Transformer with hard attention. |
def initialize_ray():
if threading.current_thread().name == "MainThread":
plasma_directory = None
object_store_memory = os.environ.get("MODIN_MEMORY", None)
if os.environ.get("MODIN_OUT_OF_CORE", "False").title() == "True":
from tempfile import gettempdir
plasma_direc... | Initializes ray based on environment variables and internal defaults. |
def proselint(paths=None, version=None, clean=None, debug=None,
output_json=None, time=None, demo=None, compact=None):
if time:
click.echo(timing_test())
return
if debug or clean:
clear_cache()
if demo:
paths = [demo_file]
filepaths = extract_files(list(path... | A CLI for proselint, a linter for prose. |
def reply(self):
    """Return the reply text: visible, unquoted fragments joined by newlines."""
    visible = [f.content for f in self.fragments
               if not f.hidden and not f.quoted]
    return '\n'.join(visible)
def plot(self, ax=None, **kwargs):
    """Plot the histogram (bucket sizes vs. bin values); return the figure."""
    ax, fig, plt = get_ax_fig_plt(ax)
    counts = [len(bucket) for bucket in self.values]
    ax.plot(self.binvals, counts, **kwargs)
    return fig
def render_category(slug):
    """Template tag helper: context with the category for `slug`, or {}."""
    try:
        category = EntryCategory.objects.get(slug=slug)
    except EntryCategory.DoesNotExist:
        return {}
    return {'category': category}
def execute(self, conn, name='', transaction = False):
binds={}
if name:
op = ('=', 'like')['%' in name]
sql = self.sql + " WHERE pg.physics_group_name %s :physicsgroup" % (op)
binds = {"physicsgroup": name}
else:
sql = self.sql
self.logge... | returns id for a given physics group name |
def create_hparams(hparams_set,
hparams_overrides_str="",
data_dir=None,
problem_name=None,
hparams_path=None):
hparams = registry.hparams(hparams_set)
if hparams_path and tf.gfile.Exists(hparams_path):
hparams = create_hparams_from_jso... | Create HParams with data_dir and problem hparams, if kwargs provided. |
def requiredGPU_MB(self, n):
from darknet.core import darknet_with_cuda
if (darknet_with_cuda()):
free = getFreeGPU_MB()
print("Yolo: requiredGPU_MB: required, free", n, free)
if (free == -1):
return True
return (free>=n)
else:
... | Required GPU memory in MBytes |
def setupEmptyTree(self):
    """Create a fresh, headerless QTreeWidget, discarding any existing one."""
    try:
        del self.tree
    except AttributeError:
        pass
    self.tree = QtWidgets.QTreeWidget(self)
    self.tree.header().close()
def add_response_headers(self, headers, **overrides):
    """Merge `headers` into the route's response headers, keeping existing ones.

    The route's stored mapping is not mutated; a copy is updated and
    handed to self.where().
    """
    merged = dict(self.route.get('response_headers', {}))
    merged.update(headers)
    return self.where(response_headers=merged, **overrides)
def plot_knee(self):
    """Plot the curve with a vertical line at the knee, if one exists."""
    import matplotlib.pyplot as plt
    plt.figure(figsize=(8, 8))
    plt.plot(self.x, self.y)
    ymin, ymax = plt.ylim()
    plt.vlines(self.knee, ymin, ymax)
def less_strict_bool(x):
    """Idempotent, None-safe wrapper around strict_bool.

    None maps to False, real bools pass through, everything else is
    delegated to strict_bool.
    """
    if x is None:
        return False
    # bool cannot be subclassed, so this is equivalent to `x is True or x is False`.
    if isinstance(x, bool):
        return x
    return strict_bool(x)
def forward(self, is_train=False):
    """Run a forward pass on every training executor."""
    for executor in self.train_execs:
        executor.forward(is_train=is_train)
def recall_series(y_true, y_score, k=None, value=True):
    """Series of length k whose i-th entry is the hit count in the top i.

    With value=False the labels are inverted before counting.
    """
    y_true, y_score = to_float(y_true, y_score)
    top = _argsort(y_score, k)
    if not value:
        y_true = 1 - y_true
    hits = np.nan_to_num(y_true[top]).cumsum()
    return pd.Series(hits, index=np.arange(1, len(hits) + 1))
def write_record(self, event_str):
    """Write one length-prefixed, CRC-framed serialized event.

    Layout: u64 length, u32 crc(length bytes), payload, u32 crc(payload).
    """
    length_bytes = struct.pack('Q', len(event_str))
    header = length_bytes + struct.pack('I', masked_crc32c(length_bytes))
    footer = struct.pack('I', masked_crc32c(event_str))
    self._writer.write(header + event_str + footer)
def activate_language(instances, language):
    """Activate `language` (or the fallback when unsupported) on each instance."""
    if language not in get_supported_languages():
        language = get_fallback_language()
    for instance in instances:
        instance.activate_language(language)
def _clear_namespace():
    """Delete module-level gl* names that are not part of the strict ES API."""
    allowed = set(default_backend.__dict__)
    allowed.update(['gl2', 'glplus'])
    namespace = globals()
    for name in list(namespace):
        if name.lower().startswith('gl') and name not in allowed:
            del namespace[name]
def tax_ids(self):
    """Return all tax_ids found in the nodes table."""
    rows = select([self.nodes.c.tax_id]).execute().fetchall()
    return [row[0] for row in rows]
def potential_purviews(self, direction, mechanism, purviews=False):
    """Network-level override: consider every subset of the node indices."""
    candidates = utils.powerset(self.node_indices)
    return irreducible_purviews(self.cm, direction, mechanism, candidates)
def raster_to_shape(raster):
left = raster.bounds.left
right = raster.bounds.right
top = raster.bounds.top
bottom = raster.bounds.bottom
top_left = (left, top)
top_right = (right, top)
bottom_left = (left, bottom)
bottom_right = (right, bottom)
return Polygon((
top_left, top_... | Take a raster and return a polygon representing the outer edge. |
def _factln(num):
if num < 20:
log_factorial = log(factorial(num))
else:
log_factorial = num * log(num) - num + log(num * (1 + 4 * num * (
1 + 2 * num))) / 6.0 + log(pi) / 2
return log_factorial | Computes logfactorial regularly for tractable numbers, uses Ramanujans approximation otherwise. |
def _split_index(params):
if isinstance(params, list):
return [params[0], _split_index(params[1])]
elif isinstance(params, dict):
if INDEX in params.keys():
return _split_index(params[VALUE])
result = dict()
for key in params:
result[key] = _split_index(pa... | Delete index infromation from params |
def encode_simple(d):
    """Recursively encode text strings in basic python objects.

    Strings become UTF-8-encoded bytes; lists and dicts are rebuilt
    with their contents encoded; anything else is returned unchanged.
    """
    # `unicode` is Python-2 only; `str` is its Python-3 name.
    if isinstance(d, str):
        return d.encode()
    if isinstance(d, list):
        return [encode_simple(item) for item in d]
    if isinstance(d, dict):
        return {encode_simple(k): encode_simple(v) for k, v in d.items()}
    return d
def all_bases(type):
    """Return `type` and all of its bases, nearest first.

    Follows each entry's 'base' link in the __types registry until a
    falsy base ends the chain.
    """
    # `basestring` is Python-2 only; `str` covers the same contract on Python 3.
    assert isinstance(type, str)
    result = []
    while type:
        result.append(type)
        type = __types[type]['base']
    return result
def signature(self):
    """Return the signature definition line for this subroutine."""
    modifier_str = ", ".join(self.modifiers)
    params = self.parameters_as_string()
    return "{} SUBROUTINE {}({})".format(modifier_str, self.name, params)
def _iterative_matches(self, nodes):
nodelen = len(nodes)
if 0 >= self.min:
yield 0, {}
results = []
for alt in self.content:
for c, r in generate_matches(alt, nodes):
yield c, r
results.append((c, r))
while results:
... | Helper to iteratively yield the matches. |
def timing(name=''):
start = datetime.datetime.now()
timestamp = start.strftime('%H:%M')
tf.logging.info('Starting job [%s] at %s', name, timestamp)
yield
end = datetime.datetime.now()
timestamp = end.strftime('%H:%M')
tf.logging.info('Finished job [%s] at %s', name, timestamp)
duration = end - start
... | Log start, end, and duration. |
def colors_to_dict(colors, img):
return {
"wallpaper": img,
"alpha": util.Color.alpha_num,
"special": {
"background": colors[0],
"foreground": colors[15],
"cursor": colors[15]
},
"colors": {
"color0": colors[0],
"col... | Convert list of colors to pywal format. |
def draw_graph(G: nx.DiGraph, filename: str):
    """Render a networkx digraph to `filename` with pygraphviz dot, left-to-right."""
    agraph = to_agraph(G)
    agraph.graph_attr["rankdir"] = "LR"
    agraph.draw(filename, prog="dot")
def failover_limitation(self):
    """Return the reason this node cannot promote, or None when it can."""
    reason = None
    if not self.reachable:
        reason = 'not reachable'
    elif self.tags.get('nofailover', False):
        reason = 'not allowed to promote'
    elif self.watchdog_failed:
        reason = 'not watchdog capable'
    return reason
def contains_is_html(cls, data):
    """Return True if `data` (searched recursively) has a key ending in "IsHTML"."""
    for key, val in data.items():
        if isinstance(key, str) and key.endswith("IsHTML"):
            return True
        nested = isinstance(val, (OrderedDict, dict))
        if nested and cls.contains_is_html(val):
            return True
    return False
def _SnakeCaseToCamelCase(path_name):
result = []
after_underscore = False
for c in path_name:
if c.isupper():
raise Error('Fail to print FieldMask to Json string: Path name '
'{0} must not contain uppercase letters.'.format(path_name))
if after_underscore:
if c.islower():
... | Converts a path name from snake_case to camelCase. |
def visit_ifexp(self, node):
    """Return an astroid.IfExp node rendered as source text."""
    body = self._precedence_parens(node, node.body, is_left=True)
    test = self._precedence_parens(node, node.test, is_left=True)
    orelse = self._precedence_parens(node, node.orelse, is_left=False)
    return "%s if %s else %s" % (body, test, orelse)
def getall(self, key, default=_marker):
    """Return every value stored under `key`.

    Falls back to `default` when given; otherwise raises KeyError.
    """
    identity = self._title(key)
    matches = [v for i, k, v in self._impl._items if i == identity]
    if matches:
        return matches
    if default is not _marker:
        return default
    raise KeyError('Key not found: %r' % key)
def gf_poly_mul_simple(p, q):
    """Multiply two polynomials inside a Galois Field.

    Coefficients are byte values; the product has length
    len(p) + len(q) - 1 (degree deg(p) + deg(q)).
    """
    r = bytearray(len(p) + len(q) - 1)
    # `range` replaces the Python-2-only `xrange` (same iteration semantics).
    for j in range(len(q)):
        for i in range(len(p)):
            r[i + j] ^= gf_mul(p[i], q[j])
    return r
def log_level_type(arg):
    """argparse type: map a level name (any case) to its logging constant.

    Raises:
        argparse.ArgumentTypeError: for unknown level names.
    """
    level = arg.upper()
    if level not in ('NOTSET', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'):
        raise argparse.ArgumentTypeError("{0} is not a valid log level".format(repr(arg)))
    return getattr(logging, level)
def _compute_product(map1, map2):
    """Return a HEALPix map whose data is the elementwise product of the inputs."""
    product = map1.data * map2.data
    return HpxMap(product, map1.hpx)
def parse_swf(url_data):
    """Extract URLs from SWF content and register each on url_data."""
    content = url_data.get_content()
    for match in linkparse.swf_url_re.finditer(content):
        url_data.add_url(match.group())
def print_subtree(self, fobj=sys.stdout, level=0):
    """Write this node and, recursively, its subtree, two spaces per level."""
    indent = " " * (level * 2)
    fobj.write("{}{!r}\n".format(indent, self))
    for child in self.get_children():
        child.print_subtree(fobj, level + 1)
def addFileAnnot(self, point, buffer, filename, ufilename=None, desc=None):
CheckParent(self)
val = _fitz.Page_addFileAnnot(self, point, buffer, filename, ufilename, desc)
if not val: return
val.thisown = True
val.parent = weakref.proxy(self)
self._annot_refs[id(val)] = v... | Add a 'FileAttachment' annotation at location 'point'. |
def dirBrowser(self):
if capable.OF_TKFD_IN_EPAR:
fname = askdirectory(parent=self.entry, title="Select Directory")
else:
raise NotImplementedError('Fix popupChoices() logic.')
if not fname:
return
self.choice.set(fname)
self.lastSelection = No... | Invoke a tkinter directory dialog |
def _service_by_name(name):
services = _available_services()
name = name.lower()
if name in services:
return services[name]
for service in six.itervalues(services):
if service['file_path'].lower() == name:
return service
basename, ext = os.path.splitext(service['filen... | Return the service info for a service by label, filename or path |
def WritePathInfos(self, client_id, path_infos):
    """Writes a collection of path_info records for a client.

    MySQL integrity errors are re-raised as db.UnknownClientError.
    """
    batch = {client_id: path_infos}
    try:
        self._MultiWritePathInfos(batch)
    except MySQLdb.IntegrityError as e:
        raise db.UnknownClientError(client_id=client_id, cause=e)
def volumes_delete(storage_pool, logger):
try:
for vol_name in storage_pool.listVolumes():
try:
vol = storage_pool.storageVolLookupByName(vol_name)
vol.delete(0)
except libvirt.libvirtError:
logger.exception(
"Unable... | Deletes all storage volume disks contained in the given storage pool. |
def _step(self, theme, direction):
if not self.themes:
self.reload()
key = (theme.source, theme.name)
for i, val in enumerate(self.themes):
if (val.source, val.name) == key:
index = i
break
else:
self.themes.insert(0, th... | Traverse the list in the given direction and return the next theme |
def remove_empty_dirs(path):
    """Remove all empty directories under `path` (not `path` itself).

    Walks bottom-up (topdown=False) so directories that become empty
    once their empty children are removed are pruned in the same pass;
    the original top-down walk left such parents behind.
    """
    for root, dirs, _files in os.walk(path, topdown=False):
        for name in dirs:
            dir_path = os.path.join(root, name)
            if not os.listdir(dir_path):
                os.rmdir(dir_path)
def _showError(self, msg="", title="Error"):
self.errorWidget.setError(msg=msg, title=title) | Shows an error message. |
async def make_default_options_response(self) -> Response:
    """Build the default OPTIONS response advertising the allowed methods."""
    adapter = _request_ctx_stack.top.url_adapter
    allow = ', '.join(adapter.allowed_methods())
    return self.response_class('', headers={'Allow': allow})
def pack(self):
diag = (self.ver << 5) + self.diag
flags = (self.state << 6) + self.flags
length = len(self)
return struct.pack(self._PACK_STR, diag, flags, self.detect_mult,
length, self.my_discr, self.your_discr,
self.desired_min_tx... | Encode a BFD Control packet without authentication section. |
def announce(self, number):
    """Publish the indicator change on the internal channel and log the grant."""
    payload = self.keys.key(number)
    self.client.publish(self.keys.internal, payload)
    self.message('{} granted'.format(number))
def _bsecurate_cli_make_graph_file(args):
    """Handle the make-graph-file subcommand; always returns an empty string."""
    curate.make_graph_file(args.basis, args.outfile, args.render,
                           args.version, args.data_dir)
    return ''
def our_IsUsableForDesktopGUI(m):
    """Lenient check that display mode `m` is 24-bit and at least 640x480."""
    if guess_bitDepth(Q.CGDisplayModeCopyPixelEncoding(m)) != 24:
        return False
    return (Q.CGDisplayModeGetWidth(m) >= 640
            and Q.CGDisplayModeGetHeight(m) >= 480)
def print_stats(img):
stat = ImageStat.Stat(img)
print("extrema : ", stat.extrema)
print("count : ", stat.count)
print("sum : ", stat.sum)
print("sum2 : ", stat.sum2)
print("mean : ", stat.mean)
print("median : ", stat.median)
print("rms : ", stat.rm... | prints stats, remember that img should already have been loaded |
def H(self) -> 'Kraus':
    """Return the complex conjugate of this Kraus operation."""
    conjugated = [op.H for op in self.operators]
    return Kraus(conjugated, self.weights)
def bind_model_cls(self, model_cls):
if self.model_cls:
raise errors.Error('"{0}" has been already bound to "{1}" and '
'could not be rebound to "{2}"'.format(
self, self.model_cls, model_cls))
self.model_cls = model_cls
... | Bind field to model class. |
def invalidate(self):
if self._invalidated:
return
else:
self._invalidated = True
self.on_invalidate.fire()
if self.eventloop is not None:
def redraw():
self._invalidated = False
self._redraw()
if self.max_re... | Thread safe way of sending a repaint trigger to the input event loop. |
def delete_all_versions(self, model_name, obj_pk):
    """Evict every cached version of the given instance, if caching is enabled."""
    if not self.cache:
        return
    for version in self.versions:
        self.cache.delete(self.key_for(version, model_name, obj_pk))
def print_preview(self):
from qtpy.QtPrintSupport import QPrintPreviewDialog
editor = self.get_current_editor()
printer = Printer(mode=QPrinter.HighResolution,
header_font=self.get_plugin_font('printer_header'))
preview = QPrintPreviewDialog(printer, self)
... | Print preview for current file |
def parse_volumedetect_stats(self):
logger.info(
"Running first pass volumedetect filter for stream {}".format(self.stream_id)
)
filter_str = '[0:{}]volumedetect'.format(self.stream_id)
cmd = [
self.media_file.ffmpeg_normalize.ffmpeg_exe, '-nostdin', '-y',
... | Use ffmpeg with volumedetect filter to get the mean volume of the input file. |
def PauseHunt(hunt_id, reason=None):
hunt_obj = data_store.REL_DB.ReadHuntObject(hunt_id)
if hunt_obj.hunt_state != hunt_obj.HuntState.STARTED:
raise OnlyStartedHuntCanBePausedError(hunt_obj)
data_store.REL_DB.UpdateHuntObject(
hunt_id, hunt_state=hunt_obj.HuntState.PAUSED, hunt_state_comment=reason)
... | Pauses a hunt with a given id. |
def visit_Str(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the repr of the string literal held by `node`."""
    return repr(node.s)
def reset(self):
    """Reset the histogram's counters and timestamps to a pristine state."""
    for i in range(self.counts_len):
        self.counts[i] = 0
    self.total_count = 0
    # Min-tracking fields start at a max sentinel; max-tracking at zero.
    self.min_value = sys.maxsize
    self.max_value = 0
    self.start_time_stamp_msec = sys.maxsize
    self.end_time_stamp_msec = 0
def create_default_config():
config = configparser.RawConfigParser()
config.add_section('global')
config.set('global', 'env_source_rc', False)
config.add_section('shell')
config.set('shell', 'bash', "true")
config.set('shell', 'zsh', "true")
config.set('shell', 'gui', "true")
return conf... | Create a default configuration object, with all parameters filled |
def append_string(t, string):
node = t.tree
if string:
if len(node) == 0:
if node.text is not None:
node.text += string
else:
node.text = string
else:
child = list(node)[-1]
if child.tail is not None:
... | Append a string to a node, as text or tail of last child. |
def do(self, command, files=None, use_long_polling=False, request_timeout=None, **query):
url, params = self._prepare_request(command, query)
return {
"url": url, "params": params, "files": files, "stream": use_long_polling,
"verify": True,
"timeout": request_timeout
... | Return the request params we would send to the api. |
def allowed_transitions():
    """Get the target states allowed from the current SDP state.

    Returns an error dict (and logs) when the current state has no
    entry in the transition table.
    """
    try:
        state_obj = SDPState()
        return state_obj.allowed_target_states[state_obj.current_state]
    except KeyError:
        LOG.error("Key Error")
        return dict(state="KeyError", reason="KeyError")
def _build_menu(self, context_menu: QMenu):
logger.debug("Show tray icon enabled in settings: {}".format(cm.ConfigManager.SETTINGS[cm.SHOW_TRAY_ICON]))
self._fill_context_menu_with_model_item_actions(context_menu)
context_menu.addAction(self.action_view_script_error)
context_menu.addActi... | Build the context menu. |
def build(self, builder):
params = dict(ODMVersion="1.3",
FileType=self.filetype,
CreationDateTime=self.creationdatetime,
Originator=self.originator,
FileOID=self.fileoid,
xmlns="http://www.cdisc.org/ns... | Build XML object, return the root, this is a copy for consistency and testing |
def create_rbd_image(service, pool, image, sizemb):
    """Create a new RADOS block device of `sizemb` MB in `pool` as `service`."""
    cmd = [
        'rbd', 'create', image,
        '--size', str(sizemb),
        '--id', service,
        '--pool', pool,
    ]
    check_call(cmd)
def record_received(self, msg):
release_lock = False
userdata = msg.userdata
rec = ALDBRecord.create_from_userdata(userdata)
self._records[rec.mem_addr] = rec
_LOGGER.debug('ALDB Record: %s', rec)
rec_count = self._load_action.rec_count
if rec_count == 1 or self._... | Handle ALDB record received from device. |
def jsonhash(obj, root=True, exclude=None, hash_func=_jsonhash_sha1):
if isinstance(obj, Mapping):
if root and exclude:
obj = {k: v for k, v in obj.iteritems() if k not in exclude}
result = sorted(
(k, jsonhash(v, False)) for k, v in obj.iteritems())
elif isinstance(obj, ... | calculate the objects hash based on all field values |
def to_header(self):
    """Serialize the accept set into an HTTP header value string.

    Entries with quality 1 are emitted bare; others get a ;q= suffix.
    """
    parts = []
    for value, quality in self:
        if quality != 1:
            parts.append('%s;q=%s' % (value, quality))
        else:
            parts.append(value)
    return ','.join(parts)
def _inherited_row(row, base_rows_from_pillar, ret):
base_rows = []
for base_row_from_pillar in base_rows_from_pillar:
base_row = __salt__['pillar.get'](base_row_from_pillar)
if base_row:
base_rows.append(base_row)
elif base_row_from_pillar != _DEFAULT_ROW_PILLAR:
... | Return a row with properties from parents. |
def _clear_expired_zones(self):
    """Set every non-clear, expired zone back to CLEAR."""
    # Snapshot the keys first: _update_zone may mutate the mapping.
    for zone in list(self._zones.keys()):
        if self._zones[zone].status != Zone.CLEAR and self._zone_expired(zone):
            self._update_zone(zone, Zone.CLEAR)
def DomainFactory(domain_name, cmds):
    """Dynamically build a Domain class with one classmethod per command."""
    domain_cls = type(str(domain_name), (BaseDomain,), {})
    for cmd in cmds:
        handler = get_command(domain_name, cmd['name'])
        setattr(domain_cls, cmd['name'], classmethod(handler))
    return domain_cls
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.