code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def add_log_entry(self, process_name, timeperiod, msg):
tree = self.get_tree(process_name)
node = tree.get_node(process_name, timeperiod)
node.add_log_entry([datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'), msg]) | adds a non-persistent log entry to the tree node |
def find_node_api_version(self, node_pyxb):
max_major = 0
for s in node_pyxb.services.service:
max_major = max(max_major, int(s.version[1:]))
return max_major | Find the highest API major version supported by node. |
def remove(self, item):
    """Remove the first occurrence of *item*.

    Raises:
        ValueError: if *item* is not found (propagated from ``self.index``).
    """
    # Fix: docstring misspelled "occurence". Locate once, then delete the
    # parallel entries from both backing lists at the same position.
    i = self.index(item)
    del self._keys[i]
    del self._items[i]
def _save_image(image, filename, return_img=None):
if not image.size:
raise Exception('Empty image. Have you run plot() first?')
if isinstance(filename, str):
if isinstance(vtki.FIGURE_PATH, str) and not os.path.isabs(filename):
filename = os.path.join(vtki.FIGURE_PATH, filename)
if not return_img:
return imageio.imwrite(filename, image)
imageio.imwrite(filename, image)
return image | Internal helper for saving a NumPy image array |
def assert_is_not(expected, actual, message=None, extra=None):
assert expected is not actual, _assert_fail_message(
message, expected, actual, "is", extra
) | Raises an AssertionError if expected is actual. |
def _initialize_session():
sys_random = random.SystemRandom()
_thread_local_storage.secret_key = b''.join(
int2byte(sys_random.randint(0, 255))
for _ in range(SECRET_KEY_BITS // 8)) | Generate a new session key and store it to thread local storage. |
def white(cls):
"Make the text foreground color white."
wAttributes = cls._get_text_attributes()
wAttributes &= ~win32.FOREGROUND_MASK
wAttributes |= win32.FOREGROUND_GREY
cls._set_text_attributes(wAttributes) | Make the text foreground color white. |
def create_from_dictionary(self, datas):
    """Return an ObjectConfiguration populated from dictionary ``datas``.

    Only the 'uri', 'title' and 'description' keys are consumed; each value
    is coerced to ``str`` before assignment.
    """
    configuration = ObjectConfiguration()
    for key in ('uri', 'title', 'description'):
        if key in datas:
            setattr(configuration, key, str(datas[key]))
    return configuration
def _GetArgsDescription(self, args_type):
args = {}
if args_type:
for type_descriptor in args_type.type_infos:
if not type_descriptor.hidden:
args[type_descriptor.name] = {
"description": type_descriptor.description,
"default": type_descriptor.default,
"type": "",
}
if type_descriptor.type:
args[type_descriptor.name]["type"] = type_descriptor.type.__name__
return args | Get a simplified description of the args_type for a flow. |
def genes(self):
return [ExpGene.from_series(g)
for i, g in self.reset_index().iterrows()] | Return a list of all genes. |
def meta_enter_message(python_input):
def get_text_fragments():
return [('class:accept-message', ' [Meta+Enter] Execute ')]
def extra_condition():
" Only show when... "
b = python_input.default_buffer
return (
python_input.show_meta_enter_message and
(not b.document.is_cursor_at_the_end or
python_input.accept_input_on_enter is None) and
'\n' in b.text)
visible = ~is_done & has_focus(DEFAULT_BUFFER) & Condition(extra_condition)
return ConditionalContainer(
content=Window(FormattedTextControl(get_text_fragments)),
filter=visible) | Create the `Layout` for the 'Meta+Enter` message. |
def _get_tool_dict(self, tool_name):
tool = getattr(self, tool_name)
standard_attrs, custom_attrs = self._get_button_attrs(tool)
return dict(
name=tool_name,
label=getattr(tool, 'label', tool_name),
standard_attrs=standard_attrs,
custom_attrs=custom_attrs,
) | Represents the tool as a dict with extra meta. |
def db_type(cls, dtype):
if isinstance(dtype, ExtensionDtype):
return cls.type_map.get(dtype.kind)
elif hasattr(dtype, 'char'):
return cls.type_map.get(dtype.char) | Given a numpy dtype, Returns a generic database type |
def cycle_windows(tree, direction):
wanted = {
"orientation": ("vertical" if direction in ("up", "down")
else "horizontal"),
"direction": (1 if direction in ("down", "right")
else -1),
}
split = find_parent_split(tree.focused.parent, wanted["orientation"])
if split:
child_ids = [child.id for child in split.children]
focus_idx = child_ids.index(split.focused_child.id)
next_idx = (focus_idx + wanted['direction']) % len(child_ids)
next_node = split.children[next_idx]
return find_focusable(next_node)
return None | Cycle through windows of the current workspace |
def function(self, fun, *args, **kwargs):
func = self.sminion.functions[fun]
args, kwargs = salt.minion.load_args_and_kwargs(
func,
salt.utils.args.parse_input(args, kwargs=kwargs),)
return func(*args, **kwargs) | Call a single salt function |
def draw_widget(self, item):
if item:
self.filter_remove(remember=True)
selected_id = self.treedata[item]['id']
item = self.get_toplevel_parent(item)
widget_id = self.treedata[item]['id']
wclass = self.treedata[item]['class']
xmlnode = self.tree_node_to_xml('', item)
self.previewer.draw(item, widget_id, xmlnode, wclass)
self.previewer.show_selected(item, selected_id)
self.filter_restore() | Create a preview of the selected treeview item |
def deleteMask(self,signature):
    """Delete just the mask that matches the given signature.

    NOTE(review): the matching entry is set to None rather than removed
    from ``masklist`` — presumably to keep the key registered; confirm
    this is intended rather than ``del self.masklist[signature]``.
    """
    if signature in self.masklist:
        self.masklist[signature] = None
    else:
        # No entry under this signature; warn rather than raise.
        log.warning("No matching mask")
def pid(self):
    """Return the integer PID of the subprocess, or None when no pid file exists."""
    pid_file = self.path('cmd.pid')
    if not os.path.exists(pid_file):
        return None
    with open(pid_file, 'r') as handle:
        return int(handle.read())
def filterGraph(graph, node_fnc):
    """Return a new Graph keeping only nodes for which ``node_fnc`` holds.

    Edges are retained only when both endpoints survive the filter.
    """
    # Fix: ``filter`` returns a one-shot iterator on Python 3, so the
    # repeated ``u not in nodes`` membership tests below exhausted it after
    # the first check and silently dropped nodes/edges. Materialize first.
    nodes = [node for node in graph.nodes() if node_fnc(node)]
    edges = {}
    gedges = graph.edges()
    for u in gedges:
        if u not in nodes:
            continue
        for v in gedges[u]:
            if v not in nodes:
                continue
            edges.setdefault(u, []).append(v)
    return Graph(nodes, edges)
def add_filter(self, component, filter_group="pyxley-filter"):
    """Add a filter component to the layout under ``filter_group``.

    Args:
        component: object expected to expose ``name == "Filter"``.
        filter_group: grouping key in ``self.filters``.

    Raises:
        Exception: if the component is not a Filter.
    """
    # Fix: use a getattr default so a component lacking a ``name``
    # attribute raises the intended message instead of an AttributeError.
    if getattr(component, "name", None) != "Filter":
        raise Exception("Component is not an instance of Filter")
    self.filters.setdefault(filter_group, []).append(component)
def _get_training_data(vrn_files):
out = {"SNP": [], "INDEL": []}
for name, train_info in [("train_hapmap", "known=false,training=true,truth=true,prior=15.0"),
("train_omni", "known=false,training=true,truth=true,prior=12.0"),
("train_1000g", "known=false,training=true,truth=false,prior=10.0"),
("dbsnp", "known=true,training=false,truth=false,prior=2.0")]:
if name not in vrn_files:
return {}
else:
out["SNP"].append((name.replace("train_", ""), train_info, vrn_files[name]))
if "train_indels" in vrn_files:
out["INDEL"].append(("mills", "known=true,training=true,truth=true,prior=12.0",
vrn_files["train_indels"]))
else:
return {}
return out | Retrieve training data, returning an empty set of information if not available. |
def dates(self):
    """Return the list of dates in the half-open interval [date_a, date_b)."""
    one_day = datetime.timedelta(1)
    result = []
    current = self.date_a
    while current < self.date_b:
        result.append(current)
        current += one_day
    return result
def should_cache(self, request, response):
    """Decide whether the given request/response pair should be cached.

    Returns False when caching was not requested on the request, the status
    code is not cacheable, anonymous-only caching excludes an authenticated
    user with an accessed session, or the response carries uncacheable
    headers; otherwise True.
    """
    if not getattr(request, '_cache_update_cache', False):
        return False
    # Idiom fix: ``x not in y`` instead of ``not x in y`` (same semantics).
    if response.status_code not in getattr(settings, 'BETTERCACHE_CACHEABLE_STATUS', CACHEABLE_STATUS):
        return False
    if getattr(settings, 'BETTERCACHE_ANONYMOUS_ONLY', False) and self.session_accessed and request.user.is_authenticated:
        return False
    if self.has_uncacheable_headers(response):
        return False
    return True
def chunks(f, chunk_size=None):
    """Yield successive chunks of ``chunk_size`` bytes read from ``f``.

    Defaults to 64 KiB chunks; seekable files are rewound to the start first.
    """
    size = chunk_size if chunk_size else 64 * 2 ** 10
    if hasattr(f, "seek"):
        f.seek(0)
    while True:
        chunk = f.read(size)
        if not chunk:
            return
        yield chunk
def wrap_inner(node, tag):
    """Wrap a new ``tag`` element around all of ``node``'s current children."""
    wrapper = node.ownerDocument.createElement(tag)
    # Snapshot the child list first: appendChild re-parents each node,
    # mutating node.childNodes while we iterate.
    for child in list(node.childNodes):
        wrapper.appendChild(child)
    node.appendChild(wrapper)
def _add_handler(logger, handler=None, loglevel=None):
handler.setLevel(loglevel or DEFAULT_LOGLEVEL)
if handler.level <= logging.DEBUG:
_fmt = '%(asctime)s| %(levelname)-4.3s|%(threadName)10.9s/' \
'%(lineno)04d@%(module)-10.9s| %(message)s'
handler.setFormatter(logging.Formatter(_fmt))
else:
handler.setFormatter(logging.Formatter(
'%(asctime)s| %(levelname)-8s| %(message)s'
))
logger.addHandler(handler) | Add a handler to an existing logging.Logger object |
def channel(layer, n_channel, batch=None):
if batch is None:
return lambda T: tf.reduce_mean(T(layer)[..., n_channel])
else:
return lambda T: tf.reduce_mean(T(layer)[batch, ..., n_channel]) | Visualize a single channel |
def authenticate(self) -> bool:
with IHCController._mutex:
if not self.client.authenticate(self._username, self._password):
return False
if self._ihcevents:
self.client.enable_runtime_notifications(
self._ihcevents.keys())
return True | Authenticate and enable the registered notifications |
def _update_data(self, name, value, timestamp, interval, config, conn):
i_time = config['i_calc'].to_bucket(timestamp)
if not config['coarse']:
r_time = config['r_calc'].to_bucket(timestamp)
else:
r_time = None
stmt = self._table.update().where(
and_(
self._table.c.name==name,
self._table.c.interval==interval,
self._table.c.i_time==i_time,
self._table.c.r_time==r_time)
).values({self._table.c.value: value})
rval = conn.execute( stmt )
return rval.rowcount | Support function for insert. Should be called within a transaction |
def data(self, data):
    """Fill ``colData`` from a 2D numpy array, one Python list per column."""
    assert type(data) is np.ndarray
    assert data.shape[1] == self.nCols
    for col in range(self.nCols):
        self.colData[col] = data[:, col].tolist()
def parse(data):
reader = io.BytesIO(data)
headers = []
while reader.tell() < len(data):
h = Header()
h.tag = int.from_bytes(reader.read(2), byteorder='big', signed=False)
h.taglen = int.from_bytes(reader.read(2), byteorder='big', signed=False)
h.tagdata = reader.read(h.taglen)
headers.append(h)
return headers | returns a list of header tags |
def buy_limit_order(self, amount, price, base="btc", quote="usd", limit_price=None):
data = {'amount': amount, 'price': price}
if limit_price is not None:
data['limit_price'] = limit_price
url = self._construct_url("buy/", base, quote)
return self._post(url, data=data, return_json=True, version=2) | Order to buy amount of bitcoins for specified price. |
async def install_mediaroom_protocol(responses_callback, box_ip=None):
from . import version
_LOGGER.debug(version)
loop = asyncio.get_event_loop()
mediaroom_protocol = MediaroomProtocol(responses_callback, box_ip)
sock = create_socket()
await loop.create_datagram_endpoint(lambda: mediaroom_protocol, sock=sock)
return mediaroom_protocol | Install an asyncio protocol to process NOTIFY messages. |
def send_by_packet(self, data):
    """Send data on the socket until PACKET_SIZE bytes have been sent.

    Returns the total number of bytes sent.

    NOTE(review): the loop bound is PACKET_SIZE, not ``len(data)`` — if
    ``data`` is shorter than PACKET_SIZE the slice eventually becomes
    empty, ``send`` returns 0 and RuntimeError is raised; if longer, the
    tail beyond PACKET_SIZE is never sent. Confirm this is intended.

    Raises:
        RuntimeError: when ``send`` returns 0 (connection broken).
    """
    total_sent = 0
    while total_sent < PACKET_SIZE:
        sent = self.sock.send(data[total_sent:])
        if sent == 0:
            raise RuntimeError("socket connection broken")
        total_sent += sent
    return total_sent
def c_getprocs(self):
getprocs = []
for name, dummy_args in self.funcs:
if name == 'GetProcAddress':
if callable(self.opts.hash_func):
continue
getter = 'get_proc_by_string'
elif self.opts.no_structs:
getter = 'GetProcAddress'
else:
getter = 'windll->kernel32.GetProcAddress'
if callable(self.opts.hash_func):
getter = 'get_proc_by_hash'
if self.opts.no_structs:
var = name
else:
var = 'windll->{}.{}'.format(self.name, name)
getproc = '{} = ({} *){}({}, {}{});\n'.format(
var,
self._c_type_name(name),
getter,
self._c_base_var(),
self.opts.prefix, name
)
getprocs.append(getproc + self._c_null_check(var))
return getprocs | Get the getprocs of the module. |
def indices_to_labels(self, indices: Sequence[int]) -> List[str]:
    """Map each index in ``indices`` to its label via ``INDEX_TO_LABEL``."""
    return [self.INDEX_TO_LABEL[i] for i in indices]
def accepts(self, package):
return (
self._name == package.name
and self._constraint.allows(package.version)
and (not package.is_prerelease() or self.allows_prereleases())
) | Determines if the given package matches this dependency. |
async def main():
async with aiohttp.ClientSession() as session:
data = Luftdaten(SENSOR_ID, loop, session)
await data.get_data()
if not await data.validate_sensor():
print("Station is not available:", data.sensor_id)
return
if data.values and data.meta:
print("Sensor values:", data.values)
print("Location:", data.meta['latitude'], data.meta['longitude']) | Sample code to retrieve the data. |
def capfirst(x):
x = to_string(x).strip()
if x:
return x[0].upper() + x[1:].lower()
else:
return x | Capitalise the first letter of ``x``. |
def from_cli(cls, opt):
injection_file = opt.injection_file
chirp_time_window = \
opt.injection_filter_rejector_chirp_time_window
match_threshold = opt.injection_filter_rejector_match_threshold
coarsematch_deltaf = opt.injection_filter_rejector_coarsematch_deltaf
coarsematch_fmax = opt.injection_filter_rejector_coarsematch_fmax
seg_buffer = opt.injection_filter_rejector_seg_buffer
if opt.injection_filter_rejector_f_lower is not None:
f_lower = opt.injection_filter_rejector_f_lower
else:
f_lower = opt.low_frequency_cutoff
return cls(injection_file, chirp_time_window, match_threshold,
f_lower, coarsematch_deltaf=coarsematch_deltaf,
coarsematch_fmax=coarsematch_fmax,
seg_buffer=seg_buffer) | Create an InjFilterRejector instance from command-line options. |
def read_registry(self):
lm = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
want_scan = False
try:
try:
tcp_params = _winreg.OpenKey(lm,
r'SYSTEM\CurrentControlSet'
r'\Services\Tcpip\Parameters')
want_scan = True
except EnvironmentError:
tcp_params = _winreg.OpenKey(lm,
r'SYSTEM\CurrentControlSet'
r'\Services\VxD\MSTCP')
try:
self._config_win32_fromkey(tcp_params)
finally:
tcp_params.Close()
if want_scan:
interfaces = _winreg.OpenKey(lm,
r'SYSTEM\CurrentControlSet'
r'\Services\Tcpip\Parameters'
r'\Interfaces')
try:
i = 0
while True:
try:
guid = _winreg.EnumKey(interfaces, i)
i += 1
key = _winreg.OpenKey(interfaces, guid)
if not self._win32_is_nic_enabled(lm, guid, key):
continue
try:
self._config_win32_fromkey(key)
finally:
key.Close()
except EnvironmentError:
break
finally:
interfaces.Close()
finally:
lm.Close() | Extract resolver configuration from the Windows registry. |
async def _inform_watchdog(self):
    """Inform the watchdog of activity by cancelling and re-arming its task.

    No-op when no watchdog task is installed.
    """
    async with self._wd_lock:
        if self._watchdog_task is None:
            return
        self._watchdog_task.cancel()
        try:
            await self._watchdog_task
        except asyncio.CancelledError:
            # Re-arm only on the expected cancellation path.
            # NOTE(review): a watchdog task that completed normally before
            # the cancel lands is not restarted here — confirm intended.
            self._watchdog_task = self.loop.create_task(self._watchdog(
                self._watchdog_timeout))
def _json_request(self, method, path, data):
response = self._request(method, path, data=data)
_raise_for_status(response)
if len(response.data) > 0:
return _json_from_response(response)
return response.data | Issue request against the crate HTTP API. |
def Search(self, key):
    """Return templates whose id or name contains ``key`` (case-insensitive)."""
    needle = key.lower()
    return [template for template in self.templates
            if needle in template.id.lower() or needle in template.name.lower()]
def create_label(self, name, justify=Gtk.Justification.CENTER, wrap_mode=True, tooltip=None):
label = Gtk.Label()
name = name.replace('|', '\n')
label.set_markup(name)
label.set_justify(justify)
label.set_line_wrap(wrap_mode)
if tooltip is not None:
label.set_has_tooltip(True)
label.connect("query-tooltip", self.parent.tooltip_queries, tooltip)
return label | The function is used for creating lable with HTML text |
def files(self, data):
    """Set files from ``data``, copying each key into the current files.

    Raises:
        RuntimeError: if files already exist (updating is not allowed).

    NOTE(review): reads ``self.files`` — presumably this is a property
    setter whose getter returns the backing mapping; verify, since a
    plain method of the same name would shadow/recurse.
    """
    current_files = self.files
    if current_files:
        raise RuntimeError('Can not update existing files.')
    for key in data:
        current_files[key] = data[key]
def create(cls):
if cls not in cls._instances:
cls._instances[cls] = cls()
return cls._instances[cls] | Return always the same instance of the backend class |
def identical(self, other):
try:
return (utils.dict_equiv(self.attrs, other.attrs) and
self.equals(other))
except (TypeError, AttributeError):
return False | Like equals, but also checks attributes. |
def split32(data):
    """Split ``data`` into consecutive 32-byte pieces (last may be shorter)."""
    return [data[offset:offset + 32] for offset in range(0, len(data), 32)]
def fetch(self):
length = np.random.randint(1, self._curr_length + 1)
nesting = np.random.randint(1, self._curr_nesting + 1)
return length, nesting | Samples up to current difficulty. |
def content(self):
if isinstance(self.__report_file, io.StringIO):
return self.__report_file.getvalue()
else:
return '' | Return report content as a string if mode == STRINGIO else an empty string |
def parseJSON(self, f):
try:
parsed_json = json.load(f['f'])
except Exception as e:
print(e)
log.warn("Could not parse DamageProfiler JSON: '{}'".format(f['fn']))
return None
s_name = self.clean_s_name(parsed_json['metadata']['sample_name'],'')
self.add_data_source(f, s_name)
self.threepGtoAfreq_data[s_name] = parsed_json['dmg_3p']
self.fivepCtoTfreq_data[s_name] = parsed_json['dmg_5p']
self.lgdist_fw_data[s_name] = parsed_json['lendist_fw']
self.lgdist_rv_data[s_name] = parsed_json['lendist_rv']
self.summary_metrics_data[s_name] = parsed_json['summary_stats'] | Parse the JSON output from DamageProfiler and save the summary statistics |
def active_pt_window(self):
    """Return the active prompt_toolkit layout Window, or None if absent."""
    tab = self.active_tab
    if not tab:
        return None
    window = tab.active_window
    if not window:
        return None
    return window.pt_window
def _get_translation_for_locale(self, locale):
translations = None
for dirname in self.paths:
catalog = Translations.load(dirname, [locale], domain=self.domain)
if translations is None:
if isinstance(catalog, Translations):
translations = catalog
continue
try:
translations.merge(catalog)
except AttributeError:
if isinstance(catalog, NullTranslations):
current_app.logger.debug(
'Compiled translations seems to be missing'
' in {0}.'.format(dirname))
continue
raise
return translations or NullTranslations() | Get translation for a specific locale. |
def _RemoveCompletedPathspec(self, index):
pathspec = self.state.indexed_pathspecs[index]
request_data = self.state.request_data_list[index]
self.state.indexed_pathspecs[index] = None
self.state.request_data_list[index] = None
self.state.pending_hashes.pop(index, None)
self.state.pending_files.pop(index, None)
self._TryToStartNextPathspec()
return pathspec, request_data | Removes a pathspec from the list of pathspecs. |
def remove_all_nexusnve_bindings():
    """Remove all nexusnve bindings from the database."""
    # Fix: the debug message previously named the wrong function
    # (remove_all_nexusport_bindings); corrected to match this one.
    LOG.debug("remove_all_nexusnve_bindings() called")
    session = bc.get_writer_session()
    session.query(nexus_models_v2.NexusNVEBinding).delete()
    session.flush()
def compute_merkletree_with(
merkletree: MerkleTreeState,
lockhash: LockHash,
) -> Optional[MerkleTreeState]:
result = None
leaves = merkletree.layers[LEAVES]
if lockhash not in leaves:
leaves = list(leaves)
leaves.append(Keccak256(lockhash))
result = MerkleTreeState(compute_layers(leaves))
return result | Register the given lockhash with the existing merkle tree. |
def configure_root(self, config, incremental=False):
root = logging.getLogger()
self.common_logger_config(root, config, incremental) | Configure a root logger from a dictionary. |
def uncamel(name):
    """Transform a CamelCase ``name`` into lower snake_case."""
    # First pass splits runs like "XMLParser" -> "XML_Parser",
    # second pass splits lower/digit-to-upper boundaries.
    partial = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', partial).lower()
def _rotate_vector(x, y, x2, y2, x1, y1):
angle = atan2(y2 - y1, x2 - x1)
cos_rad = cos(angle)
sin_rad = sin(angle)
return cos_rad * x + sin_rad * y, -sin_rad * x + cos_rad * y | rotate x,y vector over x2-x1, y2-y1 angle |
def _send_request(self):
    """Make the SOAP request and convert the result to a dictionary.

    Returns None when the SOAP call raises WebFault; otherwise stores the
    normalized response dict on ``self.response`` and returns it, logging
    (but not raising) when the status errorCode is not 'OK'.
    (Python 2 only: uses the ``except WebFault, e`` syntax.)
    """
    self._generate_hash()
    params = self._get_params()
    try:
        resp = self._endpoint(**params)
        logger.debug(resp)
    except WebFault, e:
        logger.exception('An error occurred while making the SOAP request.')
        return None
    self.response = XmlDictConfig(ElementTree.XML(smart_str(resp)))
    self.response = normalize_dictionary_values(self.response)
    if self.response['status']['errorCode'] != 'OK':
        # Non-OK statuses are logged but still returned to the caller.
        logger.error(resp)
    return self.response
def appendDatastore(self, store):
if not isinstance(store, Datastore):
raise TypeError("stores must be of type %s" % Datastore)
self._stores.append(store) | Appends datastore `store` to this collection. |
def _datastore_api(self):
if self._datastore_api_internal is None:
if self._use_grpc:
self._datastore_api_internal = make_datastore_api(self)
else:
self._datastore_api_internal = HTTPDatastoreAPI(self)
return self._datastore_api_internal | Getter for a wrapped API object. |
def _split_token_to_subtokens(token, subtoken_dict, max_subtoken_length):
ret = []
start = 0
token_len = len(token)
while start < token_len:
for end in xrange(min(token_len, start + max_subtoken_length), start, -1):
subtoken = token[start:end]
if subtoken in subtoken_dict:
ret.append(subtoken)
start = end
break
else:
raise ValueError("Was unable to split token \"%s\" into subtokens." %
token)
return ret | Splits a token into subtokens defined in the subtoken dict. |
def generate_np(self, x_val, **kwargs):
_, feedable, _feedable_types, hash_key = self.construct_variables(kwargs)
if hash_key not in self.graphs:
with tf.variable_scope(None, 'attack_%d' % len(self.graphs)):
with tf.device('/gpu:0'):
x = tf.placeholder(tf.float32, shape=x_val.shape, name='x')
inputs, outputs = self.generate(x, **kwargs)
from runner import RunnerMultiGPU
runner = RunnerMultiGPU(inputs, outputs, sess=self.sess)
self.graphs[hash_key] = runner
runner = self.graphs[hash_key]
feed_dict = {'x': x_val}
for name in feedable:
feed_dict[name] = feedable[name]
fvals = runner.run(feed_dict)
while not runner.is_finished():
fvals = runner.run()
return fvals['adv_x'] | Facilitates testing this attack. |
def harmonic_mean(data):
    """Return the harmonic mean of ``data``.

    Zero entries contribute 0 to the reciprocal sum; when the reciprocal
    sum itself is zero the result is 0.0.

    Raises:
        StatisticsError: if ``data`` is empty.
    """
    if not data:
        raise StatisticsError('harmonic_mean requires at least one data point')
    reciprocal_sum = sum((1.0 / x) if x else 0.0 for x in data)
    if not reciprocal_sum:
        return 0.0
    return len(data) / reciprocal_sum
def add_badge(self, kind):
badge = self.get_badge(kind)
if badge:
return badge
if kind not in getattr(self, '__badges__', {}):
msg = 'Unknown badge type for {model}: {kind}'
raise db.ValidationError(msg.format(model=self.__class__.__name__,
kind=kind))
badge = Badge(kind=kind)
if current_user.is_authenticated:
badge.created_by = current_user.id
self.update(__raw__={
'$push': {
'badges': {
'$each': [badge.to_mongo()],
'$position': 0
}
}
})
self.reload()
post_save.send(self.__class__, document=self)
on_badge_added.send(self, kind=kind)
return self.get_badge(kind) | Perform an atomic prepend for a new badge |
def checkIsConsistent(self):
    """Raise ConsistencyError if the mask shape differs from the data shape."""
    if is_an_array(self.mask) and self.mask.shape != self.data.shape:
        # Fix: the format string has two placeholders but was given a
        # single boolean (``self.mask.shape != self.data.shape``), which
        # raised IndexError instead of the intended message.
        raise ConsistencyError("Shape mismatch mask={}, data={}"
                               .format(self.mask.shape, self.data.shape))
def format_installed_dap(name, full=False):
dap_data = get_installed_daps_detailed().get(name)
if not dap_data:
raise DapiLocalError('DAP "{dap}" is not installed, can not query for info.'.format(dap=name))
locations = [os.path.join(data['location'], '') for data in dap_data]
for location in locations:
dap = dapi.Dap(None, fake=True, mimic_filename=name)
meta_path = os.path.join(location, 'meta', name + '.yaml')
with open(meta_path, 'r') as fh:
dap.meta = dap._load_meta(fh)
dap.files = _get_assistants_snippets(location, name)
dap._find_bad_meta()
format_local_dap(dap, full=full, custom_location=os.path.dirname(location)) | Formats information about an installed DAP in a human readable form to list of lines |
def _u_distance_correlation_sqr_naive(x, y, exponent=1):
return _distance_sqr_stats_naive_generic(
x, y,
matrix_centered=_u_distance_matrix,
product=u_product,
exponent=exponent).correlation_xy | Bias-corrected distance correlation estimator between two matrices. |
def getLate(self):
for analysis in self.getAnalyses():
if analysis.review_state == "retracted":
continue
analysis_obj = api.get_object(analysis)
if analysis_obj.isLateAnalysis():
return True
return False | Return True if there is at least one late analysis in this Request |
def shutdown(self):
self.signal_children(signal.SIGABRT)
waiting = 0
while self.living_children:
time.sleep(0.5)
waiting += 1
if waiting == self.MAX_SHUTDOWN_WAIT:
self.signal_children(signal.SIGKILL)
break | Send SIGABRT to child processes to instruct them to stop |
def clean(self, value):
if value is None:
value = self.default
try:
value = self.to_python(value)
self.validate(value)
except ValidationError as error:
raise ValidationError("invalid value for {}: {}".format(
self.name,
error.args[0]
))
return value | Run validators and return the clean value. |
def resolve_object_number(self, ref):
if not isinstance(ref, ObjectNumber):
on = ObjectNumber.parse(ref)
else:
on = ref
ds_on = on.as_dataset
return ds_on | Resolve a variety of object numebrs to a dataset number |
def safestr(value):
    """Coerce ``value`` into a string-serialization-friendly form.

    Falsy values and numbers/bools pass through unchanged, date/datetime
    become ISO-8601 strings, and everything else is coerced to unicode.
    (Python 2 only: uses the ``long`` and ``unicode`` builtins.)
    """
    if not value or isinstance(value, (int, float, bool, long)):
        return value
    elif isinstance(value, (date, datetime)):
        return value.isoformat()
    else:
        return unicode(value)
def exec_cmd(self, cmd):
cmd = self._cmd_str(cmd)
logmsg = 'Executing command: {0}'.format(cmd)
if self.passwd:
logmsg = logmsg.replace(self.passwd, ('*' * 6))
if 'decode("base64")' in logmsg or 'base64.b64decode(' in logmsg:
log.debug('Executed SHIM command. Command logged to TRACE')
log.trace(logmsg)
else:
log.debug(logmsg)
ret = self._run_cmd(cmd)
return ret | Execute a remote command |
def register(cls, package_type):
if not issubclass(package_type, cls):
raise TypeError('package_type must be a subclass of Package.')
cls._REGISTRY.add(package_type) | Register a concrete implementation of a Package to be recognized by pex. |
def described_as(self, description, *args):
    """Set a custom description for the matcher; returns self for chaining.

    Positional ``args``, when given, are applied via ``str.format``.
    """
    if args:
        description = description.format(*args)
    self.description = description
    return self
def list(args):
jm = setup(args)
jm.list(job_ids=get_ids(args.job_ids), print_array_jobs=args.print_array_jobs, print_dependencies=args.print_dependencies, status=args.status, long=args.long, print_times=args.print_times, ids_only=args.ids_only, names=args.names) | Lists the jobs in the given database. |
def load(self):
try:
with open(self.file_root + '.pkl', "rb") as f:
return pickle.load(f)
except IOError:
raise CacheMissing(self.file_root) | Load cache from file using pickle. |
def _exprparser(
expr, scope, lang=None, conf=None, configurable=None,
safe=DEFAULT_SAFE, besteffort=DEFAULT_BESTEFFORT, tostr=False
):
if scope is None:
scope = {}
scope.update({
'configurable': configurable,
'conf': conf
})
expr = REGEX_EXPR_R.sub(
_refrepl(
configurable=configurable, conf=conf, safe=safe, scope=scope,
besteffort=besteffort
), expr
)
result = resolve(
expr=expr, name=lang, safe=safe, scope=scope, tostr=tostr,
besteffort=besteffort
)
return result | In charge of parsing an expression and return a python object. |
def add_command_arguments(parser):
parser.add_argument(
'--noinput',
'--no-input',
action='store_const',
const=False,
dest='interactive',
help='Tells Django to NOT prompt the user for input of any kind.',
)
parser.add_argument(
'--failfast', action='store_const', const=True, dest='failfast',
help=('Tells Django to stop running the '
'test suite after first failed test.'),
)
parser.add_argument(
'-r', '--reverse', action='store_const', const=True, dest='reverse',
help='Reverses test cases order.',
)
parser.add_argument(
'--use-existing-database',
action='store_true',
default=False,
help="Don't create a test database. USE AT YOUR OWN RISK!",
)
parser.add_argument(
'-k', '--keepdb',
action='store_const',
const=True,
help="Preserves the test DB between runs.",
)
parser.add_argument(
'-S', '--simple',
action='store_true',
default=False,
help="Use simple test runner that supports Django's"
" testing client only (no web browser automation)"
) | Additional command line arguments for the behave management command |
def _getel(key, value):
    """Return a pandoc element built from ``key`` and ``value``.

    Zero-argument elements ignore the value, single-argument elements take
    it as-is, everything else unpacks ``value`` positionally.
    """
    if key in ['HorizontalRule', 'Null']:
        return elt(key, 0)()
    elif key in ['Plain', 'Para', 'BlockQuote', 'BulletList', 'DefinitionList']:
        # Fix: 'HorizontalRule' and 'Null' also appeared in this list but
        # were unreachable (handled by the first branch); removed.
        return elt(key, 1)(value)
    return elt(key, len(value))(*value)
def geoid(self):
if self.valuetype_class.is_geoid():
return self
for c in self.table.columns:
if c.parent == self.name and c.valuetype_class.is_geoid():
return c | Return first child of the column, or self that is marked as a geographic identifier |
def replace(self, html):
self.html = html
text = html.text()
positions = []
def perform_replacement(match):
offset = sum(positions)
start, stop = match.start() + offset, match.end() + offset
s = self.html[start:stop]
if self._is_replacement_allowed(s):
repl = match.expand(self.replacement)
self.html[start:stop] = repl
else:
repl = match.group()
positions.append(match.end())
return repl
while True:
if positions:
text = text[positions[-1]:]
text, n = self.pattern.subn(perform_replacement, text, count=1)
if not n:
break | Perform replacements on given HTML fragment. |
def err_exit(msg, rc=1):
    """Print ``msg`` to stderr and exit the process with code ``rc``."""
    print(msg, file=sys.stderr)
    raise SystemExit(rc)
async def async_open(self) -> None:
await self._loop.create_connection(
lambda: self,
self._host,
self._port) | Opens connection to the LifeSOS ethernet interface. |
def loads(self, data, raw=False):
data = self.decrypt(data)
if not data.startswith(self.PICKLE_PAD):
return {}
load = self.serial.loads(data[len(self.PICKLE_PAD):], raw=raw)
return load | Decrypt and un-serialize a python object |
def not_found(self, *args, **kwargs):
kwargs['api'] = self.api
return not_found(*args, **kwargs) | Defines the handler that should handle not found requests against this API |
def meaningful_value(self, val, undefined):
if isinstance(val, fields.files.FieldFile):
return val.name and not (
isinstance(undefined, fields.files.FieldFile) and val == undefined)
return val is not None and val != undefined | Check if val is considered non-empty. |
def _normalize_words(words, acronyms):
for i, _ in enumerate(words):
if words[i].upper() in acronyms:
words[i] = words[i].upper()
else:
if not words[i].isupper():
words[i] = words[i].capitalize()
return words | Normalize case of each word to PascalCase. |
def sortmerna_detailed_barplot (self):
keys = OrderedDict()
metrics = set()
for sample in self.sortmerna:
for key in self.sortmerna[sample]:
if not key in ["total", "rRNA", "non_rRNA"] and not "_pct" in key:
metrics.add(key)
for key in metrics:
keys[key] = { 'name': key.replace("_count","") }
pconfig = {
'id': 'sortmerna-detailed-plot',
'title': 'SortMeRNA: Hit Counts',
'ylab': 'Reads'
}
self.add_section( plot = bargraph.plot(self.sortmerna, keys, pconfig) ) | Make the HighCharts HTML to plot the sortmerna rates |
def handle_units(changeset):
units, records = {}, {}
for service_name, service in sorted(changeset.bundle['services'].items()):
for i in range(service.get('num_units', 0)):
record_id = 'addUnit-{}'.format(changeset.next_action())
unit_name = '{}/{}'.format(service_name, i)
records[record_id] = {
'id': record_id,
'method': 'addUnit',
'args': [
'${}'.format(changeset.services_added[service_name]),
None,
],
'requires': [changeset.services_added[service_name]],
}
units[unit_name] = {
'record': record_id,
'service': service_name,
'unit': i,
}
_handle_units_placement(changeset, units, records) | Populate the change set with addUnit changes. |
def _set_whitespaces_flags(self, show):
doc = self.document()
options = doc.defaultTextOption()
if show:
options.setFlags(options.flags() |
QtGui.QTextOption.ShowTabsAndSpaces)
else:
options.setFlags(
options.flags() & ~QtGui.QTextOption.ShowTabsAndSpaces)
doc.setDefaultTextOption(options) | Sets show white spaces flag |
def value(self):
user = self.trigger.agentml.request_log.most_recent().user
groups = self.trigger.agentml.request_log.most_recent().groups
if len(self._element):
message = ''.join(map(str, self.trigger.agentml.parse_tags(self._element, self.trigger)))
else:
message = self._element.text
default = attribute(self._element, 'default', '')
response = self.trigger.agentml.get_reply(user.id, message, groups)
return response or default | Return the value of the redirect response |
def _ifconfig_getnode():
for args in ('', '-a', '-av'):
mac = _find_mac('ifconfig', args, ['hwaddr', 'ether'], lambda i: i+1)
if mac:
return mac
import socket
ip_addr = socket.gethostbyname(socket.gethostname())
mac = _find_mac('arp', '-an', [ip_addr], lambda i: -1)
if mac:
return mac
mac = _find_mac('lanscan', '-ai', ['lan0'], lambda i: 0)
if mac:
return mac
return None | Get the hardware address on Unix by running ifconfig. |
def header_check(self, content):
encode = None
m = RE_HTML_ENCODE.search(content)
if m:
enc = m.group(1).decode('ascii')
try:
codecs.getencoder(enc)
encode = enc
except LookupError:
pass
else:
encode = self._has_xml_encode(content)
return encode | Special HTML encoding check. |
def all_tags(self):
    """Return a list of the unique tags across all posts in the collection."""
    unique = set()
    for post in self:
        unique |= set(post.tags)
    return list(unique)
def xlabel_halo(ax):
import matplotlib.patheffects as effects
for tick in ax.get_xticklabels() + [ax.xaxis.label]:
tick.set_path_effects([effects.withStroke(linewidth=4, foreground='w')]) | Add a white "halo" around the xlabels. |
def make_uninstall(parser):
parser.add_argument(
'host',
metavar='HOST',
nargs='+',
help='hosts to uninstall Ceph from',
)
parser.set_defaults(
func=uninstall,
) | Remove Ceph packages from remote hosts. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.