| code (string, lengths 51–2.34k) | docstring (string, lengths 11–171) |
|---|---|
def resort_client_actions(portal):
    """Resort the Client type's action views into the canonical order.

    Actions named in ``sorted_actions`` come first, in that order; any
    other registered actions are appended afterwards, preserving their
    original relative order.
    """
    sorted_actions = [
        "edit",
        "contacts",
        "view",
        "analysisrequests",
        "batches",
        "samplepoints",
        "profiles",
        "templates",
        "specs",
        "orders",
        "reports_listing",
    ]
    type_info = portal.portal_types.getTypeInfo("Client")
    # Materialize as lists: Python 3 `filter` objects are lazy and always
    # truthy, which made the original `if missing:` check misleading.
    known = [act for act in type_info._actions if act.id in sorted_actions]
    missing = [act for act in type_info._actions if act.id not in sorted_actions]
    known.sort(key=lambda act: sorted_actions.index(act.id))
    type_info._actions = known + missing
def time_emd(emd_type, data):
emd = {
'cause': _CAUSE_EMD,
'effect': pyphi.subsystem.effect_emd,
'hamming': pyphi.utils.hamming_emd
}[emd_type]
def statement():
for (d1, d2) in data:
emd(d1, d2)
results = timeit.repeat(statement, number=NUMBER, repeat=REPEAT)
return min(results) | Time an EMD command with the given data as arguments |
def read(self, frame, data):
    """Consume all of *data* via self.spec, returning (values, leftover)."""
    values = []
    while data:
        item, data = self.spec.read(frame, data)
        values.append(item)
    return values, data
def fillNullValues(col, rows):
    """Fill null cells in *col* (for rows in *rows*) with the previous
    non-null value seen while scanning the sheet top to bottom.

    Cells whose getValue raises are treated as their exception value,
    matching the sheet's error-cell convention.
    """
    lastval = None
    nullfunc = isNullFunc()
    n = 0
    rowsToFill = list(rows)
    for r in Progress(col.sheet.rows, 'filling'):
        try:
            val = col.getValue(r)
        except Exception as e:
            val = e
        if nullfunc(val) and r in rowsToFill:
            # `is not None` (not truthiness): falsy values such as 0, ""
            # or False are legitimate fill values and must propagate.
            if lastval is not None:
                col.setValue(r, lastval)
                n += 1
        else:
            lastval = val
    col.recalc()
    status("filled %d values" % n)
def get(self, user_name: str) -> User:
    """Return the User resource for *user_name*.

    Admins may fetch any user; everyone else only themselves
    (aborts with HTTP 403 otherwise).
    """
    user = current_user()
    if user.is_admin or user.name == user_name:
        return self._get_or_abort(user_name)
    else:
        abort(403) | Gets the User Resource. |
def calc_all_routes_info(self, npaths=3, real_time=True, stop_at_bounds=False, time_delta=0):
routes = self.get_route(npaths, time_delta)
results = {route['routeName']: self._add_up_route(route['results'], real_time=real_time, stop_at_bounds=stop_at_bounds) for route in routes}
route_time = [route[0] for route in results.values()]
route_distance = [route[1] for route in results.values()]
self.log.info('Time %.2f - %.2f minutes, distance %.2f - %.2f km.', min(route_time), max(route_time), min(route_distance), max(route_distance))
return results | Calculate all route infos. |
def _run_validators(self, value):
    """Run every attached validator against *value*.

    Collects the messages from all raised ValidationErrors and re-raises
    them as one combined ValidationError; does nothing on success.
    """
    errors = []
    for validator in self.validators:
        try:
            validator(value)
        except ValidationError as e:  # old `except X, e` form is a py3 SyntaxError
            errors.extend(e.messages)
    if errors:
        raise ValidationError(errors)
def XanyKXany(self):
result = np.empty((self.P,self.F_any.shape[1],self.F_any.shape[1]), order='C')
for p in range(self.P):
X1D = self.Fstar_any * self.D[:,p:p+1]
X1X2 = X1D.T.dot(self.Fstar_any)
result[p] = X1X2
return result | compute self covariance for any |
def _skip(self, cnt):
    """Skip *cnt* bytes forward across the concatenated RAR volume files.

    Consumes whole volumes (advancing with _open_next) until the remaining
    skip fits inside the current volume, then does a relative seek in it.
    """
    while cnt > 0:
        if self._cur_avail == 0:
            # current volume exhausted; advance to next file, or give up
            if not self._open_next():
                break
        if cnt > self._cur_avail:
            # consume the whole remainder of this volume (no seek needed)
            cnt -= self._cur_avail
            self._remain -= self._cur_avail
            self._cur_avail = 0
        else:
            # final partial skip: seek relative to the current position
            self._fd.seek(cnt, 1)
            self._cur_avail -= cnt
            self._remain -= cnt
            cnt = 0 | RAR Seek, skipping through rar files to get to correct position |
def listify(p:OptListOrItem=None, q:OptListOrItem=None):
    "Make `p` listy and the same length as `q`."
    if p is None: p=[]
    elif isinstance(p, str): p = [p]
    elif not isinstance(p, Iterable): p = [p]
    else:
        # An Iterable without a length (e.g. a generator) still needs wrapping;
        # a narrow TypeError replaces the original bare `except:` so that
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        try: len(p)
        except TypeError: p = [p]
    # target length: q itself if it's an int, else len(q) (or len(p) when q is None)
    n = q if type(q)==int else len(p) if q is None else len(q)
    if len(p)==1: p = p * n
    assert len(p)==n, f'List len mismatch ({len(p)} vs {n})'
    return list(p)
def _parse_canonical_int32(doc):
    """Decode an extended-JSON ``{"$numberInt": "<digits>"}`` wrapper to int."""
    i_str = doc['$numberInt']
    if len(doc) != 1:
        # the wrapper must contain exactly the one $numberInt key
        raise TypeError('Bad $numberInt, extra field(s): %s' % (doc,))
    if not isinstance(i_str, string_type):
        raise TypeError('$numberInt must be string: %s' % (doc,))
    return int(i_str) | Decode a JSON int32 to python int. |
def tag_info(self):
    """Return a string with the class name, tag name (if any) and unnested value."""
    label = self.__class__.__name__
    if self.name:
        label += '(%r)' % self.name
    return label + ": " + self.valuestr()
def process_response(self, response: requests.models.Response) -> dict:
try:
output = response.json()
except json.JSONDecodeError:
raise self.BadResponseError(
'Json not returned with status code [' + str(response.status_code) + ']')
if response.status_code == 400:
return output
if response.status_code not in [200, 201]:
raise self.BadResponseError(
str(output) + ': with status code [' + str(response.status_code) +
'] and params:' + str(output))
return output['data'] | Checks for correct data response and status codes |
def readline(self, timeout = 0.1):
    """Pop one line from the stream queue, or None if nothing arrives
    within *timeout* seconds (None means non-blocking)."""
    blocking = timeout is not None
    try:
        return self._q.get(block = blocking, timeout = timeout)
    except Empty:
        return None
def config_status():
s = boto3.Session()
client = s.client('config')
channels = client.describe_delivery_channel_status()[
'DeliveryChannelsStatus']
for c in channels:
print(yaml.safe_dump({
c['name']: dict(
snapshot=str(
c['configSnapshotDeliveryInfo'].get('lastSuccessfulTime')),
history=str(
c['configHistoryDeliveryInfo'].get('lastSuccessfulTime')),
stream=str(
c['configStreamDeliveryInfo'].get('lastStatusChangeTime'))
),
}, default_flow_style=False)) | Check config status in an account. |
def url(self, request):
if self.pk:
if self.has_description():
return request.build_absolute_uri(reverse('assignment_description_file', args=[self.pk]))
else:
return self.download
else:
return None | Return absolute URL for assignment description. |
def post(self, request, ext):
try:
plugin = plugins.get(ext)
data, meta = self.get_post_data(request)
data = plugin.load(data)
except UnknownPlugin:
raise Http404
else:
content = plugin.render(data)
return self.render_to_response(content) | Render data for plugin and return text response. |
def validate_parameters(self):
for p in self.params:
if p not in self.known_params:
raise errors.UnknownParameter(p, self.known_params) | Validate that the parameters are correctly specified. |
def _agent_from_distribution(distribution, value=-1, agent_id=None):
    """Choose an (agent_type, state) pair from *distribution* for *value*.

    A negative *value* draws a fresh random number in [0, 1). An entry
    matches either explicitly by id (STATIC_THRESHOLD plus an ids list)
    or when *value* falls inside its [low, high) threshold interval.
    Raises when no entry matches.
    """
    if value < 0:
        value = random.random()
    for d in sorted(distribution, key=lambda x: x['threshold']):
        threshold = d['threshold']
        # skip entries matching neither by id nor by threshold interval
        if not ((agent_id is not None and threshold == STATIC_THRESHOLD and agent_id in d['ids']) or \
            (value >= threshold[0] and value < threshold[1])):
            continue
        state = {}
        if 'state' in d:
            # deep-copied so agents never share a mutable state dict
            state = deepcopy(d['state'])
        return d['agent_type'], state
    raise Exception('Distribution for value {} not found in: {}'.format(value, distribution)) | Used in the initialization of agents given an agent distribution. |
def _parse_block(self):
if self._iter_rows is not None:
return
rows = _avro_rows(self._block, self._avro_schema)
self._num_items = self._block.avro_rows.row_count
self._remaining = self._block.avro_rows.row_count
self._iter_rows = iter(rows) | Parse metadata and rows from the block only once. |
def download(self, request, **kwargs):
self.method_check(request, allowed=['get'])
basic_bundle = self.build_bundle(request=request)
tileset = self.cached_obj_get(
bundle=basic_bundle,
**self.remove_api_resource_names(kwargs))
filename = helpers.get_tileset_filename(tileset)
filename = os.path.abspath(filename)
if os.path.isfile(filename):
response = serve(request, os.path.basename(filename), os.path.dirname(filename))
response['Content-Disposition'] = 'attachment; filename="{}"'.format(os.path.basename(filename))
else:
response = self.create_response(request, {'status': 'not generated'})
return response | proxy for the helpers.tileset_download method |
def packet_in_handler(self, req_igmp, msg):
ofproto = msg.datapath.ofproto
if ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
in_port = msg.in_port
else:
in_port = msg.match['in_port']
if (igmp.IGMP_TYPE_REPORT_V1 == req_igmp.msgtype or
igmp.IGMP_TYPE_REPORT_V2 == req_igmp.msgtype):
self._do_report(req_igmp, in_port, msg)
elif igmp.IGMP_TYPE_LEAVE == req_igmp.msgtype:
self._do_leave(req_igmp, in_port, msg) | the process when the querier received IGMP. |
def WalkChildren(elem):
    """Yield every descendant node of *elem* in document (pre-) order."""
    for child in elem.childNodes:
        yield child
        for descendant in WalkChildren(child):
            yield descendant
def _on_error(self, websock, e):
if isinstance(e, (
websocket.WebSocketConnectionClosedException,
ConnectionResetError)):
self.logger.error('websocket closed, did chrome die?')
else:
self.logger.error(
'exception from websocket receiver thread',
exc_info=1)
brozzler.thread_raise(self.calling_thread, BrowsingException) | Raises BrowsingException in the thread that created this instance. |
def _on_text_changed(self):
if not self._cleaning:
ln = TextHelper(self).cursor_position()[0]
self._modified_lines.add(ln) | Adjust dirty flag depending on editor's content |
def _is_bval_type_a(grouped_dicoms):
bval_tag = Tag(0x2001, 0x1003)
bvec_x_tag = Tag(0x2005, 0x10b0)
bvec_y_tag = Tag(0x2005, 0x10b1)
bvec_z_tag = Tag(0x2005, 0x10b2)
for group in grouped_dicoms:
if bvec_x_tag in group[0] and _is_float(common.get_fl_value(group[0][bvec_x_tag])) and \
bvec_y_tag in group[0] and _is_float(common.get_fl_value(group[0][bvec_y_tag])) and \
bvec_z_tag in group[0] and _is_float(common.get_fl_value(group[0][bvec_z_tag])) and \
bval_tag in group[0] and _is_float(common.get_fl_value(group[0][bval_tag])) and \
common.get_fl_value(group[0][bval_tag]) != 0:
return True
return False | Check if the bvals are stored in the first of 2 currently known ways for single frame dti |
def strip_html_comments(text):
    """Return *text* with every line that begins with an HTML comment
    opener (``<!--``) removed; line endings are preserved."""
    kept = [line for line in text.splitlines(True)
            if not line.startswith("<!--")]
    return "".join(kept)
def create_deployment(deployment_name,
token_manager=None,
app_url=defaults.APP_URL):
headers = token_manager.get_access_token_headers()
payload = {
'name': deployment_name,
'isAdmin': True
}
deployment_url = environment.get_deployment_url(app_url=app_url)
response = requests.post('%s/api/v1/deployments' % deployment_url,
data=json.dumps(payload),
headers=headers)
if response.status_code == 201:
return response.json()
else:
raise JutException('Error %s: %s' % (response.status_code, response.text)) | create a deployment with the specified name |
def dumps(self):
    """Serialize this instance to a YAML string via self.dump()."""
    buffer = StringIO()
    try:
        self.dump(buffer)
        return buffer.getvalue()
    finally:
        buffer.close()
def dispatch_job_hook(self, link, key, job_config, logfile, stream=sys.stdout):
raise NotImplementedError("SysInterface.dispatch_job_hook") | Hook to dispatch a single job |
def _raise_on_bad_jar_filename(jar_filename):
if jar_filename is None:
return
if not isinstance(jar_filename, string_type):
raise TypeError("jar_filename is not a string: %r" % jar_filename)
if not os.path.exists(jar_filename):
raise ValueError("jar_filename does not exist: %r" % jar_filename) | Ensure that jar_filename is a valid path to a jar file. |
def ping_directories_handler(sender, **kwargs):
entry = kwargs['instance']
if entry.is_visible and settings.SAVE_PING_DIRECTORIES:
for directory in settings.PING_DIRECTORIES:
DirectoryPinger(directory, [entry]) | Ping directories when an entry is saved. |
def concat(x, y, axis=0):
    """Concatenate two pandas or numpy objects along *axis*.

    Pandas inputs go through pd.concat; anything else is handled with
    np.concatenate (axis 0) or np.column_stack (otherwise).
    """
    pandas_types = (pd.DataFrame, pd.Series)
    if isinstance(x, pandas_types) and isinstance(y, pandas_types):
        return pd.concat([x, y], axis=axis)
    if axis == 0:
        return np.concatenate([x, y])
    return np.column_stack([x, y])
def from_xml(self, doc):
import xml.sax
handler = DomainDumpParser(self)
xml.sax.parse(doc, handler)
return handler | Load this domain based on an XML document |
def step(self):
self.director.window.switch_to()
self.director.window.dispatch_events()
self.director.window.dispatch_event('on_draw')
self.director.window.flip()
pyglet.clock.tick() | Step the engine one tick |
def score_evidence_list(self, evidences):
def _score(evidences):
if not evidences:
return 0
sources = [ev.source_api for ev in evidences]
uniq_sources = numpy.unique(sources)
syst_factors = {s: self.prior_probs['syst'][s]
for s in uniq_sources}
rand_factors = {k: [] for k in uniq_sources}
for ev in evidences:
rand_factors[ev.source_api].append(
evidence_random_noise_prior(
ev,
self.prior_probs['rand'],
self.subtype_probs))
neg_prob_prior = 1
for s in uniq_sources:
neg_prob_prior *= (syst_factors[s] +
numpy.prod(rand_factors[s]))
prob_prior = 1 - neg_prob_prior
return prob_prior
pos_evidence = [ev for ev in evidences if
not ev.epistemics.get('negated')]
neg_evidence = [ev for ev in evidences if
ev.epistemics.get('negated')]
pp = _score(pos_evidence)
np = _score(neg_evidence)
score = pp * (1 - np)
return score | Return belief score given a list of supporting evidences. |
def _get_dependencies(sql):
dependencies = []
for (_, placeholder, dollar, _) in SqlStatement._get_tokens(sql):
if placeholder:
variable = placeholder[1:]
if variable not in dependencies:
dependencies.append(variable)
elif dollar:
raise Exception('Invalid sql; $ with no following $ or identifier: %s.' % sql)
return dependencies | Return the list of variables referenced in this SQL. |
def flush(self):
if self.flag[0] != 'r':
with self.write_mutex:
if hasattr(self.db, 'sync'):
self.db.sync()
else:
flag = self.flag
if flag[0] == 'n':
flag = 'c' + flag[1:]
self.db.close()
self.db = self.open(self.path, flag, self.mode, **self.open_kwargs) | Synchronizes data to the underlying database file. |
def in_same_box(self, a, b):
    """Return True if micro nodes *a* and *b* fall into the same partition box."""
    assert a in self.micro_indices
    assert b in self.micro_indices
    return any(a in box and b in box for box in self.partition)
def handle_noargs(self, **options):
r = get_r()
since = datetime.utcnow() - timedelta(days=1)
metrics = {}
categories = r.metric_slugs_by_category()
for category_name, slug_list in categories.items():
metrics[category_name] = []
for slug in slug_list:
metric_values = r.get_metric_history(slug, since=since)
metrics[category_name].append(
(slug, metric_values)
)
template = "redis_metrics/email/report.{fmt}"
data = {
'today': since,
'metrics': metrics,
}
message = render_to_string(template.format(fmt='txt'), data)
message_html = render_to_string(template.format(fmt='html'), data)
msg = EmailMultiAlternatives(
subject="Redis Metrics Report",
body=message,
from_email=settings.DEFAULT_FROM_EMAIL,
to=[email for name, email in settings.ADMINS]
)
msg.attach_alternative(message_html, "text/html")
msg.send() | Send Report E-mails. |
def decode_arr(data):
    """Decode a base64 string into a float64 numpy array (read-only view)."""
    raw = base64.b64decode(data.encode('utf-8'))
    return frombuffer(raw, float64)
def _extract_zip(archive, dest=None, members=None):
dest = dest or os.getcwd()
members = members or archive.infolist()
for member in members:
if isinstance(member, basestring):
member = archive.getinfo(member)
_extract_zip_member(archive, member, dest) | Extract the ZipInfo object to a real file on the path targetpath. |
def same_syllabic_feature(ch1, ch2):
if ch1 == '.' or ch2 == '.':
return False
ch1 = 'V' if ch1 in VOWELS else 'C' if ch1 in CONSONANTS else None
ch2 = 'V' if ch2 in VOWELS else 'C' if ch2 in CONSONANTS else None
return ch1 == ch2 | Return True if ch1 and ch2 are both vowels or both consonants. |
def combining_goal(state):
((corner, edge), (L, U, F, D, R, B)) = state
if "U" not in corner or "U" not in edge: return False
if set(edge).issubset(set(corner)): return True
elif set(edge.facings.keys()).issubset(set(corner.facings.keys())): return False
opposite = {"L":"R", "R":"L", "F":"B", "B":"F"}
edge_facings = list(edge)
for i, (face, square) in enumerate(edge_facings):
if face == "U":
if square != corner[opposite[edge_facings[(i+1)%2][0]]]:
return False
else:
if square != corner["U"]:
return False
return True | Check if two Cubies are combined on the U face. |
def stage_all(self):
LOGGER.info('Staging all files')
self.repo.git.add(A=True) | Stages all changed and untracked files |
def _parse_total_magnetization(line, lines):
toks = line.split()
res = {"number of electrons": float(toks[3])}
if len(toks) > 5:
res["total magnetization"] = float(toks[5])
return res | Parse the total magnetization, which is somewhat hidden |
def chunks(self):
if not hasattr(self, '_it'):
class ChunkIterator(object):
def __iter__(iter):
return iter
def __next__(iter):
try:
chunk = self._next_chunk()
except StopIteration:
if self.loop:
self._init_stretching()
return iter.__next__()
raise
return chunk
next = __next__
self._it = ChunkIterator()
return self._it | Returns a chunk iterator over the sound. |
def args_update(self):
for key, value in self._config_data.items():
setattr(self._default_args, key, value) | Update the argparser namespace with any data from configuration file. |
def getVersionNumber(self):
    """Return the OpenThreadWpan NCP stack firmware version string."""
    print('%s call getVersionNumber' % self.port)  # print() call: py2 statement form is a py3 SyntaxError
    versionStr = self.__sendCommand(WPANCTL_CMD + 'getprop -v NCP:Version')[0]
    return self.__stripValue(versionStr)
def com_google_fonts_check_currency_chars(ttFont):
def font_has_char(ttFont, codepoint):
for subtable in ttFont['cmap'].tables:
if codepoint in subtable.cmap:
return True
return False
failed = False
OPTIONAL = {
}
MANDATORY = {
0x20AC: "EURO SIGN"
}
for codepoint, charname in OPTIONAL.items():
if not font_has_char(ttFont, codepoint):
failed = True
yield WARN, f"Font lacks \"{charname}\" character (unicode: 0x{codepoint:04X})"
for codepoint, charname in MANDATORY.items():
if not font_has_char(ttFont, codepoint):
failed = True
yield FAIL, f"Font lacks \"{charname}\" character (unicode: 0x{codepoint:04X})"
if not failed:
yield PASS, "Font has all expected currency sign characters." | Font has all expected currency sign characters? |
def isAllowed(self, assoc_type, session_type):
    """Return True when (assoc_type, session_type) is both explicitly in
    self.allowed_types and a valid pairing per getSessionTypes."""
    assoc_good = (assoc_type, session_type) in self.allowed_types
    matches = session_type in getSessionTypes(assoc_type)
    return assoc_good and matches | Is this combination of association type and session type allowed? |
def docker_py_dict(self):
return {
'image': self.image,
'command': self.cmd,
'hostname': self.hostname,
'user': self.user,
'detach': self.detach,
'stdin_open': self.open_stdin,
'tty': self.tty,
'ports': self.exposed_ports,
'environment': self.env,
'volumes': self.volumes,
'network_disabled': self.network_disabled,
'entrypoint': self.entry_point,
'working_dir': self.working_dir,
'domainname': self.domain_name,
'labels': self.labels
} | Convert object to match valid docker-py properties. |
def _run_germline(bam_file, data, ref_file, region, out_file):
work_dir = utils.safe_makedir("%s-work" % utils.splitext_plus(out_file)[0])
region_bed = strelka2.get_region_bed(region, [data], out_file, want_gzip=False)
example_dir = _make_examples(bam_file, data, ref_file, region_bed, out_file, work_dir)
if _has_candidate_variants(example_dir):
tfrecord_file = _call_variants(example_dir, region_bed, data, out_file)
return _postprocess_variants(tfrecord_file, data, ref_file, out_file)
else:
return vcfutils.write_empty_vcf(out_file, data["config"], [dd.get_sample_name(data)]) | Single sample germline variant calling. |
def _make_symbol_function(handle, name, func_name):
code, doc_str = _generate_symbol_function_code(handle, name, func_name)
local = {}
exec(code, None, local)
symbol_function = local[func_name]
symbol_function.__name__ = func_name
symbol_function.__doc__ = doc_str
symbol_function.__module__ = 'mxnet.symbol'
return symbol_function | Create a symbol function by handle and function name. |
def _jamo_to_hangul_char(lead, vowel, tail=0):
    """Compose a precomposed Hangul syllable from lead/vowel/(optional tail) jamo.

    Uses the standard Unicode composition formula: each lead consonant
    spans 588 code points (21 vowels x 28 tail slots) and each vowel 28.
    """
    lead = ord(lead) - _JAMO_LEAD_OFFSET
    vowel = ord(vowel) - _JAMO_VOWEL_OFFSET
    tail = ord(tail) - _JAMO_TAIL_OFFSET if tail else 0
    return chr(tail + (vowel - 1) * 28 + (lead - 1) * 588 + _JAMO_OFFSET) | Return the Hangul character for the given jamo characters. |
def writetofile(self, filename):
    """Write the in-memory contents (via self.read()) to *filename*.

    Uses a context manager so the file is closed even when read/write raises.
    """
    with open(filename, "w") as f:
        f.write(self.read())
def map_parameters(cls, params):
    """Translate query parameters to canonical form field names.

    Keys are looked up case-insensitively in cls.FIELD_MAP; unmapped keys
    pass through unchanged. (dict.items() replaces six.iteritems — works
    on both py2 and py3 without the six dependency.)
    """
    return {cls.FIELD_MAP.get(k.lower(), k): v for k, v in params.items()}
def from_percent(position_percent):
    """Convert a percent position (int, 0-100) to its raw two-byte value."""
    if not isinstance(position_percent, int):
        raise PyVLXException("Position::position_percent_has_to_be_int")
    if position_percent < 0:
        raise PyVLXException("Position::position_percent_has_to_be_positive")
    if position_percent > 100:
        raise PyVLXException("Position::position_percent_out_of_range")
    # raw scale is 0..200 in the first byte, second byte always zero
    return bytes((2 * position_percent, 0))
def parse(filename):
with open(filename) as f:
parser = ASDLParser()
return parser.parse(f.read()) | Parse ASDL from the given file and return a Module node describing it. |
def sample_stats_prior_to_xarray(self):
data = self.sample_stats_prior
if not isinstance(data, dict):
raise TypeError("DictConverter.sample_stats_prior is not a dictionary")
return dict_to_dataset(data, library=None, coords=self.coords, dims=self.dims) | Convert sample_stats_prior samples to xarray. |
def key(
seq: Sequence,
tooth: Callable[[Sequence], str] = (
lambda seq: str(random.SystemRandom().choice(seq)).strip()
),
nteeth: int = 6,
delimiter: str = ' ',
) -> str:
return delimiter.join(tooth(seq) for _ in range(nteeth)) | Concatenate strings generated by the tooth function. |
def abort(self):
if self._handle.closed:
return
elif self._protocol is None:
raise TransportError('transport not started')
self._handle.close(self._on_close_complete)
assert self._handle.closed | Close the transport immediately. |
def getAttribute(self, attribute):
    """Return the requested attribute value, or None when absent.

    *attribute* is either a plain name, or a (namespace, name) pair for
    namespaced attributes.
    """
    if type(attribute) in (list, tuple):
        if len(attribute) != 2:
            # py3-compatible raise; the old `raise E, msg` form is a SyntaxError
            raise LookupError('To access attributes must use name or (namespace,name)')
        ns_dict = self.attributes.get(attribute[0])
        if ns_dict is None:
            return None
        return ns_dict.get(attribute[1])
    return self.attributes.get(attribute)
def process_fastq_plain(fastq, **kwargs):
logging.info("Nanoget: Starting to collect statistics from plain fastq file.")
inputfastq = handle_compressed_input(fastq)
return ut.reduce_memory_usage(pd.DataFrame(
data=[res for res in extract_from_fastq(inputfastq) if res],
columns=["quals", "lengths"]
).dropna()) | Combine metrics extracted from a fastq file. |
def success(self, cmd, desc=''):
    """Format *cmd* (with optional *desc*) in the success color style."""
    return self._label_desc(cmd, desc, self.success_color) | Style for a success message. |
def hsts_header(self):
    """Assemble the HSTS policy header value from the configured age and
    subdomain flag."""
    parts = ['max-age={0}'.format(self.hsts_age)]
    if self.hsts_include_subdomains:
        parts.append('includeSubDomains')
    return '; '.join(parts)
def from_tfp(
posterior=None,
*,
var_names=None,
model_fn=None,
feed_dict=None,
posterior_predictive_samples=100,
posterior_predictive_size=1,
observed=None,
coords=None,
dims=None
):
return TfpConverter(
posterior=posterior,
var_names=var_names,
model_fn=model_fn,
feed_dict=feed_dict,
posterior_predictive_samples=posterior_predictive_samples,
posterior_predictive_size=posterior_predictive_size,
observed=observed,
coords=coords,
dims=dims,
).to_inference_data() | Convert tfp data into an InferenceData object. |
def init_build_dir(self):
logger.debug("Initializing %s" % self.build_dir)
if self.verbosity > 1:
self.stdout.write("Initializing build directory")
if self.fs.exists(self.build_dir):
self.fs.removetree(self.build_dir)
self.fs.makedirs(self.build_dir) | Clear out the build directory and create a new one. |
def writeXMLFile(filename, content):
    """Debugging helper: pretty-print the element tree *content* to *filename*."""
    serialized = etree.tostring(content, pretty_print=True)
    # NOTE(review): lxml's tostring returns bytes; writing to a text-mode
    # file only works on Python 2 — confirm the intended interpreter.
    with open(filename, 'w') as xmlfile:
        xmlfile.write(serialized)
def index(self, prefix):
if self.is_external_url_prefix(prefix):
prefix = 'http'
for i, urltype in enumerate(self._url_types):
if urltype.prefix == prefix:
return i
return None | Return the model index for a prefix. |
def getOverlayTransformTrackedDeviceComponent(self, ulOverlayHandle, pchComponentName, unComponentNameSize):
fn = self.function_table.getOverlayTransformTrackedDeviceComponent
punDeviceIndex = TrackedDeviceIndex_t()
result = fn(ulOverlayHandle, byref(punDeviceIndex), pchComponentName, unComponentNameSize)
return result, punDeviceIndex | Gets the transform information when the overlay is rendering on a component. |
def fix_crinfo(crinfo, to="axis"):
    """Normalize crinfo orientation: a 2-row array is transposed into the
    per-axis layout, anything else is returned as an array unchanged."""
    arr = np.asarray(crinfo)
    return arr.T if arr.shape[0] == 2 else arr
def _get_cpu_virtualization(self):
    """Return True when the BIOS reports CPU virtualization enabled.

    Returns False when the setting is disabled or the iLO command is not
    supported on this hardware.
    """
    try:
        cpu_vt = self._get_bios_setting('ProcVirtualization')
    except exception.IloCommandNotSupportedError:
        return False
    # direct comparison replaces the four-line if/else flag dance
    return cpu_vt == 'Enabled'
def delete(context, resource, id, **kwargs):
etag = kwargs.pop('etag', None)
id = id
subresource = kwargs.pop('subresource', None)
subresource_id = kwargs.pop('subresource_id', None)
uri = '%s/%s/%s' % (context.dci_cs_api, resource, id)
if subresource:
uri = '%s/%s/%s' % (uri, subresource, subresource_id)
r = context.session.delete(uri, timeout=HTTP_TIMEOUT,
headers={'If-match': etag})
return r | Delete a specific resource |
def identify(self, req, resp, resource, uri_kwargs):
try:
return req.get_header('X-Api-Key', True)
except (KeyError, HTTPMissingHeader):
pass | Initialize X-Api-Key authentication middleware. |
def subnode(self, node):
self.children.append(node)
node.parent = self
node.adjust_interleave(node.interleave) | Make `node` receiver's child. |
def list_build_configurations_for_set(id=None, name=None, page_size=200, page_index=0, sort="", q=""):
content = list_build_configurations_for_set_raw(id, name, page_size, page_index, sort, q)
if content:
return utils.format_json_list(content) | List all build configurations in a given BuildConfigurationSet. |
def write(filename, mesh, fmt_version, write_binary=True):
try:
writer = _writers[fmt_version]
except KeyError:
try:
writer = _writers[fmt_version.split(".")[0]]
except KeyError:
raise ValueError(
"Need mesh format in {} (got {})".format(
sorted(_writers.keys()), fmt_version
)
)
writer.write(filename, mesh, write_binary=write_binary) | Writes a Gmsh msh file. |
def validate_unicode_decode_error_handler(dummy, value):
if value not in _UNICODE_DECODE_ERROR_HANDLERS:
raise ValueError("%s is an invalid Unicode decode error handler. "
"Must be one of "
"%s" % (value, tuple(_UNICODE_DECODE_ERROR_HANDLERS)))
return value | Validate the Unicode decode error handler option of CodecOptions. |
def _dist_obs_oracle(oracle, query, trn_list):
a = np.subtract(query, [oracle.f_array[t] for t in trn_list])
return (a * a).sum(axis=1) | A helper function calculating distances between a feature and frames in oracle. |
def convert_file(filename, renderer):
try:
with open(filename, 'r') as fin:
rendered = mistletoe.markdown(fin, renderer)
print(rendered, end='')
except OSError:
sys.exit('Cannot open file "{}".'.format(filename)) | Parse a Markdown file and dump the output to stdout. |
def status(name, init_system, verbose):
try:
status = Serv(init_system, verbose=verbose).status(name)
except ServError as ex:
sys.exit(ex)
click.echo(json.dumps(status, indent=4, sort_keys=True)) | WIP! Try at your own expense |
def refresh(self):
try:
self._private_to_public = self.cloud_discovery.discover_nodes()
except Exception as ex:
self.logger.warning("Failed to load addresses from Hazelcast.cloud: {}".format(ex.args[0]),
extra=self._logger_extras) | Refreshes the internal lookup table if necessary. |
def build_mutation_pruner_plugin() -> LaserPlugin:
from mythril.laser.ethereum.plugins.implementations.mutation_pruner import (
MutationPruner,
)
return MutationPruner() | Creates an instance of the mutation pruner plugin |
def _fit_and_score(est, x, y, scorer, train_index, test_index, parameters, fit_params, predict_params):
X_train, y_train = _safe_split(est, x, y, train_index)
train_params = fit_params.copy()
est.set_params(**parameters)
est.fit(X_train, y_train, **train_params)
test_predict_params = predict_params.copy()
X_test, y_test = _safe_split(est, x, y, test_index, train_index)
score = scorer(est, X_test, y_test, **test_predict_params)
if not isinstance(score, numbers.Number):
raise ValueError("scoring must return a number, got %s (%s) instead."
% (str(score), type(score)))
return score | Train survival model on given data and return its score on test data |
def health_check(self):
logger.debug('Health Check on Table: {namespace}'.format(
namespace=self.namespace
))
try:
self.get_all()
return True
except ClientError as e:
logger.exception(e)
logger.error('Error encountered with Database. Assume unhealthy')
return False | Gets a single item to determine if Dynamo is functioning. |
def serialize(self):
return {
'name' : self.name,
'weight' : self.weight,
'value' : self.value,
'msgs' : self.msgs,
'children' : [i.serialize() for i in self.children]
} | Returns a serializable dictionary that represents the result object |
def query_base_timer(self):
(_, _, time) = unpack('<ccI', self.con.send_xid_command("e3", 6))
return time | gets the value from the device's base timer |
def getContainerName(job):
    """Return a unique container name: the job name joined to a random
    base64 suffix, with quote and underscore characters stripped."""
    suffix = base64.b64encode(os.urandom(9), b'-_').decode('utf-8')
    name = '--'.join([str(job), suffix])
    for ch in ("'", '"', '_'):
        name = name.replace(ch, '')
    return name
def copy(self, src, dst, suppress_layouts=False):
url = '/'.join([src.drive,
'api/copy',
str(src.relative_to(src.drive)).rstrip('/')])
params = {'to': str(dst.relative_to(dst.drive)).rstrip('/'),
'suppressLayouts': int(suppress_layouts)}
text, code = self.rest_post(url,
params=params,
session=src.session,
verify=src.verify,
cert=src.cert)
if code not in [200, 201]:
raise RuntimeError("%s" % text) | Copy artifact from src to dst |
def _get_warped_variance(self, mean, std, pred_init=None, deg_gauss_hermite=20):
gh_samples, gh_weights = np.polynomial.hermite.hermgauss(deg_gauss_hermite)
gh_samples = gh_samples[:, None]
gh_weights = gh_weights[None, :]
arg1 = gh_weights.dot(self._get_warped_term(mean, std, gh_samples,
pred_init=pred_init) ** 2) / np.sqrt(np.pi)
arg2 = self._get_warped_mean(mean, std, pred_init=pred_init,
deg_gauss_hermite=deg_gauss_hermite)
return arg1 - (arg2 ** 2) | Calculate the warped variance by using Gauss-Hermite quadrature. |
def _get_versions_manifest(manifest_dir):
all_pkgs = _manifest_progs + [p.get("name", p["cmd"]) for p in _cl_progs] + [p["name"] for p in _alt_progs]
if os.path.exists(manifest_dir):
out = []
for plist in ["toolplus", "python", "r", "debian", "custom"]:
pkg_file = os.path.join(manifest_dir, "%s-packages.yaml" % plist)
if os.path.exists(pkg_file):
with open(pkg_file) as in_handle:
pkg_info = yaml.safe_load(in_handle)
if not pkg_info:
continue
added = []
for pkg in all_pkgs:
if pkg in pkg_info:
added.append(pkg)
out.append({"program": pkg, "version": pkg_info[pkg]["version"]})
for x in added:
all_pkgs.remove(x)
out.sort(key=lambda x: x["program"])
for pkg in all_pkgs:
out.append({"program": pkg, "version": ""})
return out | Retrieve versions from a pre-existing manifest of installed software. |
def regions_coverage(bed_file, target_name, data):
ready_bed = tz.get_in(["depth", target_name, "regions"], data)
if ready_bed:
return ready_bed
else:
return run_mosdepth(data, target_name, bed_file).regions | Generate coverage over regions of interest using mosdepth. |
def _augment_node(node: BaseEntity) -> BaseEntity:
rv = node.copy()
rv['id'] = node.as_sha512()
rv['bel'] = node.as_bel()
for m in chain(node.get(MEMBERS, []), node.get(REACTANTS, []), node.get(PRODUCTS, [])):
m.update(_augment_node(m))
return rv | Add the SHA-512 identifier to a node's dictionary. |
def islive(self, state):
    """A state is "live" when some final state is reachable from it
    (including the state itself being final)."""
    seen = [state]
    idx = 0
    while idx < len(seen):
        cur = seen[idx]
        idx += 1
        if cur in self.finals:
            return True
        if cur in self.map:
            for nxt in self.map[cur].values():
                if nxt not in seen:
                    seen.append(nxt)
    return False
def _startReapingProcesses(self):
lc = LoopingCall(self._reapAllProcesses)
lc.clock = self._reactor
lc.start(0.1, False) | Start a LoopingCall that calls reapAllProcesses. |
def persist(self, storageLevel):
if not isinstance(storageLevel, StorageLevel):
raise TypeError("`storageLevel` should be a StorageLevel, got %s" % type(storageLevel))
javaStorageLevel = self._java_matrix_wrapper._sc._getJavaStorageLevel(storageLevel)
self._java_matrix_wrapper.call("persist", javaStorageLevel)
return self | Persists the underlying RDD with the specified storage level. |
def getPorts(self):
if self.ports:
return self.ports
if not self._gotPorts:
self.ports = [
portpicker.pick_unused_port(),
portpicker.pick_unused_port(),
portpicker.pick_unused_port(),
]
self._gotPorts = True
return self.ports | acquire ports to be used by the SC2 client launched by this process |
def bounds(self):
google_x, google_y = self.google
pixel_x_west, pixel_y_north = google_x * TILE_SIZE, google_y * TILE_SIZE
pixel_x_east, pixel_y_south = (google_x + 1) * TILE_SIZE, (google_y + 1) * TILE_SIZE
point_min = Point.from_pixel(pixel_x=pixel_x_west, pixel_y=pixel_y_south, zoom=self.zoom)
point_max = Point.from_pixel(pixel_x=pixel_x_east, pixel_y=pixel_y_north, zoom=self.zoom)
return point_min, point_max | Gets the bounds of a tile represented as the most west and south point and the most east and north point |
def mate_bottom(self):
" bottom of the stator"
return Mate(self, CoordSystem(
origin=(0, 0, -self.length/2),
xDir=(1, 0, 0),
normal=(0, 0, -1)
)) | bottom of the stator |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.