text stringlengths 89 104k | code_tokens list | avg_line_len float64 7.91 980 | score float64 0 630 |
|---|---|---|---|
def __generate_point(self, index_cluster):
"""!
@brief Generates point in line with parameters of specified cluster.
@param[in] index_cluster (uint): Index of cluster whose parameters are used for point generation.
@return (list) New generated point in line with normal distribution and cluster parameters.
"""
return [ random.gauss(self.__cluster_centers[index_cluster][index_dimension],
self.__cluster_width[index_cluster] / 2.0)
for index_dimension in range(self.__dimension) ] | [
"def",
"__generate_point",
"(",
"self",
",",
"index_cluster",
")",
":",
"return",
"[",
"random",
".",
"gauss",
"(",
"self",
".",
"__cluster_centers",
"[",
"index_cluster",
"]",
"[",
"index_dimension",
"]",
",",
"self",
".",
"__cluster_width",
"[",
"index_cluster",
"]",
"/",
"2.0",
")",
"for",
"index_dimension",
"in",
"range",
"(",
"self",
".",
"__dimension",
")",
"]"
] | 48.166667 | 32.25 |
def fixPoint(self, plotterPoint, canvasPoint):
'adjust visibleBox.xymin so that canvasPoint is plotted at plotterPoint'
self.visibleBox.xmin = canvasPoint.x - self.canvasW(plotterPoint.x-self.plotviewBox.xmin)
self.visibleBox.ymin = canvasPoint.y - self.canvasH(plotterPoint.y-self.plotviewBox.ymin)
self.refresh() | [
"def",
"fixPoint",
"(",
"self",
",",
"plotterPoint",
",",
"canvasPoint",
")",
":",
"self",
".",
"visibleBox",
".",
"xmin",
"=",
"canvasPoint",
".",
"x",
"-",
"self",
".",
"canvasW",
"(",
"plotterPoint",
".",
"x",
"-",
"self",
".",
"plotviewBox",
".",
"xmin",
")",
"self",
".",
"visibleBox",
".",
"ymin",
"=",
"canvasPoint",
".",
"y",
"-",
"self",
".",
"canvasH",
"(",
"plotterPoint",
".",
"y",
"-",
"self",
".",
"plotviewBox",
".",
"ymin",
")",
"self",
".",
"refresh",
"(",
")"
] | 68.4 | 35.6 |
def _readline(self, timeout=1):
"""
Read line from serial port.
:param timeout: timeout, default is 1
:return: stripped line or None
"""
line = self.port.readline(timeout=timeout)
return strip_escape(line.strip()) if line is not None else line | [
"def",
"_readline",
"(",
"self",
",",
"timeout",
"=",
"1",
")",
":",
"line",
"=",
"self",
".",
"port",
".",
"readline",
"(",
"timeout",
"=",
"timeout",
")",
"return",
"strip_escape",
"(",
"line",
".",
"strip",
"(",
")",
")",
"if",
"line",
"is",
"not",
"None",
"else",
"line"
] | 32.444444 | 11.333333 |
def do_raw_get(self, line):
"""raw_get <peer>
"""
def f(p, args):
result = p.raw_get()
tree = ET.fromstring(result)
validate(tree)
print(et_tostring_pp(tree))
self._request(line, f) | [
"def",
"do_raw_get",
"(",
"self",
",",
"line",
")",
":",
"def",
"f",
"(",
"p",
",",
"args",
")",
":",
"result",
"=",
"p",
".",
"raw_get",
"(",
")",
"tree",
"=",
"ET",
".",
"fromstring",
"(",
"result",
")",
"validate",
"(",
"tree",
")",
"print",
"(",
"et_tostring_pp",
"(",
"tree",
")",
")",
"self",
".",
"_request",
"(",
"line",
",",
"f",
")"
] | 23 | 13 |
def projC(gamma, q):
"""return the KL projection on the column constrints """
return np.multiply(gamma, q / np.maximum(np.sum(gamma, axis=0), 1e-10)) | [
"def",
"projC",
"(",
"gamma",
",",
"q",
")",
":",
"return",
"np",
".",
"multiply",
"(",
"gamma",
",",
"q",
"/",
"np",
".",
"maximum",
"(",
"np",
".",
"sum",
"(",
"gamma",
",",
"axis",
"=",
"0",
")",
",",
"1e-10",
")",
")"
] | 51.666667 | 18.333333 |
def for_stmt(self, for_loc, target, in_loc, iter, for_colon_loc, body, else_opt):
"""for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]"""
stmt = ast.For(target=self._assignable(target), iter=iter, body=body, orelse=[],
keyword_loc=for_loc, in_loc=in_loc, for_colon_loc=for_colon_loc,
else_loc=None, else_colon_loc=None,
loc=for_loc.join(body[-1].loc))
if else_opt:
stmt.else_loc, stmt.else_colon_loc, stmt.orelse = else_opt
stmt.loc = stmt.loc.join(stmt.orelse[-1].loc)
return stmt | [
"def",
"for_stmt",
"(",
"self",
",",
"for_loc",
",",
"target",
",",
"in_loc",
",",
"iter",
",",
"for_colon_loc",
",",
"body",
",",
"else_opt",
")",
":",
"stmt",
"=",
"ast",
".",
"For",
"(",
"target",
"=",
"self",
".",
"_assignable",
"(",
"target",
")",
",",
"iter",
"=",
"iter",
",",
"body",
"=",
"body",
",",
"orelse",
"=",
"[",
"]",
",",
"keyword_loc",
"=",
"for_loc",
",",
"in_loc",
"=",
"in_loc",
",",
"for_colon_loc",
"=",
"for_colon_loc",
",",
"else_loc",
"=",
"None",
",",
"else_colon_loc",
"=",
"None",
",",
"loc",
"=",
"for_loc",
".",
"join",
"(",
"body",
"[",
"-",
"1",
"]",
".",
"loc",
")",
")",
"if",
"else_opt",
":",
"stmt",
".",
"else_loc",
",",
"stmt",
".",
"else_colon_loc",
",",
"stmt",
".",
"orelse",
"=",
"else_opt",
"stmt",
".",
"loc",
"=",
"stmt",
".",
"loc",
".",
"join",
"(",
"stmt",
".",
"orelse",
"[",
"-",
"1",
"]",
".",
"loc",
")",
"return",
"stmt"
] | 55.909091 | 26.909091 |
def load_acknowledge_config(self, file_id):
"""
Loads the CWR acknowledge config
:return: the values matrix
"""
if self._cwr_defaults is None:
self._cwr_defaults = self._reader.read_yaml_file(
'acknowledge_config_%s.yml' % file_id)
return self._cwr_defaults | [
"def",
"load_acknowledge_config",
"(",
"self",
",",
"file_id",
")",
":",
"if",
"self",
".",
"_cwr_defaults",
"is",
"None",
":",
"self",
".",
"_cwr_defaults",
"=",
"self",
".",
"_reader",
".",
"read_yaml_file",
"(",
"'acknowledge_config_%s.yml'",
"%",
"file_id",
")",
"return",
"self",
".",
"_cwr_defaults"
] | 32.5 | 9.3 |
def get_all(self) -> List[Commodity]:
""" Loads all non-currency commodities, assuming they are stocks. """
query = (
self.query
.order_by(Commodity.namespace, Commodity.mnemonic)
)
return query.all() | [
"def",
"get_all",
"(",
"self",
")",
"->",
"List",
"[",
"Commodity",
"]",
":",
"query",
"=",
"(",
"self",
".",
"query",
".",
"order_by",
"(",
"Commodity",
".",
"namespace",
",",
"Commodity",
".",
"mnemonic",
")",
")",
"return",
"query",
".",
"all",
"(",
")"
] | 35.714286 | 15.857143 |
def sliding_window_3d(image, step_size, window_size, mask=None, only_whole=True, include_last=False):
"""
Creates generator of sliding windows.
:param image: input image
:param step_size: number of pixels we are going to skip in both the (x, y) direction
:param window_size: the width and height of the window we are going to extract
:param mask: region of interest, if None it will slide through the whole image
:param only_whole: if True - produces only windows of the given window_size
:return: generator that produce upper left corner of the window, center of the window and the sliding window itself
"""
if not isinstance(step_size, tuple):
step_size = (step_size, step_size, step_size)
if image.ndim == 2:
image = np.expand_dims(image, 0)
window_size = (1, window_size[0], window_size[1])
if mask is not None:
mask = np.expand_dims(mask, 0)
if mask is None:
mask = np.ones(image.shape, dtype=np.bool)
# slide a window across the image
for z in xrange(0, image.shape[0], step_size[0]):
# c_z = z + window_size[0] / 2.
for y in xrange(0, image.shape[1], step_size[1]):
# c_y = y + window_size[2] / 2.
for x in xrange(0, image.shape[2], step_size[2]):
# c_x = x + window_size[1] / 2.
# if c_z < mask.shape[0] and c_x < mask.shape[2] and c_y < mask.shape[1] and mask[c_z, c_y, c_x]:
# yield the current window
end_x = x + window_size[1]
end_y = y + window_size[2]
end_z = z + window_size[0]
if only_whole and (end_z > image.shape[0] or end_x > image.shape[2] or end_y > image.shape[1]):
# if only_whole:
continue
# elif include_last:
# mask_out = np.zeros(image.shape, dtype=np.bool)
# x = image.shape[2] - window_size[1]
# y = image.shape[1] - window_size[2]
# z = image.shape[0] - window_size[0]
# end_x = image.shape[2]
# end_y = image.shape[1]
# end_z = image.shape[0]
#
# mask_out[z:end_z, y:end_y, x:end_x] = True
# yield (x, y, z, mask_out, image[z:end_z, y:end_y, x:end_x])
else:
mask_out = np.zeros(image.shape, dtype=np.bool)
mask_out[z:end_z, y:end_y, x:end_x] = True
yield (x, y, z, mask_out, image[z:end_z, y:end_y, x:end_x]) | [
"def",
"sliding_window_3d",
"(",
"image",
",",
"step_size",
",",
"window_size",
",",
"mask",
"=",
"None",
",",
"only_whole",
"=",
"True",
",",
"include_last",
"=",
"False",
")",
":",
"if",
"not",
"isinstance",
"(",
"step_size",
",",
"tuple",
")",
":",
"step_size",
"=",
"(",
"step_size",
",",
"step_size",
",",
"step_size",
")",
"if",
"image",
".",
"ndim",
"==",
"2",
":",
"image",
"=",
"np",
".",
"expand_dims",
"(",
"image",
",",
"0",
")",
"window_size",
"=",
"(",
"1",
",",
"window_size",
"[",
"0",
"]",
",",
"window_size",
"[",
"1",
"]",
")",
"if",
"mask",
"is",
"not",
"None",
":",
"mask",
"=",
"np",
".",
"expand_dims",
"(",
"mask",
",",
"0",
")",
"if",
"mask",
"is",
"None",
":",
"mask",
"=",
"np",
".",
"ones",
"(",
"image",
".",
"shape",
",",
"dtype",
"=",
"np",
".",
"bool",
")",
"# slide a window across the image",
"for",
"z",
"in",
"xrange",
"(",
"0",
",",
"image",
".",
"shape",
"[",
"0",
"]",
",",
"step_size",
"[",
"0",
"]",
")",
":",
"# c_z = z + window_size[0] / 2.",
"for",
"y",
"in",
"xrange",
"(",
"0",
",",
"image",
".",
"shape",
"[",
"1",
"]",
",",
"step_size",
"[",
"1",
"]",
")",
":",
"# c_y = y + window_size[2] / 2.",
"for",
"x",
"in",
"xrange",
"(",
"0",
",",
"image",
".",
"shape",
"[",
"2",
"]",
",",
"step_size",
"[",
"2",
"]",
")",
":",
"# c_x = x + window_size[1] / 2.",
"# if c_z < mask.shape[0] and c_x < mask.shape[2] and c_y < mask.shape[1] and mask[c_z, c_y, c_x]:",
"# yield the current window",
"end_x",
"=",
"x",
"+",
"window_size",
"[",
"1",
"]",
"end_y",
"=",
"y",
"+",
"window_size",
"[",
"2",
"]",
"end_z",
"=",
"z",
"+",
"window_size",
"[",
"0",
"]",
"if",
"only_whole",
"and",
"(",
"end_z",
">",
"image",
".",
"shape",
"[",
"0",
"]",
"or",
"end_x",
">",
"image",
".",
"shape",
"[",
"2",
"]",
"or",
"end_y",
">",
"image",
".",
"shape",
"[",
"1",
"]",
")",
":",
"# if only_whole:",
"continue",
"# elif include_last:",
"# mask_out = np.zeros(image.shape, dtype=np.bool)",
"# x = image.shape[2] - window_size[1]",
"# y = image.shape[1] - window_size[2]",
"# z = image.shape[0] - window_size[0]",
"# end_x = image.shape[2]",
"# end_y = image.shape[1]",
"# end_z = image.shape[0]",
"#",
"# mask_out[z:end_z, y:end_y, x:end_x] = True",
"# yield (x, y, z, mask_out, image[z:end_z, y:end_y, x:end_x])",
"else",
":",
"mask_out",
"=",
"np",
".",
"zeros",
"(",
"image",
".",
"shape",
",",
"dtype",
"=",
"np",
".",
"bool",
")",
"mask_out",
"[",
"z",
":",
"end_z",
",",
"y",
":",
"end_y",
",",
"x",
":",
"end_x",
"]",
"=",
"True",
"yield",
"(",
"x",
",",
"y",
",",
"z",
",",
"mask_out",
",",
"image",
"[",
"z",
":",
"end_z",
",",
"y",
":",
"end_y",
",",
"x",
":",
"end_x",
"]",
")"
] | 53.55102 | 19.714286 |
def download_workflow_description_file(self, filename):
'''Downloads the workflow description and writes it to a *YAML* file.
Parameters
----------
filename: str
path to the file to which description should be written
See also
--------
:meth:`tmclient.api.TmClient.download_workflow_description`
'''
description = self.download_workflow_description()
logger.info('write workflow description to file: %s', filename)
with open(filename, 'w') as f:
content = yaml.safe_dump(
description, default_flow_style=False, explicit_start=True
)
f.write(content) | [
"def",
"download_workflow_description_file",
"(",
"self",
",",
"filename",
")",
":",
"description",
"=",
"self",
".",
"download_workflow_description",
"(",
")",
"logger",
".",
"info",
"(",
"'write workflow description to file: %s'",
",",
"filename",
")",
"with",
"open",
"(",
"filename",
",",
"'w'",
")",
"as",
"f",
":",
"content",
"=",
"yaml",
".",
"safe_dump",
"(",
"description",
",",
"default_flow_style",
"=",
"False",
",",
"explicit_start",
"=",
"True",
")",
"f",
".",
"write",
"(",
"content",
")"
] | 36.052632 | 23.842105 |
def _update_resume_for_completed(self):
# type: (Descriptor) -> None
"""Update resume for completion
:param Descriptor self: this
"""
if not self.is_resumable:
return
with self._meta_lock:
self._resume_mgr.add_or_update_record(
self.final_path, self._ase, self._chunk_size,
self._next_integrity_chunk, True, None,
) | [
"def",
"_update_resume_for_completed",
"(",
"self",
")",
":",
"# type: (Descriptor) -> None",
"if",
"not",
"self",
".",
"is_resumable",
":",
"return",
"with",
"self",
".",
"_meta_lock",
":",
"self",
".",
"_resume_mgr",
".",
"add_or_update_record",
"(",
"self",
".",
"final_path",
",",
"self",
".",
"_ase",
",",
"self",
".",
"_chunk_size",
",",
"self",
".",
"_next_integrity_chunk",
",",
"True",
",",
"None",
",",
")"
] | 35 | 10.166667 |
def violationScore(self,meterPos,pos_i=None,slot_i=None,num_slots=None,all_positions=None,parse=None):
"""call this on a MeterPosition to return an integer representing the violation value
for this Constraint in this MPos (0 represents no violation)"""
violation = None
if self.constr != None:
violation = self.constr.parse(meterPos)
else:
violation = self.__hardparse(meterPos,pos_i=pos_i,slot_i=slot_i,num_slots=num_slots,all_positions=all_positions,parse=parse)
#violation = self.__hardparse(meterPos)
if violation != "*":
meterPos.constraintScores[self] += violation
"""
print
print '>>',slot_i,num_slots, self.name, meterPos, violation, all_positions
for slot in meterPos.slots:
print slot, slot.feature('prom.stress')
print"""
return violation | [
"def",
"violationScore",
"(",
"self",
",",
"meterPos",
",",
"pos_i",
"=",
"None",
",",
"slot_i",
"=",
"None",
",",
"num_slots",
"=",
"None",
",",
"all_positions",
"=",
"None",
",",
"parse",
"=",
"None",
")",
":",
"violation",
"=",
"None",
"if",
"self",
".",
"constr",
"!=",
"None",
":",
"violation",
"=",
"self",
".",
"constr",
".",
"parse",
"(",
"meterPos",
")",
"else",
":",
"violation",
"=",
"self",
".",
"__hardparse",
"(",
"meterPos",
",",
"pos_i",
"=",
"pos_i",
",",
"slot_i",
"=",
"slot_i",
",",
"num_slots",
"=",
"num_slots",
",",
"all_positions",
"=",
"all_positions",
",",
"parse",
"=",
"parse",
")",
"#violation = self.__hardparse(meterPos)",
"if",
"violation",
"!=",
"\"*\"",
":",
"meterPos",
".",
"constraintScores",
"[",
"self",
"]",
"+=",
"violation",
"\"\"\"\n\t\tprint\n\t\tprint '>>',slot_i,num_slots, self.name, meterPos, violation, all_positions\n\t\tfor slot in meterPos.slots:\n\t\t\tprint slot, slot.feature('prom.stress')\n\t\tprint\"\"\"",
"return",
"violation"
] | 36.666667 | 22.428571 |
def get(self, default=None):
"""
return the cached value or default if it can't be found
:param default: default value
:return: cached value
"""
d = cache.get(self.key)
return ((json.loads(d.decode('utf-8')) if self.serialize else d)
if d is not None
else default) | [
"def",
"get",
"(",
"self",
",",
"default",
"=",
"None",
")",
":",
"d",
"=",
"cache",
".",
"get",
"(",
"self",
".",
"key",
")",
"return",
"(",
"(",
"json",
".",
"loads",
"(",
"d",
".",
"decode",
"(",
"'utf-8'",
")",
")",
"if",
"self",
".",
"serialize",
"else",
"d",
")",
"if",
"d",
"is",
"not",
"None",
"else",
"default",
")"
] | 31.181818 | 13.545455 |
def _get_bls_stats(stimes,
smags,
serrs,
thistransdepth,
thistransduration,
ingressdurationfraction,
nphasebins,
thistransingressbin,
thistransegressbin,
thisbestperiod,
thisnphasebins,
magsarefluxes=False,
verbose=False):
'''
Actually calculates the stats.
'''
try:
# try getting the minimum light epoch using the phase bin method
me_epochbin = int((thistransegressbin +
thistransingressbin)/2.0)
me_phases = (
(stimes - stimes.min())/thisbestperiod -
npfloor((stimes - stimes.min())/thisbestperiod)
)
me_phases_sortind = npargsort(me_phases)
me_sorted_phases = me_phases[me_phases_sortind]
me_sorted_times = stimes[me_phases_sortind]
me_bins = nplinspace(0.0, 1.0, thisnphasebins)
me_bininds = npdigitize(me_sorted_phases, me_bins)
me_centertransit_ind = me_bininds == me_epochbin
me_centertransit_phase = (
npmedian(me_sorted_phases[me_centertransit_ind])
)
me_centertransit_timeloc = npwhere(
npabs(me_sorted_phases - me_centertransit_phase) ==
npmin(npabs(me_sorted_phases - me_centertransit_phase))
)
me_centertransit_time = me_sorted_times[
me_centertransit_timeloc
]
if me_centertransit_time.size > 1:
LOGWARNING('multiple possible times-of-center transits '
'found for period %.7f, picking the first '
'one from: %s' %
(thisbestperiod, repr(me_centertransit_time)))
thisminepoch = me_centertransit_time[0]
except Exception as e:
LOGEXCEPTION(
'could not determine the center time of transit for '
'the phased LC, trying SavGol fit instead...'
)
# fit a Savitsky-Golay instead and get its minimum
savfit = savgol_fit_magseries(stimes, smags, serrs,
thisbestperiod,
magsarefluxes=magsarefluxes,
verbose=verbose,
sigclip=None)
thisminepoch = savfit['fitinfo']['fitepoch']
if isinstance(thisminepoch, npndarray):
if verbose:
LOGWARNING('minimum epoch is actually an array:\n'
'%s\n'
'instead of a float, '
'are there duplicate time values '
'in the original input? '
'will use the first value in this array.'
% repr(thisminepoch))
thisminepoch = thisminepoch[0]
# set up trapezoid transit model to fit for this LC
transitparams = [
thisbestperiod,
thisminepoch,
thistransdepth,
thistransduration,
ingressdurationfraction*thistransduration
]
modelfit = traptransit_fit_magseries(
stimes,
smags,
serrs,
transitparams,
sigclip=None,
magsarefluxes=magsarefluxes,
verbose=verbose
)
# if the model fit succeeds, calculate SNR using the trapezoid model fit
if modelfit and modelfit['fitinfo']['finalparams'] is not None:
fitparams = modelfit['fitinfo']['finalparams']
fiterrs = modelfit['fitinfo']['finalparamerrs']
modelmags, actualmags, modelphase = (
modelfit['fitinfo']['fitmags'],
modelfit['magseries']['mags'],
modelfit['magseries']['phase']
)
subtractedmags = actualmags - modelmags
subtractedrms = npstd(subtractedmags)
fit_period, fit_epoch, fit_depth, fit_duration, fit_ingress_dur = (
fitparams
)
npts_in_transit = modelfit['fitinfo']['ntransitpoints']
transit_snr = (
npsqrt(npts_in_transit) * npabs(fit_depth/subtractedrms)
)
if verbose:
LOGINFO('refit best period: %.6f, '
'refit center of transit: %.5f' %
(fit_period, fit_epoch))
LOGINFO('npoints in transit: %s' % npts_in_transit)
LOGINFO('transit depth (delta): %.5f, '
'frac transit length (q): %.3f, '
' SNR: %.3f' %
(fit_depth,
fit_duration,
transit_snr))
return {'period':fit_period,
'epoch':fit_epoch,
'snr':transit_snr,
'transitdepth':fit_depth,
'transitduration':fit_duration,
'nphasebins':nphasebins,
'transingressbin':thistransingressbin,
'transegressbin':thistransegressbin,
'npoints_in_transit':npts_in_transit,
'blsmodel':modelmags,
'subtractedmags':subtractedmags,
'phasedmags':actualmags,
'phases':modelphase,
'fitparams':fitparams,
'fiterrs':fiterrs,
'fitinfo':modelfit}
# if the model fit doesn't work, then do the SNR calculation the old way
else:
# phase using this epoch
phased_magseries = phase_magseries_with_errs(stimes,
smags,
serrs,
thisbestperiod,
thisminepoch,
wrap=False,
sort=True)
tphase = phased_magseries['phase']
tmags = phased_magseries['mags']
# use the transit depth and duration to subtract the BLS transit
# model from the phased mag series. we're centered about 0.0 as the
# phase of the transit minimum so we need to look at stuff from
# [0.0, transitphase] and [1.0-transitphase, 1.0]
transitphase = thistransduration/2.0
transitindices = ((tphase < transitphase) |
(tphase > (1.0 - transitphase)))
# this is the BLS model
# constant = median(tmags) outside transit
# constant = thistransitdepth inside transit
blsmodel = npfull_like(tmags, npmedian(tmags))
if magsarefluxes:
# eebls.f returns +ve transit depth for fluxes
# so we need to subtract here to get fainter fluxes in transit
blsmodel[transitindices] = (
blsmodel[transitindices] - thistransdepth
)
else:
# eebls.f returns -ve transit depth for magnitudes
# so we need to subtract here to get fainter mags in transits
blsmodel[transitindices] = (
blsmodel[transitindices] - thistransdepth
)
# see __init__/get_snr_of_dip docstring for description of transit
# SNR equation, which is what we use for `thissnr`.
subtractedmags = tmags - blsmodel
subtractedrms = npstd(subtractedmags)
npts_in_transit = len(tmags[transitindices])
thissnr = (
npsqrt(npts_in_transit) * npabs(thistransdepth/subtractedrms)
)
# tell user about stuff if verbose = True
if verbose:
LOGINFO('refit best period: %.6f, '
'refit center of transit: %.5f' %
(thisbestperiod, thisminepoch))
LOGINFO('transit ingress phase = %.3f to %.3f' % (1.0 -
transitphase,
1.0))
LOGINFO('transit egress phase = %.3f to %.3f' % (0.0,
transitphase))
LOGINFO('npoints in transit: %s' % tmags[transitindices].size)
LOGINFO('transit depth (delta): %.5f, '
'frac transit length (q): %.3f, '
' SNR: %.3f' %
(thistransdepth,
thistransduration,
thissnr))
return {'period':thisbestperiod,
'epoch':thisminepoch,
'snr':thissnr,
'transitdepth':thistransdepth,
'transitduration':thistransduration,
'nphasebins':nphasebins,
'transingressbin':thistransingressbin,
'transegressbin':thistransegressbin,
'blsmodel':blsmodel,
'subtractedmags':subtractedmags,
'phasedmags':tmags,
'phases':tphase} | [
"def",
"_get_bls_stats",
"(",
"stimes",
",",
"smags",
",",
"serrs",
",",
"thistransdepth",
",",
"thistransduration",
",",
"ingressdurationfraction",
",",
"nphasebins",
",",
"thistransingressbin",
",",
"thistransegressbin",
",",
"thisbestperiod",
",",
"thisnphasebins",
",",
"magsarefluxes",
"=",
"False",
",",
"verbose",
"=",
"False",
")",
":",
"try",
":",
"# try getting the minimum light epoch using the phase bin method",
"me_epochbin",
"=",
"int",
"(",
"(",
"thistransegressbin",
"+",
"thistransingressbin",
")",
"/",
"2.0",
")",
"me_phases",
"=",
"(",
"(",
"stimes",
"-",
"stimes",
".",
"min",
"(",
")",
")",
"/",
"thisbestperiod",
"-",
"npfloor",
"(",
"(",
"stimes",
"-",
"stimes",
".",
"min",
"(",
")",
")",
"/",
"thisbestperiod",
")",
")",
"me_phases_sortind",
"=",
"npargsort",
"(",
"me_phases",
")",
"me_sorted_phases",
"=",
"me_phases",
"[",
"me_phases_sortind",
"]",
"me_sorted_times",
"=",
"stimes",
"[",
"me_phases_sortind",
"]",
"me_bins",
"=",
"nplinspace",
"(",
"0.0",
",",
"1.0",
",",
"thisnphasebins",
")",
"me_bininds",
"=",
"npdigitize",
"(",
"me_sorted_phases",
",",
"me_bins",
")",
"me_centertransit_ind",
"=",
"me_bininds",
"==",
"me_epochbin",
"me_centertransit_phase",
"=",
"(",
"npmedian",
"(",
"me_sorted_phases",
"[",
"me_centertransit_ind",
"]",
")",
")",
"me_centertransit_timeloc",
"=",
"npwhere",
"(",
"npabs",
"(",
"me_sorted_phases",
"-",
"me_centertransit_phase",
")",
"==",
"npmin",
"(",
"npabs",
"(",
"me_sorted_phases",
"-",
"me_centertransit_phase",
")",
")",
")",
"me_centertransit_time",
"=",
"me_sorted_times",
"[",
"me_centertransit_timeloc",
"]",
"if",
"me_centertransit_time",
".",
"size",
">",
"1",
":",
"LOGWARNING",
"(",
"'multiple possible times-of-center transits '",
"'found for period %.7f, picking the first '",
"'one from: %s'",
"%",
"(",
"thisbestperiod",
",",
"repr",
"(",
"me_centertransit_time",
")",
")",
")",
"thisminepoch",
"=",
"me_centertransit_time",
"[",
"0",
"]",
"except",
"Exception",
"as",
"e",
":",
"LOGEXCEPTION",
"(",
"'could not determine the center time of transit for '",
"'the phased LC, trying SavGol fit instead...'",
")",
"# fit a Savitsky-Golay instead and get its minimum",
"savfit",
"=",
"savgol_fit_magseries",
"(",
"stimes",
",",
"smags",
",",
"serrs",
",",
"thisbestperiod",
",",
"magsarefluxes",
"=",
"magsarefluxes",
",",
"verbose",
"=",
"verbose",
",",
"sigclip",
"=",
"None",
")",
"thisminepoch",
"=",
"savfit",
"[",
"'fitinfo'",
"]",
"[",
"'fitepoch'",
"]",
"if",
"isinstance",
"(",
"thisminepoch",
",",
"npndarray",
")",
":",
"if",
"verbose",
":",
"LOGWARNING",
"(",
"'minimum epoch is actually an array:\\n'",
"'%s\\n'",
"'instead of a float, '",
"'are there duplicate time values '",
"'in the original input? '",
"'will use the first value in this array.'",
"%",
"repr",
"(",
"thisminepoch",
")",
")",
"thisminepoch",
"=",
"thisminepoch",
"[",
"0",
"]",
"# set up trapezoid transit model to fit for this LC",
"transitparams",
"=",
"[",
"thisbestperiod",
",",
"thisminepoch",
",",
"thistransdepth",
",",
"thistransduration",
",",
"ingressdurationfraction",
"*",
"thistransduration",
"]",
"modelfit",
"=",
"traptransit_fit_magseries",
"(",
"stimes",
",",
"smags",
",",
"serrs",
",",
"transitparams",
",",
"sigclip",
"=",
"None",
",",
"magsarefluxes",
"=",
"magsarefluxes",
",",
"verbose",
"=",
"verbose",
")",
"# if the model fit succeeds, calculate SNR using the trapezoid model fit",
"if",
"modelfit",
"and",
"modelfit",
"[",
"'fitinfo'",
"]",
"[",
"'finalparams'",
"]",
"is",
"not",
"None",
":",
"fitparams",
"=",
"modelfit",
"[",
"'fitinfo'",
"]",
"[",
"'finalparams'",
"]",
"fiterrs",
"=",
"modelfit",
"[",
"'fitinfo'",
"]",
"[",
"'finalparamerrs'",
"]",
"modelmags",
",",
"actualmags",
",",
"modelphase",
"=",
"(",
"modelfit",
"[",
"'fitinfo'",
"]",
"[",
"'fitmags'",
"]",
",",
"modelfit",
"[",
"'magseries'",
"]",
"[",
"'mags'",
"]",
",",
"modelfit",
"[",
"'magseries'",
"]",
"[",
"'phase'",
"]",
")",
"subtractedmags",
"=",
"actualmags",
"-",
"modelmags",
"subtractedrms",
"=",
"npstd",
"(",
"subtractedmags",
")",
"fit_period",
",",
"fit_epoch",
",",
"fit_depth",
",",
"fit_duration",
",",
"fit_ingress_dur",
"=",
"(",
"fitparams",
")",
"npts_in_transit",
"=",
"modelfit",
"[",
"'fitinfo'",
"]",
"[",
"'ntransitpoints'",
"]",
"transit_snr",
"=",
"(",
"npsqrt",
"(",
"npts_in_transit",
")",
"*",
"npabs",
"(",
"fit_depth",
"/",
"subtractedrms",
")",
")",
"if",
"verbose",
":",
"LOGINFO",
"(",
"'refit best period: %.6f, '",
"'refit center of transit: %.5f'",
"%",
"(",
"fit_period",
",",
"fit_epoch",
")",
")",
"LOGINFO",
"(",
"'npoints in transit: %s'",
"%",
"npts_in_transit",
")",
"LOGINFO",
"(",
"'transit depth (delta): %.5f, '",
"'frac transit length (q): %.3f, '",
"' SNR: %.3f'",
"%",
"(",
"fit_depth",
",",
"fit_duration",
",",
"transit_snr",
")",
")",
"return",
"{",
"'period'",
":",
"fit_period",
",",
"'epoch'",
":",
"fit_epoch",
",",
"'snr'",
":",
"transit_snr",
",",
"'transitdepth'",
":",
"fit_depth",
",",
"'transitduration'",
":",
"fit_duration",
",",
"'nphasebins'",
":",
"nphasebins",
",",
"'transingressbin'",
":",
"thistransingressbin",
",",
"'transegressbin'",
":",
"thistransegressbin",
",",
"'npoints_in_transit'",
":",
"npts_in_transit",
",",
"'blsmodel'",
":",
"modelmags",
",",
"'subtractedmags'",
":",
"subtractedmags",
",",
"'phasedmags'",
":",
"actualmags",
",",
"'phases'",
":",
"modelphase",
",",
"'fitparams'",
":",
"fitparams",
",",
"'fiterrs'",
":",
"fiterrs",
",",
"'fitinfo'",
":",
"modelfit",
"}",
"# if the model fit doesn't work, then do the SNR calculation the old way",
"else",
":",
"# phase using this epoch",
"phased_magseries",
"=",
"phase_magseries_with_errs",
"(",
"stimes",
",",
"smags",
",",
"serrs",
",",
"thisbestperiod",
",",
"thisminepoch",
",",
"wrap",
"=",
"False",
",",
"sort",
"=",
"True",
")",
"tphase",
"=",
"phased_magseries",
"[",
"'phase'",
"]",
"tmags",
"=",
"phased_magseries",
"[",
"'mags'",
"]",
"# use the transit depth and duration to subtract the BLS transit",
"# model from the phased mag series. we're centered about 0.0 as the",
"# phase of the transit minimum so we need to look at stuff from",
"# [0.0, transitphase] and [1.0-transitphase, 1.0]",
"transitphase",
"=",
"thistransduration",
"/",
"2.0",
"transitindices",
"=",
"(",
"(",
"tphase",
"<",
"transitphase",
")",
"|",
"(",
"tphase",
">",
"(",
"1.0",
"-",
"transitphase",
")",
")",
")",
"# this is the BLS model",
"# constant = median(tmags) outside transit",
"# constant = thistransitdepth inside transit",
"blsmodel",
"=",
"npfull_like",
"(",
"tmags",
",",
"npmedian",
"(",
"tmags",
")",
")",
"if",
"magsarefluxes",
":",
"# eebls.f returns +ve transit depth for fluxes",
"# so we need to subtract here to get fainter fluxes in transit",
"blsmodel",
"[",
"transitindices",
"]",
"=",
"(",
"blsmodel",
"[",
"transitindices",
"]",
"-",
"thistransdepth",
")",
"else",
":",
"# eebls.f returns -ve transit depth for magnitudes",
"# so we need to subtract here to get fainter mags in transits",
"blsmodel",
"[",
"transitindices",
"]",
"=",
"(",
"blsmodel",
"[",
"transitindices",
"]",
"-",
"thistransdepth",
")",
"# see __init__/get_snr_of_dip docstring for description of transit",
"# SNR equation, which is what we use for `thissnr`.",
"subtractedmags",
"=",
"tmags",
"-",
"blsmodel",
"subtractedrms",
"=",
"npstd",
"(",
"subtractedmags",
")",
"npts_in_transit",
"=",
"len",
"(",
"tmags",
"[",
"transitindices",
"]",
")",
"thissnr",
"=",
"(",
"npsqrt",
"(",
"npts_in_transit",
")",
"*",
"npabs",
"(",
"thistransdepth",
"/",
"subtractedrms",
")",
")",
"# tell user about stuff if verbose = True",
"if",
"verbose",
":",
"LOGINFO",
"(",
"'refit best period: %.6f, '",
"'refit center of transit: %.5f'",
"%",
"(",
"thisbestperiod",
",",
"thisminepoch",
")",
")",
"LOGINFO",
"(",
"'transit ingress phase = %.3f to %.3f'",
"%",
"(",
"1.0",
"-",
"transitphase",
",",
"1.0",
")",
")",
"LOGINFO",
"(",
"'transit egress phase = %.3f to %.3f'",
"%",
"(",
"0.0",
",",
"transitphase",
")",
")",
"LOGINFO",
"(",
"'npoints in transit: %s'",
"%",
"tmags",
"[",
"transitindices",
"]",
".",
"size",
")",
"LOGINFO",
"(",
"'transit depth (delta): %.5f, '",
"'frac transit length (q): %.3f, '",
"' SNR: %.3f'",
"%",
"(",
"thistransdepth",
",",
"thistransduration",
",",
"thissnr",
")",
")",
"return",
"{",
"'period'",
":",
"thisbestperiod",
",",
"'epoch'",
":",
"thisminepoch",
",",
"'snr'",
":",
"thissnr",
",",
"'transitdepth'",
":",
"thistransdepth",
",",
"'transitduration'",
":",
"thistransduration",
",",
"'nphasebins'",
":",
"nphasebins",
",",
"'transingressbin'",
":",
"thistransingressbin",
",",
"'transegressbin'",
":",
"thistransegressbin",
",",
"'blsmodel'",
":",
"blsmodel",
",",
"'subtractedmags'",
":",
"subtractedmags",
",",
"'phasedmags'",
":",
"tmags",
",",
"'phases'",
":",
"tphase",
"}"
] | 36.345833 | 19.120833 |
def _set_src_ip_host(self, v, load=False):
"""
Setter method for src_ip_host, mapped from YANG variable /overlay/access_list/type/vxlan/extended/ext_seq/src_ip_host (inet:ipv4-address)
If this variable is read-only (config: false) in the
source YANG file, then _set_src_ip_host is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_src_ip_host() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="src-ip-host", rest_name="src-ip-host", parent=self, choice=(u'choice-src-ip', u'case-src-ip-host'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'src ip host: A.B.C.D', u'display-when': u'(../dst-ip-host) or (../dst-ip) or (../dst-ip-any)', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-vxlan-visibility', defining_module='brocade-vxlan-visibility', yang_type='inet:ipv4-address', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """src_ip_host must be of a type compatible with inet:ipv4-address""",
'defined-type': "inet:ipv4-address",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="src-ip-host", rest_name="src-ip-host", parent=self, choice=(u'choice-src-ip', u'case-src-ip-host'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'src ip host: A.B.C.D', u'display-when': u'(../dst-ip-host) or (../dst-ip) or (../dst-ip-any)', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-vxlan-visibility', defining_module='brocade-vxlan-visibility', yang_type='inet:ipv4-address', is_config=True)""",
})
self.__src_ip_host = t
if hasattr(self, '_set'):
self._set() | [
"def",
"_set_src_ip_host",
"(",
"self",
",",
"v",
",",
"load",
"=",
"False",
")",
":",
"if",
"hasattr",
"(",
"v",
",",
"\"_utype\"",
")",
":",
"v",
"=",
"v",
".",
"_utype",
"(",
"v",
")",
"try",
":",
"t",
"=",
"YANGDynClass",
"(",
"v",
",",
"base",
"=",
"RestrictedClassType",
"(",
"base_type",
"=",
"unicode",
",",
"restriction_dict",
"=",
"{",
"'pattern'",
":",
"u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'",
"}",
")",
",",
"is_leaf",
"=",
"True",
",",
"yang_name",
"=",
"\"src-ip-host\"",
",",
"rest_name",
"=",
"\"src-ip-host\"",
",",
"parent",
"=",
"self",
",",
"choice",
"=",
"(",
"u'choice-src-ip'",
",",
"u'case-src-ip-host'",
")",
",",
"path_helper",
"=",
"self",
".",
"_path_helper",
",",
"extmethods",
"=",
"self",
".",
"_extmethods",
",",
"register_paths",
"=",
"True",
",",
"extensions",
"=",
"{",
"u'tailf-common'",
":",
"{",
"u'info'",
":",
"u'src ip host: A.B.C.D'",
",",
"u'display-when'",
":",
"u'(../dst-ip-host) or (../dst-ip) or (../dst-ip-any)'",
",",
"u'cli-incomplete-command'",
":",
"None",
"}",
"}",
",",
"namespace",
"=",
"'urn:brocade.com:mgmt:brocade-vxlan-visibility'",
",",
"defining_module",
"=",
"'brocade-vxlan-visibility'",
",",
"yang_type",
"=",
"'inet:ipv4-address'",
",",
"is_config",
"=",
"True",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"raise",
"ValueError",
"(",
"{",
"'error-string'",
":",
"\"\"\"src_ip_host must be of a type compatible with inet:ipv4-address\"\"\"",
",",
"'defined-type'",
":",
"\"inet:ipv4-address\"",
",",
"'generated-type'",
":",
"\"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}), is_leaf=True, yang_name=\"src-ip-host\", rest_name=\"src-ip-host\", parent=self, choice=(u'choice-src-ip', u'case-src-ip-host'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'src ip host: A.B.C.D', u'display-when': u'(../dst-ip-host) or (../dst-ip) or (../dst-ip-any)', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-vxlan-visibility', defining_module='brocade-vxlan-visibility', yang_type='inet:ipv4-address', is_config=True)\"\"\"",
",",
"}",
")",
"self",
".",
"__src_ip_host",
"=",
"t",
"if",
"hasattr",
"(",
"self",
",",
"'_set'",
")",
":",
"self",
".",
"_set",
"(",
")"
] | 105.5 | 50.636364 |
def dict_pop_or(d, key, default=None):
""" Try popping a key from a dict.
Instead of raising KeyError, just return the default value.
"""
val = default
with suppress(KeyError):
val = d.pop(key)
return val | [
"def",
"dict_pop_or",
"(",
"d",
",",
"key",
",",
"default",
"=",
"None",
")",
":",
"val",
"=",
"default",
"with",
"suppress",
"(",
"KeyError",
")",
":",
"val",
"=",
"d",
".",
"pop",
"(",
"key",
")",
"return",
"val"
] | 29.125 | 13.25 |
def writeProxy(self, obj):
"""
Encodes a proxied object to the stream.
@since: 0.6
"""
proxy = self.context.getProxyForObject(obj)
self.writeObject(proxy, is_proxy=True) | [
"def",
"writeProxy",
"(",
"self",
",",
"obj",
")",
":",
"proxy",
"=",
"self",
".",
"context",
".",
"getProxyForObject",
"(",
"obj",
")",
"self",
".",
"writeObject",
"(",
"proxy",
",",
"is_proxy",
"=",
"True",
")"
] | 23.444444 | 15.444444 |
async def _load_all(self):
'''
Load all the appointments from persistent storage
'''
to_delete = []
for iden, val in self._hivedict.items():
try:
appt = _Appt.unpack(val)
if appt.iden != iden:
raise s_exc.InconsistentStorage(mesg='iden inconsistency')
self._addappt(iden, appt)
self._next_indx = max(self._next_indx, appt.indx + 1)
except (s_exc.InconsistentStorage, s_exc.BadStorageVersion, s_exc.BadTime, TypeError, KeyError,
UnicodeDecodeError) as e:
logger.warning('Invalid appointment %r found in storage: %r. Removing.', iden, e)
to_delete.append(iden)
continue
for iden in to_delete:
await self._hivedict.pop(iden)
# Make sure we don't assign the same index to 2 appointments
if self.appts:
maxindx = max(appt.indx for appt in self.appts.values())
self._next_indx = maxindx + 1 | [
"async",
"def",
"_load_all",
"(",
"self",
")",
":",
"to_delete",
"=",
"[",
"]",
"for",
"iden",
",",
"val",
"in",
"self",
".",
"_hivedict",
".",
"items",
"(",
")",
":",
"try",
":",
"appt",
"=",
"_Appt",
".",
"unpack",
"(",
"val",
")",
"if",
"appt",
".",
"iden",
"!=",
"iden",
":",
"raise",
"s_exc",
".",
"InconsistentStorage",
"(",
"mesg",
"=",
"'iden inconsistency'",
")",
"self",
".",
"_addappt",
"(",
"iden",
",",
"appt",
")",
"self",
".",
"_next_indx",
"=",
"max",
"(",
"self",
".",
"_next_indx",
",",
"appt",
".",
"indx",
"+",
"1",
")",
"except",
"(",
"s_exc",
".",
"InconsistentStorage",
",",
"s_exc",
".",
"BadStorageVersion",
",",
"s_exc",
".",
"BadTime",
",",
"TypeError",
",",
"KeyError",
",",
"UnicodeDecodeError",
")",
"as",
"e",
":",
"logger",
".",
"warning",
"(",
"'Invalid appointment %r found in storage: %r. Removing.'",
",",
"iden",
",",
"e",
")",
"to_delete",
".",
"append",
"(",
"iden",
")",
"continue",
"for",
"iden",
"in",
"to_delete",
":",
"await",
"self",
".",
"_hivedict",
".",
"pop",
"(",
"iden",
")",
"# Make sure we don't assign the same index to 2 appointments",
"if",
"self",
".",
"appts",
":",
"maxindx",
"=",
"max",
"(",
"appt",
".",
"indx",
"for",
"appt",
"in",
"self",
".",
"appts",
".",
"values",
"(",
")",
")",
"self",
".",
"_next_indx",
"=",
"maxindx",
"+",
"1"
] | 41.56 | 21 |
def _replace_coerce(self, to_replace, value, inplace=True, regex=False,
convert=False, mask=None):
"""
Replace value corresponding to the given boolean array with another
value.
Parameters
----------
to_replace : object or pattern
Scalar to replace or regular expression to match.
value : object
Replacement object.
inplace : bool, default False
Perform inplace modification.
regex : bool, default False
If true, perform regular expression substitution.
convert : bool, default True
If true, try to coerce any object types to better types.
mask : array-like of bool, optional
True indicate corresponding element is ignored.
Returns
-------
A new block if there is anything to replace or the original block.
"""
if mask.any():
block = super()._replace_coerce(
to_replace=to_replace, value=value, inplace=inplace,
regex=regex, convert=convert, mask=mask)
if convert:
block = [b.convert(by_item=True, numeric=False, copy=True)
for b in block]
return block
return self | [
"def",
"_replace_coerce",
"(",
"self",
",",
"to_replace",
",",
"value",
",",
"inplace",
"=",
"True",
",",
"regex",
"=",
"False",
",",
"convert",
"=",
"False",
",",
"mask",
"=",
"None",
")",
":",
"if",
"mask",
".",
"any",
"(",
")",
":",
"block",
"=",
"super",
"(",
")",
".",
"_replace_coerce",
"(",
"to_replace",
"=",
"to_replace",
",",
"value",
"=",
"value",
",",
"inplace",
"=",
"inplace",
",",
"regex",
"=",
"regex",
",",
"convert",
"=",
"convert",
",",
"mask",
"=",
"mask",
")",
"if",
"convert",
":",
"block",
"=",
"[",
"b",
".",
"convert",
"(",
"by_item",
"=",
"True",
",",
"numeric",
"=",
"False",
",",
"copy",
"=",
"True",
")",
"for",
"b",
"in",
"block",
"]",
"return",
"block",
"return",
"self"
] | 37.470588 | 17.588235 |
def visit(self, node):
"""walk on the tree from <node>, getting callbacks from handler"""
method = self.get_callbacks(node)[0]
if method is not None:
method(node) | [
"def",
"visit",
"(",
"self",
",",
"node",
")",
":",
"method",
"=",
"self",
".",
"get_callbacks",
"(",
"node",
")",
"[",
"0",
"]",
"if",
"method",
"is",
"not",
"None",
":",
"method",
"(",
"node",
")"
] | 38.8 | 9.6 |
def parse_number_of_html_pages(html_question):
"""Parse number of answer pages to paginate over them.
:param html_question: raw HTML question element
:returns: an integer with the number of pages
"""
bs_question = bs4.BeautifulSoup(html_question, "html.parser")
try:
bs_question.select('div.paginator')[0]
except IndexError:
return 1
else:
return int(bs_question.select('div.paginator')[0].attrs['data-num-pages']) | [
"def",
"parse_number_of_html_pages",
"(",
"html_question",
")",
":",
"bs_question",
"=",
"bs4",
".",
"BeautifulSoup",
"(",
"html_question",
",",
"\"html.parser\"",
")",
"try",
":",
"bs_question",
".",
"select",
"(",
"'div.paginator'",
")",
"[",
"0",
"]",
"except",
"IndexError",
":",
"return",
"1",
"else",
":",
"return",
"int",
"(",
"bs_question",
".",
"select",
"(",
"'div.paginator'",
")",
"[",
"0",
"]",
".",
"attrs",
"[",
"'data-num-pages'",
"]",
")"
] | 35.928571 | 20.571429 |
def permute(self, qubits: Qubits) -> 'Density':
"""Return a copy of this state with qubit labels permuted"""
vec = self.vec.permute(qubits)
return Density(vec.tensor, vec.qubits, self._memory) | [
"def",
"permute",
"(",
"self",
",",
"qubits",
":",
"Qubits",
")",
"->",
"'Density'",
":",
"vec",
"=",
"self",
".",
"vec",
".",
"permute",
"(",
"qubits",
")",
"return",
"Density",
"(",
"vec",
".",
"tensor",
",",
"vec",
".",
"qubits",
",",
"self",
".",
"_memory",
")"
] | 53.25 | 7.25 |
def index(
config, date=None, directory=None, concurrency=5, accounts=None,
tag=None, verbose=False):
"""index traildbs directly from s3 for multiple accounts.
context: assumes a daily traildb file in s3 with dated key path
"""
logging.basicConfig(level=(verbose and logging.DEBUG or logging.INFO))
logging.getLogger('botocore').setLevel(logging.WARNING)
logging.getLogger('elasticsearch').setLevel(logging.WARNING)
logging.getLogger('urllib3').setLevel(logging.WARNING)
logging.getLogger('requests').setLevel(logging.WARNING)
logging.getLogger('c7n.worker').setLevel(logging.INFO)
with open(config) as fh:
config = yaml.safe_load(fh.read())
jsonschema.validate(config, CONFIG_SCHEMA)
date = get_date_path(date, delta=24)
directory = directory or "/tmp"
with ProcessPoolExecutor(max_workers=concurrency) as w:
futures = {}
jobs = []
for account in config.get('accounts'):
if accounts and account['name'] not in accounts:
continue
if tag:
found = False
for t in account['tags'].values():
if tag == t:
found = True
break
if not found:
continue
for region in account.get('regions'):
p = (config, account, region, date, directory)
jobs.append(p)
for j in jobs:
log.debug("submit account:{} region:{} date:{}".format(
j[1]['name'], j[2], j[3]))
futures[w.submit(index_account_trails, *j)] = j
# Process completed
for f in as_completed(futures):
config, account, region, date, directory = futures[f]
if f.exception():
log.warning("error account:{} region:{} error:{}".format(
account['name'], region, f.exception()))
continue
log.info("complete account:{} region:{}".format(
account['name'], region)) | [
"def",
"index",
"(",
"config",
",",
"date",
"=",
"None",
",",
"directory",
"=",
"None",
",",
"concurrency",
"=",
"5",
",",
"accounts",
"=",
"None",
",",
"tag",
"=",
"None",
",",
"verbose",
"=",
"False",
")",
":",
"logging",
".",
"basicConfig",
"(",
"level",
"=",
"(",
"verbose",
"and",
"logging",
".",
"DEBUG",
"or",
"logging",
".",
"INFO",
")",
")",
"logging",
".",
"getLogger",
"(",
"'botocore'",
")",
".",
"setLevel",
"(",
"logging",
".",
"WARNING",
")",
"logging",
".",
"getLogger",
"(",
"'elasticsearch'",
")",
".",
"setLevel",
"(",
"logging",
".",
"WARNING",
")",
"logging",
".",
"getLogger",
"(",
"'urllib3'",
")",
".",
"setLevel",
"(",
"logging",
".",
"WARNING",
")",
"logging",
".",
"getLogger",
"(",
"'requests'",
")",
".",
"setLevel",
"(",
"logging",
".",
"WARNING",
")",
"logging",
".",
"getLogger",
"(",
"'c7n.worker'",
")",
".",
"setLevel",
"(",
"logging",
".",
"INFO",
")",
"with",
"open",
"(",
"config",
")",
"as",
"fh",
":",
"config",
"=",
"yaml",
".",
"safe_load",
"(",
"fh",
".",
"read",
"(",
")",
")",
"jsonschema",
".",
"validate",
"(",
"config",
",",
"CONFIG_SCHEMA",
")",
"date",
"=",
"get_date_path",
"(",
"date",
",",
"delta",
"=",
"24",
")",
"directory",
"=",
"directory",
"or",
"\"/tmp\"",
"with",
"ProcessPoolExecutor",
"(",
"max_workers",
"=",
"concurrency",
")",
"as",
"w",
":",
"futures",
"=",
"{",
"}",
"jobs",
"=",
"[",
"]",
"for",
"account",
"in",
"config",
".",
"get",
"(",
"'accounts'",
")",
":",
"if",
"accounts",
"and",
"account",
"[",
"'name'",
"]",
"not",
"in",
"accounts",
":",
"continue",
"if",
"tag",
":",
"found",
"=",
"False",
"for",
"t",
"in",
"account",
"[",
"'tags'",
"]",
".",
"values",
"(",
")",
":",
"if",
"tag",
"==",
"t",
":",
"found",
"=",
"True",
"break",
"if",
"not",
"found",
":",
"continue",
"for",
"region",
"in",
"account",
".",
"get",
"(",
"'regions'",
")",
":",
"p",
"=",
"(",
"config",
",",
"account",
",",
"region",
",",
"date",
",",
"directory",
")",
"jobs",
".",
"append",
"(",
"p",
")",
"for",
"j",
"in",
"jobs",
":",
"log",
".",
"debug",
"(",
"\"submit account:{} region:{} date:{}\"",
".",
"format",
"(",
"j",
"[",
"1",
"]",
"[",
"'name'",
"]",
",",
"j",
"[",
"2",
"]",
",",
"j",
"[",
"3",
"]",
")",
")",
"futures",
"[",
"w",
".",
"submit",
"(",
"index_account_trails",
",",
"*",
"j",
")",
"]",
"=",
"j",
"# Process completed",
"for",
"f",
"in",
"as_completed",
"(",
"futures",
")",
":",
"config",
",",
"account",
",",
"region",
",",
"date",
",",
"directory",
"=",
"futures",
"[",
"f",
"]",
"if",
"f",
".",
"exception",
"(",
")",
":",
"log",
".",
"warning",
"(",
"\"error account:{} region:{} error:{}\"",
".",
"format",
"(",
"account",
"[",
"'name'",
"]",
",",
"region",
",",
"f",
".",
"exception",
"(",
")",
")",
")",
"continue",
"log",
".",
"info",
"(",
"\"complete account:{} region:{}\"",
".",
"format",
"(",
"account",
"[",
"'name'",
"]",
",",
"region",
")",
")"
] | 37.090909 | 18.4 |
def _set_ipv6_config(self, v, load=False):
"""
Setter method for ipv6_config, mapped from YANG variable /routing_system/interface/ve/ipv6/ipv6_config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_ipv6_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ipv6_config() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=ipv6_config.ipv6_config, is_container='container', presence=False, yang_name="ipv6-config", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the IPv6 address of an interface', u'cli-drop-node-name': None, u'callpoint': u'intf-vlan-ipv6-cfg-cp', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_IPv6_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-config', defining_module='brocade-ipv6-config', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ipv6_config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=ipv6_config.ipv6_config, is_container='container', presence=False, yang_name="ipv6-config", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the IPv6 address of an interface', u'cli-drop-node-name': None, u'callpoint': u'intf-vlan-ipv6-cfg-cp', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_IPv6_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-config', defining_module='brocade-ipv6-config', yang_type='container', is_config=True)""",
})
self.__ipv6_config = t
if hasattr(self, '_set'):
self._set() | [
"def",
"_set_ipv6_config",
"(",
"self",
",",
"v",
",",
"load",
"=",
"False",
")",
":",
"if",
"hasattr",
"(",
"v",
",",
"\"_utype\"",
")",
":",
"v",
"=",
"v",
".",
"_utype",
"(",
"v",
")",
"try",
":",
"t",
"=",
"YANGDynClass",
"(",
"v",
",",
"base",
"=",
"ipv6_config",
".",
"ipv6_config",
",",
"is_container",
"=",
"'container'",
",",
"presence",
"=",
"False",
",",
"yang_name",
"=",
"\"ipv6-config\"",
",",
"rest_name",
"=",
"\"\"",
",",
"parent",
"=",
"self",
",",
"path_helper",
"=",
"self",
".",
"_path_helper",
",",
"extmethods",
"=",
"self",
".",
"_extmethods",
",",
"register_paths",
"=",
"True",
",",
"extensions",
"=",
"{",
"u'tailf-common'",
":",
"{",
"u'info'",
":",
"u'Set the IPv6 address of an interface'",
",",
"u'cli-drop-node-name'",
":",
"None",
",",
"u'callpoint'",
":",
"u'intf-vlan-ipv6-cfg-cp'",
",",
"u'sort-priority'",
":",
"u'RUNNCFG_INTERFACE_LEVEL_IPv6_CONFIG'",
"}",
"}",
",",
"namespace",
"=",
"'urn:brocade.com:mgmt:brocade-ipv6-config'",
",",
"defining_module",
"=",
"'brocade-ipv6-config'",
",",
"yang_type",
"=",
"'container'",
",",
"is_config",
"=",
"True",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"raise",
"ValueError",
"(",
"{",
"'error-string'",
":",
"\"\"\"ipv6_config must be of a type compatible with container\"\"\"",
",",
"'defined-type'",
":",
"\"container\"",
",",
"'generated-type'",
":",
"\"\"\"YANGDynClass(base=ipv6_config.ipv6_config, is_container='container', presence=False, yang_name=\"ipv6-config\", rest_name=\"\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the IPv6 address of an interface', u'cli-drop-node-name': None, u'callpoint': u'intf-vlan-ipv6-cfg-cp', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_IPv6_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-config', defining_module='brocade-ipv6-config', yang_type='container', is_config=True)\"\"\"",
",",
"}",
")",
"self",
".",
"__ipv6_config",
"=",
"t",
"if",
"hasattr",
"(",
"self",
",",
"'_set'",
")",
":",
"self",
".",
"_set",
"(",
")"
] | 86.636364 | 40.863636 |
def get_artifact_filename(self, package_name, artifact_name):
"""
Similar to pkg_resources.resource_filename, however this works
with the information cached in this registry instance, and
arguments are not quite the same.
Arguments:
package_name
The name of the package to get the artifact from
artifact_name
The exact name of the artifact.
Returns the path of where the artifact should be if it has been
declared, otherwise None.
"""
project_name = self.packages.normalize(package_name)
return self.records.get((project_name, artifact_name)) | [
"def",
"get_artifact_filename",
"(",
"self",
",",
"package_name",
",",
"artifact_name",
")",
":",
"project_name",
"=",
"self",
".",
"packages",
".",
"normalize",
"(",
"package_name",
")",
"return",
"self",
".",
"records",
".",
"get",
"(",
"(",
"project_name",
",",
"artifact_name",
")",
")"
] | 34.105263 | 21.157895 |
def width(self, level):
"""
Width at given level
:param level:
:return:
"""
return self.x_at_y(level, reverse=True) - self.x_at_y(level) | [
"def",
"width",
"(",
"self",
",",
"level",
")",
":",
"return",
"self",
".",
"x_at_y",
"(",
"level",
",",
"reverse",
"=",
"True",
")",
"-",
"self",
".",
"x_at_y",
"(",
"level",
")"
] | 25.428571 | 14.285714 |
def save_ext(self):
"""Write the internal data into an external data file."""
try:
sequencemanager = hydpy.pub.sequencemanager
except AttributeError:
raise RuntimeError(
'The time series of sequence %s cannot be saved. Firstly,'
'you have to prepare `pub.sequencemanager` correctly.'
% objecttools.devicephrase(self))
sequencemanager.save_file(self) | [
"def",
"save_ext",
"(",
"self",
")",
":",
"try",
":",
"sequencemanager",
"=",
"hydpy",
".",
"pub",
".",
"sequencemanager",
"except",
"AttributeError",
":",
"raise",
"RuntimeError",
"(",
"'The time series of sequence %s cannot be saved. Firstly,'",
"'you have to prepare `pub.sequencemanager` correctly.'",
"%",
"objecttools",
".",
"devicephrase",
"(",
"self",
")",
")",
"sequencemanager",
".",
"save_file",
"(",
"self",
")"
] | 44.5 | 15.8 |
def get_stack_frame(self, max_size = None):
"""
Reads the contents of the current stack frame.
Only works for functions with standard prologue and epilogue.
@type max_size: int
@param max_size: (Optional) Maximum amount of bytes to read.
@rtype: str
@return: Stack frame data.
May not be accurate, depending on the compiler used.
May return an empty string.
@raise RuntimeError: The stack frame is invalid,
or the function doesn't have a standard prologue
and epilogue.
@raise WindowsError: An error occured when getting the thread context
or reading data from the process memory.
"""
sp, fp = self.get_stack_frame_range()
size = fp - sp
if max_size and size > max_size:
size = max_size
return self.get_process().peek(sp, size) | [
"def",
"get_stack_frame",
"(",
"self",
",",
"max_size",
"=",
"None",
")",
":",
"sp",
",",
"fp",
"=",
"self",
".",
"get_stack_frame_range",
"(",
")",
"size",
"=",
"fp",
"-",
"sp",
"if",
"max_size",
"and",
"size",
">",
"max_size",
":",
"size",
"=",
"max_size",
"return",
"self",
".",
"get_process",
"(",
")",
".",
"peek",
"(",
"sp",
",",
"size",
")"
] | 35.96 | 17.56 |
def _iter_font_files_in(cls, directory):
"""
Generate the OpenType font files found in and under *directory*. Each
item is a key/value pair. The key is a (family_name, is_bold,
is_italic) 3-tuple, like ('Arial', True, False), and the value is the
absolute path to the font file.
"""
for root, dirs, files in os.walk(directory):
for filename in files:
file_ext = os.path.splitext(filename)[1]
if file_ext.lower() not in ('.otf', '.ttf'):
continue
path = os.path.abspath(os.path.join(root, filename))
with _Font.open(path) as f:
yield ((f.family_name, f.is_bold, f.is_italic), path) | [
"def",
"_iter_font_files_in",
"(",
"cls",
",",
"directory",
")",
":",
"for",
"root",
",",
"dirs",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"directory",
")",
":",
"for",
"filename",
"in",
"files",
":",
"file_ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"filename",
")",
"[",
"1",
"]",
"if",
"file_ext",
".",
"lower",
"(",
")",
"not",
"in",
"(",
"'.otf'",
",",
"'.ttf'",
")",
":",
"continue",
"path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"filename",
")",
")",
"with",
"_Font",
".",
"open",
"(",
"path",
")",
"as",
"f",
":",
"yield",
"(",
"(",
"f",
".",
"family_name",
",",
"f",
".",
"is_bold",
",",
"f",
".",
"is_italic",
")",
",",
"path",
")"
] | 49.2 | 15.6 |
def print_all_commands(self, *, no_pager=False):
"""Print help for all commands.
Commands are sorted in alphabetical order and wrapping is done
based on the width of the terminal.
"""
formatter = self.parent_parser._get_formatter()
command_names = sorted(self.parent_parser.subparsers.choices.keys())
max_name_len = max([len(name) for name in command_names]) + 1
commands = ""
for name in command_names:
command = self.parent_parser.subparsers.choices[name]
extra_padding = max_name_len - len(name)
command_line = '%s%s%s' % (
name, ' ' * extra_padding, command.description)
while len(command_line) > formatter._width:
lines = textwrap.wrap(command_line, formatter._width)
commands += "%s\n" % lines[0]
if len(lines) > 1:
lines[1] = (' ' * max_name_len) + lines[1]
command_line = ' '.join(lines[1:])
else:
command_line = None
if command_line:
commands += "%s\n" % command_line
if no_pager:
print(commands[:-1])
else:
print_with_pager(commands[:-1]) | [
"def",
"print_all_commands",
"(",
"self",
",",
"*",
",",
"no_pager",
"=",
"False",
")",
":",
"formatter",
"=",
"self",
".",
"parent_parser",
".",
"_get_formatter",
"(",
")",
"command_names",
"=",
"sorted",
"(",
"self",
".",
"parent_parser",
".",
"subparsers",
".",
"choices",
".",
"keys",
"(",
")",
")",
"max_name_len",
"=",
"max",
"(",
"[",
"len",
"(",
"name",
")",
"for",
"name",
"in",
"command_names",
"]",
")",
"+",
"1",
"commands",
"=",
"\"\"",
"for",
"name",
"in",
"command_names",
":",
"command",
"=",
"self",
".",
"parent_parser",
".",
"subparsers",
".",
"choices",
"[",
"name",
"]",
"extra_padding",
"=",
"max_name_len",
"-",
"len",
"(",
"name",
")",
"command_line",
"=",
"'%s%s%s'",
"%",
"(",
"name",
",",
"' '",
"*",
"extra_padding",
",",
"command",
".",
"description",
")",
"while",
"len",
"(",
"command_line",
")",
">",
"formatter",
".",
"_width",
":",
"lines",
"=",
"textwrap",
".",
"wrap",
"(",
"command_line",
",",
"formatter",
".",
"_width",
")",
"commands",
"+=",
"\"%s\\n\"",
"%",
"lines",
"[",
"0",
"]",
"if",
"len",
"(",
"lines",
")",
">",
"1",
":",
"lines",
"[",
"1",
"]",
"=",
"(",
"' '",
"*",
"max_name_len",
")",
"+",
"lines",
"[",
"1",
"]",
"command_line",
"=",
"' '",
".",
"join",
"(",
"lines",
"[",
"1",
":",
"]",
")",
"else",
":",
"command_line",
"=",
"None",
"if",
"command_line",
":",
"commands",
"+=",
"\"%s\\n\"",
"%",
"command_line",
"if",
"no_pager",
":",
"print",
"(",
"commands",
"[",
":",
"-",
"1",
"]",
")",
"else",
":",
"print_with_pager",
"(",
"commands",
"[",
":",
"-",
"1",
"]",
")"
] | 43.068966 | 15.068966 |
def get_group_partition(group, partition_count):
"""Given a group name, return the partition number of the consumer offset
topic containing the data associated to that group."""
def java_string_hashcode(s):
h = 0
for c in s:
h = (31 * h + ord(c)) & 0xFFFFFFFF
return ((h + 0x80000000) & 0xFFFFFFFF) - 0x80000000
return abs(java_string_hashcode(group)) % partition_count | [
"def",
"get_group_partition",
"(",
"group",
",",
"partition_count",
")",
":",
"def",
"java_string_hashcode",
"(",
"s",
")",
":",
"h",
"=",
"0",
"for",
"c",
"in",
"s",
":",
"h",
"=",
"(",
"31",
"*",
"h",
"+",
"ord",
"(",
"c",
")",
")",
"&",
"0xFFFFFFFF",
"return",
"(",
"(",
"h",
"+",
"0x80000000",
")",
"&",
"0xFFFFFFFF",
")",
"-",
"0x80000000",
"return",
"abs",
"(",
"java_string_hashcode",
"(",
"group",
")",
")",
"%",
"partition_count"
] | 45.888889 | 12.222222 |
def _check_vmware_player_requirements(self, player_version):
"""
Check minimum requirements to use VMware Player.
VIX 1.13 was the release for Player 6.
VIX 1.14 was the release for Player 7.
VIX 1.15 was the release for Workstation Player 12.
:param player_version: VMware Player major version.
"""
player_version = int(player_version)
if player_version < 6:
raise VMwareError("Using VMware Player requires version 6 or above")
elif player_version == 6:
yield from self.check_vmrun_version(minimum_required_version="1.13.0")
elif player_version == 7:
yield from self.check_vmrun_version(minimum_required_version="1.14.0")
elif player_version >= 12:
yield from self.check_vmrun_version(minimum_required_version="1.15.0")
self._host_type = "player" | [
"def",
"_check_vmware_player_requirements",
"(",
"self",
",",
"player_version",
")",
":",
"player_version",
"=",
"int",
"(",
"player_version",
")",
"if",
"player_version",
"<",
"6",
":",
"raise",
"VMwareError",
"(",
"\"Using VMware Player requires version 6 or above\"",
")",
"elif",
"player_version",
"==",
"6",
":",
"yield",
"from",
"self",
".",
"check_vmrun_version",
"(",
"minimum_required_version",
"=",
"\"1.13.0\"",
")",
"elif",
"player_version",
"==",
"7",
":",
"yield",
"from",
"self",
".",
"check_vmrun_version",
"(",
"minimum_required_version",
"=",
"\"1.14.0\"",
")",
"elif",
"player_version",
">=",
"12",
":",
"yield",
"from",
"self",
".",
"check_vmrun_version",
"(",
"minimum_required_version",
"=",
"\"1.15.0\"",
")",
"self",
".",
"_host_type",
"=",
"\"player\""
] | 42 | 19.619048 |
def make_grammar(allow_errors):
"""Make the part of the grammar that depends on whether we swallow errors or not."""
if allow_errors in GRAMMAR_CACHE:
return GRAMMAR_CACHE[allow_errors]
tuple = p.Forward()
catch_errors = p.Forward()
catch_errors << (p.Regex('[^{};]*') - p.Optional(tuple) - p.Regex('[^;}]*'))
def swallow_remainder():
if allow_errors:
return pattern('swallow_remainder', p.Suppress(catch_errors))
return p.Empty()
def swallow_errors(rule):
"""Extend the production rule by potentially eating errors.
This does not return a p.NoMatch() because that messes up the error messages.
"""
ret = rule
if allow_errors:
# Synchronize on the first semicolon or the first unbalanced closing curly
ret = rule | pattern('catch_errors', parseWithLocation(p.Suppress(catch_errors), UnparseableNode))
return ret
class Grammar:
keywords = ['and', 'or', 'not', 'if', 'then', 'else', 'include', 'inherit', 'null', 'true', 'false',
'for', 'in']
# This is a hack: this condition helps uselessly recursing into the grammar for
# juxtapositions.
early_abort_scan = ~p.oneOf([';', ',', ']', '}', 'for' ])
expression = pattern('expression', p.Forward())
comment = p.Regex('#') + ~p.FollowedBy(sym('.')) + p.restOfLine
doc_comment = pattern('doc_comment', (sym('#.') - p.restOfLine))
quotedIdentifier = pattern('quotedIdentifier', p.QuotedString('`', multiline=False))
# - Must start with an alphascore
# - May contain alphanumericscores and special characters such as : and -
# - Must not end in a special character
identifier = pattern('identifier', parseWithLocation(quotedIdentifier | p.Regex(r'[a-zA-Z_]([a-zA-Z0-9_:-]*[a-zA-Z0-9_])?'), Identifier))
# Variable identifier (can't be any of the keywords, which may have lower matching priority)
variable = pattern('variable', ~p.MatchFirst(p.oneOf(keywords)) + pattern('identifier', parseWithLocation(identifier.copy(), Var)))
# Contants
integer = pattern('integer', parseWithLocation(p.Word(p.nums), convertAndMake(int, Literal)))
floating = pattern('floating', parseWithLocation(p.Regex(r'\d*\.\d+'), convertAndMake(float, Literal)))
dq_string = pattern('dq_string', parseWithLocation(p.QuotedString('"', escChar='\\', unquoteResults=False, multiline=True), convertAndMake(unquote, Literal)))
sq_string = pattern('sq_string', parseWithLocation(p.QuotedString("'", escChar='\\', unquoteResults=False, multiline=True), convertAndMake(unquote, Literal)))
boolean = pattern('boolean', parseWithLocation(p.Keyword('true') | p.Keyword('false'), convertAndMake(mkBool, Literal)))
null = pattern('null', parseWithLocation(p.Keyword('null'), Null))
# List
list_ = pattern('list', parseWithLocation(bracketedList('[', ']', ',', expression), List))
# Tuple
inherit = pattern('inherit', (kw('inherit') - p.ZeroOrMore(variable)).setParseAction(inheritNodes))
schema_spec = pattern('schema_spec', parseWithLocation(p.Optional(p.Keyword('private').setParseAction(lambda: True), default=False)
- p.Optional(p.Keyword('required').setParseAction(lambda: True), default=False)
- p.Optional(expression, default=any_schema_expr), MemberSchemaNode))
optional_schema = pattern('optional_schema', p.Optional(p.Suppress(':') - schema_spec, default=no_schema))
expression_value = pattern('expression_value', sym('=') - swallow_errors(expression))
void_value = pattern('void_value', parseWithLocation(p.FollowedBy(sym(';') | sym('}')), lambda loc: Void(loc, 'nonameyet')))
member_value = pattern('member_value', swallow_errors(expression_value | void_value))
named_member = pattern('named_member', parseWithLocation(identifier - optional_schema - member_value - swallow_remainder(), TupleMemberNode))
documented_member = pattern('documented_member', parseWithLocation(parseWithLocation(p.ZeroOrMore(doc_comment), DocComment) + named_member, attach_doc_comment))
tuple_member = early_abort_scan + pattern('tuple_member', swallow_errors(inherit | documented_member) - swallow_remainder())
ErrorAwareTupleNode = functools.partial(TupleNode, allow_errors)
tuple_members = pattern('tuple_members', parseWithLocation(listMembers(';', tuple_member), ErrorAwareTupleNode))
tuple << pattern('tuple', parseWithLocation(bracketedList('{', '}', ';', tuple_member, allow_missing_close=allow_errors), ErrorAwareTupleNode))
# Argument list will live by itself as a atom. Actually, it's a tuple, but we
# don't call it that because we use that term for something else already :)
arg_list = pattern('arg_list', bracketedList('(', ')', ',', expression).setParseAction(ArgList))
parenthesized_expr = pattern('parenthesized_expr', (sym('(') - expression - ')').setParseAction(head))
unary_op = pattern('unary_op', (p.oneOf(' '.join(functions.unary_operators.keys())) - expression).setParseAction(mkUnOp))
if_then_else = pattern('if_then_else', parseWithLocation(kw('if') + expression +
kw('then') + expression +
kw('else') + expression, Condition))
list_comprehension = pattern('list_comprehension', parseWithLocation(sym('[') + expression + kw('for') + variable + kw('in') +
expression + p.Optional(kw('if') + expression) + sym(']'), ListComprehension))
# We don't allow space-application here
# Now our grammar is becoming very dirty and hackish
deref = pattern('deref', p.Forward())
include = pattern('include', parseWithLocation(kw('include') - deref, Include))
atom = pattern('atom', (tuple
| sq_string
| dq_string
| variable
| floating
| integer
| boolean
| list_
| null
| unary_op
| parenthesized_expr
| if_then_else
| include
| list_comprehension
))
# We have two different forms of function application, so they can have 2
# different precedences. This one: fn(args), which binds stronger than
# dereferencing (fn(args).attr == (fn(args)).attr)
applic1 = pattern('applic1', parseWithLocation(atom - p.ZeroOrMore(arg_list), mkApplications))
# Dereferencing of an expression (obj.bar)
deref << parseWithLocation(applic1 - p.ZeroOrMore(p.Suppress('.') - swallow_errors(identifier)), mkDerefs)
# All binary operators at various precedence levels go here:
# This piece of code does the moral equivalent of:
#
# T = F*F | F/F | F
# E = T+T | T-T | T
#
# etc.
term = deref
for op_level in functions.binary_operators_before_juxtaposition:
operator_syms = list(op_level.keys())
term = (term - p.ZeroOrMore(p.oneOf(operator_syms) - term)).setParseAction(mkBinOps)
# Juxtaposition function application (fn arg), must be 1-arg every time
applic2 = pattern('applic2', parseWithLocation(term - p.ZeroOrMore(early_abort_scan + term), mkApplications))
term = applic2
for op_level in functions.binary_operators_after_juxtaposition:
operator_syms = list(op_level.keys())
term = (term - p.ZeroOrMore(p.oneOf(operator_syms) - term)).setParseAction(mkBinOps)
expression << term
# Two entry points: start at an arbitrary expression, or expect the top-level
# scope to be a tuple.
start = pattern('start', expression.copy().ignore(comment))
start_tuple = tuple_members.ignore(comment)
GRAMMAR_CACHE[allow_errors] = Grammar
return Grammar | [
"def",
"make_grammar",
"(",
"allow_errors",
")",
":",
"if",
"allow_errors",
"in",
"GRAMMAR_CACHE",
":",
"return",
"GRAMMAR_CACHE",
"[",
"allow_errors",
"]",
"tuple",
"=",
"p",
".",
"Forward",
"(",
")",
"catch_errors",
"=",
"p",
".",
"Forward",
"(",
")",
"catch_errors",
"<<",
"(",
"p",
".",
"Regex",
"(",
"'[^{};]*'",
")",
"-",
"p",
".",
"Optional",
"(",
"tuple",
")",
"-",
"p",
".",
"Regex",
"(",
"'[^;}]*'",
")",
")",
"def",
"swallow_remainder",
"(",
")",
":",
"if",
"allow_errors",
":",
"return",
"pattern",
"(",
"'swallow_remainder'",
",",
"p",
".",
"Suppress",
"(",
"catch_errors",
")",
")",
"return",
"p",
".",
"Empty",
"(",
")",
"def",
"swallow_errors",
"(",
"rule",
")",
":",
"\"\"\"Extend the production rule by potentially eating errors.\n\n This does not return a p.NoMatch() because that messes up the error messages.\n \"\"\"",
"ret",
"=",
"rule",
"if",
"allow_errors",
":",
"# Synchronize on the first semicolon or the first unbalanced closing curly",
"ret",
"=",
"rule",
"|",
"pattern",
"(",
"'catch_errors'",
",",
"parseWithLocation",
"(",
"p",
".",
"Suppress",
"(",
"catch_errors",
")",
",",
"UnparseableNode",
")",
")",
"return",
"ret",
"class",
"Grammar",
":",
"keywords",
"=",
"[",
"'and'",
",",
"'or'",
",",
"'not'",
",",
"'if'",
",",
"'then'",
",",
"'else'",
",",
"'include'",
",",
"'inherit'",
",",
"'null'",
",",
"'true'",
",",
"'false'",
",",
"'for'",
",",
"'in'",
"]",
"# This is a hack: this condition helps uselessly recursing into the grammar for",
"# juxtapositions.",
"early_abort_scan",
"=",
"~",
"p",
".",
"oneOf",
"(",
"[",
"';'",
",",
"','",
",",
"']'",
",",
"'}'",
",",
"'for'",
"]",
")",
"expression",
"=",
"pattern",
"(",
"'expression'",
",",
"p",
".",
"Forward",
"(",
")",
")",
"comment",
"=",
"p",
".",
"Regex",
"(",
"'#'",
")",
"+",
"~",
"p",
".",
"FollowedBy",
"(",
"sym",
"(",
"'.'",
")",
")",
"+",
"p",
".",
"restOfLine",
"doc_comment",
"=",
"pattern",
"(",
"'doc_comment'",
",",
"(",
"sym",
"(",
"'#.'",
")",
"-",
"p",
".",
"restOfLine",
")",
")",
"quotedIdentifier",
"=",
"pattern",
"(",
"'quotedIdentifier'",
",",
"p",
".",
"QuotedString",
"(",
"'`'",
",",
"multiline",
"=",
"False",
")",
")",
"# - Must start with an alphascore",
"# - May contain alphanumericscores and special characters such as : and -",
"# - Must not end in a special character",
"identifier",
"=",
"pattern",
"(",
"'identifier'",
",",
"parseWithLocation",
"(",
"quotedIdentifier",
"|",
"p",
".",
"Regex",
"(",
"r'[a-zA-Z_]([a-zA-Z0-9_:-]*[a-zA-Z0-9_])?'",
")",
",",
"Identifier",
")",
")",
"# Variable identifier (can't be any of the keywords, which may have lower matching priority)",
"variable",
"=",
"pattern",
"(",
"'variable'",
",",
"~",
"p",
".",
"MatchFirst",
"(",
"p",
".",
"oneOf",
"(",
"keywords",
")",
")",
"+",
"pattern",
"(",
"'identifier'",
",",
"parseWithLocation",
"(",
"identifier",
".",
"copy",
"(",
")",
",",
"Var",
")",
")",
")",
"# Contants",
"integer",
"=",
"pattern",
"(",
"'integer'",
",",
"parseWithLocation",
"(",
"p",
".",
"Word",
"(",
"p",
".",
"nums",
")",
",",
"convertAndMake",
"(",
"int",
",",
"Literal",
")",
")",
")",
"floating",
"=",
"pattern",
"(",
"'floating'",
",",
"parseWithLocation",
"(",
"p",
".",
"Regex",
"(",
"r'\\d*\\.\\d+'",
")",
",",
"convertAndMake",
"(",
"float",
",",
"Literal",
")",
")",
")",
"dq_string",
"=",
"pattern",
"(",
"'dq_string'",
",",
"parseWithLocation",
"(",
"p",
".",
"QuotedString",
"(",
"'\"'",
",",
"escChar",
"=",
"'\\\\'",
",",
"unquoteResults",
"=",
"False",
",",
"multiline",
"=",
"True",
")",
",",
"convertAndMake",
"(",
"unquote",
",",
"Literal",
")",
")",
")",
"sq_string",
"=",
"pattern",
"(",
"'sq_string'",
",",
"parseWithLocation",
"(",
"p",
".",
"QuotedString",
"(",
"\"'\"",
",",
"escChar",
"=",
"'\\\\'",
",",
"unquoteResults",
"=",
"False",
",",
"multiline",
"=",
"True",
")",
",",
"convertAndMake",
"(",
"unquote",
",",
"Literal",
")",
")",
")",
"boolean",
"=",
"pattern",
"(",
"'boolean'",
",",
"parseWithLocation",
"(",
"p",
".",
"Keyword",
"(",
"'true'",
")",
"|",
"p",
".",
"Keyword",
"(",
"'false'",
")",
",",
"convertAndMake",
"(",
"mkBool",
",",
"Literal",
")",
")",
")",
"null",
"=",
"pattern",
"(",
"'null'",
",",
"parseWithLocation",
"(",
"p",
".",
"Keyword",
"(",
"'null'",
")",
",",
"Null",
")",
")",
"# List",
"list_",
"=",
"pattern",
"(",
"'list'",
",",
"parseWithLocation",
"(",
"bracketedList",
"(",
"'['",
",",
"']'",
",",
"','",
",",
"expression",
")",
",",
"List",
")",
")",
"# Tuple",
"inherit",
"=",
"pattern",
"(",
"'inherit'",
",",
"(",
"kw",
"(",
"'inherit'",
")",
"-",
"p",
".",
"ZeroOrMore",
"(",
"variable",
")",
")",
".",
"setParseAction",
"(",
"inheritNodes",
")",
")",
"schema_spec",
"=",
"pattern",
"(",
"'schema_spec'",
",",
"parseWithLocation",
"(",
"p",
".",
"Optional",
"(",
"p",
".",
"Keyword",
"(",
"'private'",
")",
".",
"setParseAction",
"(",
"lambda",
":",
"True",
")",
",",
"default",
"=",
"False",
")",
"-",
"p",
".",
"Optional",
"(",
"p",
".",
"Keyword",
"(",
"'required'",
")",
".",
"setParseAction",
"(",
"lambda",
":",
"True",
")",
",",
"default",
"=",
"False",
")",
"-",
"p",
".",
"Optional",
"(",
"expression",
",",
"default",
"=",
"any_schema_expr",
")",
",",
"MemberSchemaNode",
")",
")",
"optional_schema",
"=",
"pattern",
"(",
"'optional_schema'",
",",
"p",
".",
"Optional",
"(",
"p",
".",
"Suppress",
"(",
"':'",
")",
"-",
"schema_spec",
",",
"default",
"=",
"no_schema",
")",
")",
"expression_value",
"=",
"pattern",
"(",
"'expression_value'",
",",
"sym",
"(",
"'='",
")",
"-",
"swallow_errors",
"(",
"expression",
")",
")",
"void_value",
"=",
"pattern",
"(",
"'void_value'",
",",
"parseWithLocation",
"(",
"p",
".",
"FollowedBy",
"(",
"sym",
"(",
"';'",
")",
"|",
"sym",
"(",
"'}'",
")",
")",
",",
"lambda",
"loc",
":",
"Void",
"(",
"loc",
",",
"'nonameyet'",
")",
")",
")",
"member_value",
"=",
"pattern",
"(",
"'member_value'",
",",
"swallow_errors",
"(",
"expression_value",
"|",
"void_value",
")",
")",
"named_member",
"=",
"pattern",
"(",
"'named_member'",
",",
"parseWithLocation",
"(",
"identifier",
"-",
"optional_schema",
"-",
"member_value",
"-",
"swallow_remainder",
"(",
")",
",",
"TupleMemberNode",
")",
")",
"documented_member",
"=",
"pattern",
"(",
"'documented_member'",
",",
"parseWithLocation",
"(",
"parseWithLocation",
"(",
"p",
".",
"ZeroOrMore",
"(",
"doc_comment",
")",
",",
"DocComment",
")",
"+",
"named_member",
",",
"attach_doc_comment",
")",
")",
"tuple_member",
"=",
"early_abort_scan",
"+",
"pattern",
"(",
"'tuple_member'",
",",
"swallow_errors",
"(",
"inherit",
"|",
"documented_member",
")",
"-",
"swallow_remainder",
"(",
")",
")",
"ErrorAwareTupleNode",
"=",
"functools",
".",
"partial",
"(",
"TupleNode",
",",
"allow_errors",
")",
"tuple_members",
"=",
"pattern",
"(",
"'tuple_members'",
",",
"parseWithLocation",
"(",
"listMembers",
"(",
"';'",
",",
"tuple_member",
")",
",",
"ErrorAwareTupleNode",
")",
")",
"tuple",
"<<",
"pattern",
"(",
"'tuple'",
",",
"parseWithLocation",
"(",
"bracketedList",
"(",
"'{'",
",",
"'}'",
",",
"';'",
",",
"tuple_member",
",",
"allow_missing_close",
"=",
"allow_errors",
")",
",",
"ErrorAwareTupleNode",
")",
")",
"# Argument list will live by itself as a atom. Actually, it's a tuple, but we",
"# don't call it that because we use that term for something else already :)",
"arg_list",
"=",
"pattern",
"(",
"'arg_list'",
",",
"bracketedList",
"(",
"'('",
",",
"')'",
",",
"','",
",",
"expression",
")",
".",
"setParseAction",
"(",
"ArgList",
")",
")",
"parenthesized_expr",
"=",
"pattern",
"(",
"'parenthesized_expr'",
",",
"(",
"sym",
"(",
"'('",
")",
"-",
"expression",
"-",
"')'",
")",
".",
"setParseAction",
"(",
"head",
")",
")",
"unary_op",
"=",
"pattern",
"(",
"'unary_op'",
",",
"(",
"p",
".",
"oneOf",
"(",
"' '",
".",
"join",
"(",
"functions",
".",
"unary_operators",
".",
"keys",
"(",
")",
")",
")",
"-",
"expression",
")",
".",
"setParseAction",
"(",
"mkUnOp",
")",
")",
"if_then_else",
"=",
"pattern",
"(",
"'if_then_else'",
",",
"parseWithLocation",
"(",
"kw",
"(",
"'if'",
")",
"+",
"expression",
"+",
"kw",
"(",
"'then'",
")",
"+",
"expression",
"+",
"kw",
"(",
"'else'",
")",
"+",
"expression",
",",
"Condition",
")",
")",
"list_comprehension",
"=",
"pattern",
"(",
"'list_comprehension'",
",",
"parseWithLocation",
"(",
"sym",
"(",
"'['",
")",
"+",
"expression",
"+",
"kw",
"(",
"'for'",
")",
"+",
"variable",
"+",
"kw",
"(",
"'in'",
")",
"+",
"expression",
"+",
"p",
".",
"Optional",
"(",
"kw",
"(",
"'if'",
")",
"+",
"expression",
")",
"+",
"sym",
"(",
"']'",
")",
",",
"ListComprehension",
")",
")",
"# We don't allow space-application here",
"# Now our grammar is becoming very dirty and hackish",
"deref",
"=",
"pattern",
"(",
"'deref'",
",",
"p",
".",
"Forward",
"(",
")",
")",
"include",
"=",
"pattern",
"(",
"'include'",
",",
"parseWithLocation",
"(",
"kw",
"(",
"'include'",
")",
"-",
"deref",
",",
"Include",
")",
")",
"atom",
"=",
"pattern",
"(",
"'atom'",
",",
"(",
"tuple",
"|",
"sq_string",
"|",
"dq_string",
"|",
"variable",
"|",
"floating",
"|",
"integer",
"|",
"boolean",
"|",
"list_",
"|",
"null",
"|",
"unary_op",
"|",
"parenthesized_expr",
"|",
"if_then_else",
"|",
"include",
"|",
"list_comprehension",
")",
")",
"# We have two different forms of function application, so they can have 2",
"# different precedences. This one: fn(args), which binds stronger than",
"# dereferencing (fn(args).attr == (fn(args)).attr)",
"applic1",
"=",
"pattern",
"(",
"'applic1'",
",",
"parseWithLocation",
"(",
"atom",
"-",
"p",
".",
"ZeroOrMore",
"(",
"arg_list",
")",
",",
"mkApplications",
")",
")",
"# Dereferencing of an expression (obj.bar)",
"deref",
"<<",
"parseWithLocation",
"(",
"applic1",
"-",
"p",
".",
"ZeroOrMore",
"(",
"p",
".",
"Suppress",
"(",
"'.'",
")",
"-",
"swallow_errors",
"(",
"identifier",
")",
")",
",",
"mkDerefs",
")",
"# All binary operators at various precedence levels go here:",
"# This piece of code does the moral equivalent of:",
"#",
"# T = F*F | F/F | F",
"# E = T+T | T-T | T",
"#",
"# etc.",
"term",
"=",
"deref",
"for",
"op_level",
"in",
"functions",
".",
"binary_operators_before_juxtaposition",
":",
"operator_syms",
"=",
"list",
"(",
"op_level",
".",
"keys",
"(",
")",
")",
"term",
"=",
"(",
"term",
"-",
"p",
".",
"ZeroOrMore",
"(",
"p",
".",
"oneOf",
"(",
"operator_syms",
")",
"-",
"term",
")",
")",
".",
"setParseAction",
"(",
"mkBinOps",
")",
"# Juxtaposition function application (fn arg), must be 1-arg every time",
"applic2",
"=",
"pattern",
"(",
"'applic2'",
",",
"parseWithLocation",
"(",
"term",
"-",
"p",
".",
"ZeroOrMore",
"(",
"early_abort_scan",
"+",
"term",
")",
",",
"mkApplications",
")",
")",
"term",
"=",
"applic2",
"for",
"op_level",
"in",
"functions",
".",
"binary_operators_after_juxtaposition",
":",
"operator_syms",
"=",
"list",
"(",
"op_level",
".",
"keys",
"(",
")",
")",
"term",
"=",
"(",
"term",
"-",
"p",
".",
"ZeroOrMore",
"(",
"p",
".",
"oneOf",
"(",
"operator_syms",
")",
"-",
"term",
")",
")",
".",
"setParseAction",
"(",
"mkBinOps",
")",
"expression",
"<<",
"term",
"# Two entry points: start at an arbitrary expression, or expect the top-level",
"# scope to be a tuple.",
"start",
"=",
"pattern",
"(",
"'start'",
",",
"expression",
".",
"copy",
"(",
")",
".",
"ignore",
"(",
"comment",
")",
")",
"start_tuple",
"=",
"tuple_members",
".",
"ignore",
"(",
"comment",
")",
"GRAMMAR_CACHE",
"[",
"allow_errors",
"]",
"=",
"Grammar",
"return",
"Grammar"
] | 49.713333 | 36.9 |
def bst(height=3, is_perfect=False):
"""Generate a random BST (binary search tree) and return its root node.
:param height: Height of the BST (default: 3, range: 0 - 9 inclusive).
:type height: int
:param is_perfect: If set to True (default: False), a perfect BST with all
levels filled is returned. If set to False, a perfect BST may still be
generated by chance.
:type is_perfect: bool
:return: Root node of the BST.
:rtype: binarytree.Node
:raise binarytree.exceptions.TreeHeightError: If height is invalid.
**Example**:
.. doctest::
>>> from binarytree import bst
>>>
>>> root = bst()
>>>
>>> root.height
3
>>> root.is_bst
True
.. doctest::
>>> from binarytree import bst
>>>
>>> root = bst(10) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
TreeHeightError: height must be an int between 0 - 9
"""
_validate_tree_height(height)
if is_perfect:
return _generate_perfect_bst(height)
values = _generate_random_node_values(height)
leaf_count = _generate_random_leaf_count(height)
root = Node(values.pop(0))
leaves = set()
for value in values:
node = root
depth = 0
inserted = False
while depth < height and not inserted:
attr = 'left' if node.value > value else 'right'
if getattr(node, attr) is None:
setattr(node, attr, Node(value))
inserted = True
node = getattr(node, attr)
depth += 1
if inserted and depth == height:
leaves.add(node)
if len(leaves) == leaf_count:
break
return root | [
"def",
"bst",
"(",
"height",
"=",
"3",
",",
"is_perfect",
"=",
"False",
")",
":",
"_validate_tree_height",
"(",
"height",
")",
"if",
"is_perfect",
":",
"return",
"_generate_perfect_bst",
"(",
"height",
")",
"values",
"=",
"_generate_random_node_values",
"(",
"height",
")",
"leaf_count",
"=",
"_generate_random_leaf_count",
"(",
"height",
")",
"root",
"=",
"Node",
"(",
"values",
".",
"pop",
"(",
"0",
")",
")",
"leaves",
"=",
"set",
"(",
")",
"for",
"value",
"in",
"values",
":",
"node",
"=",
"root",
"depth",
"=",
"0",
"inserted",
"=",
"False",
"while",
"depth",
"<",
"height",
"and",
"not",
"inserted",
":",
"attr",
"=",
"'left'",
"if",
"node",
".",
"value",
">",
"value",
"else",
"'right'",
"if",
"getattr",
"(",
"node",
",",
"attr",
")",
"is",
"None",
":",
"setattr",
"(",
"node",
",",
"attr",
",",
"Node",
"(",
"value",
")",
")",
"inserted",
"=",
"True",
"node",
"=",
"getattr",
"(",
"node",
",",
"attr",
")",
"depth",
"+=",
"1",
"if",
"inserted",
"and",
"depth",
"==",
"height",
":",
"leaves",
".",
"add",
"(",
"node",
")",
"if",
"len",
"(",
"leaves",
")",
"==",
"leaf_count",
":",
"break",
"return",
"root"
] | 27.078125 | 20.703125 |
def remove_spin(self):
"""
Removes spin states from a structure.
"""
for site in self.sites:
new_sp = collections.defaultdict(float)
for sp, occu in site.species.items():
oxi_state = getattr(sp, "oxi_state", None)
new_sp[Specie(sp.symbol, oxidation_state=oxi_state)] += occu
site.species = new_sp | [
"def",
"remove_spin",
"(",
"self",
")",
":",
"for",
"site",
"in",
"self",
".",
"sites",
":",
"new_sp",
"=",
"collections",
".",
"defaultdict",
"(",
"float",
")",
"for",
"sp",
",",
"occu",
"in",
"site",
".",
"species",
".",
"items",
"(",
")",
":",
"oxi_state",
"=",
"getattr",
"(",
"sp",
",",
"\"oxi_state\"",
",",
"None",
")",
"new_sp",
"[",
"Specie",
"(",
"sp",
".",
"symbol",
",",
"oxidation_state",
"=",
"oxi_state",
")",
"]",
"+=",
"occu",
"site",
".",
"species",
"=",
"new_sp"
] | 38.7 | 11.3 |
def new(cls, repo, *tree_sha):
""" Merge the given treeish revisions into a new index which is returned.
This method behaves like git-read-tree --aggressive when doing the merge.
:param repo: The repository treeish are located in.
:param tree_sha:
20 byte or 40 byte tree sha or tree objects
:return:
New IndexFile instance. Its path will be undefined.
If you intend to write such a merged Index, supply an alternate file_path
to its 'write' method."""
base_entries = aggressive_tree_merge(repo.odb, [to_bin_sha(str(t)) for t in tree_sha])
inst = cls(repo)
# convert to entries dict
entries = dict(izip(((e.path, e.stage) for e in base_entries),
(IndexEntry.from_base(e) for e in base_entries)))
inst.entries = entries
return inst | [
"def",
"new",
"(",
"cls",
",",
"repo",
",",
"*",
"tree_sha",
")",
":",
"base_entries",
"=",
"aggressive_tree_merge",
"(",
"repo",
".",
"odb",
",",
"[",
"to_bin_sha",
"(",
"str",
"(",
"t",
")",
")",
"for",
"t",
"in",
"tree_sha",
"]",
")",
"inst",
"=",
"cls",
"(",
"repo",
")",
"# convert to entries dict",
"entries",
"=",
"dict",
"(",
"izip",
"(",
"(",
"(",
"e",
".",
"path",
",",
"e",
".",
"stage",
")",
"for",
"e",
"in",
"base_entries",
")",
",",
"(",
"IndexEntry",
".",
"from_base",
"(",
"e",
")",
"for",
"e",
"in",
"base_entries",
")",
")",
")",
"inst",
".",
"entries",
"=",
"entries",
"return",
"inst"
] | 39.909091 | 25.818182 |
def set_volume_level(self, volume):
"""Set volume level."""
if self._volume_level is not None:
if volume > self._volume_level:
num = int(self._max_volume * (volume - self._volume_level))
self._volume_level = volume
self._device.vol_up(num=num)
elif volume < self._volume_level:
num = int(self._max_volume * (self._volume_level - volume))
self._volume_level = volume
self._device.vol_down(num=num) | [
"def",
"set_volume_level",
"(",
"self",
",",
"volume",
")",
":",
"if",
"self",
".",
"_volume_level",
"is",
"not",
"None",
":",
"if",
"volume",
">",
"self",
".",
"_volume_level",
":",
"num",
"=",
"int",
"(",
"self",
".",
"_max_volume",
"*",
"(",
"volume",
"-",
"self",
".",
"_volume_level",
")",
")",
"self",
".",
"_volume_level",
"=",
"volume",
"self",
".",
"_device",
".",
"vol_up",
"(",
"num",
"=",
"num",
")",
"elif",
"volume",
"<",
"self",
".",
"_volume_level",
":",
"num",
"=",
"int",
"(",
"self",
".",
"_max_volume",
"*",
"(",
"self",
".",
"_volume_level",
"-",
"volume",
")",
")",
"self",
".",
"_volume_level",
"=",
"volume",
"self",
".",
"_device",
".",
"vol_down",
"(",
"num",
"=",
"num",
")"
] | 47.454545 | 9.181818 |
def get_repair_task_list(
self, task_id_filter=None, state_filter=None, executor_filter=None, custom_headers=None, raw=False, **operation_config):
"""Gets a list of repair tasks matching the given filters.
This API supports the Service Fabric platform; it is not meant to be
used directly from your code.
:param task_id_filter: The repair task ID prefix to be matched.
:type task_id_filter: str
:param state_filter: A bitwise-OR of the following values, specifying
which task states should be included in the result list.
- 1 - Created
- 2 - Claimed
- 4 - Preparing
- 8 - Approved
- 16 - Executing
- 32 - Restoring
- 64 - Completed
:type state_filter: int
:param executor_filter: The name of the repair executor whose claimed
tasks should be included in the list.
:type executor_filter: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: list or ClientRawResponse if raw=true
:rtype: list[~azure.servicefabric.models.RepairTask] or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`FabricErrorException<azure.servicefabric.models.FabricErrorException>`
"""
api_version = "6.0"
# Construct URL
url = self.get_repair_task_list.metadata['url']
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if task_id_filter is not None:
query_parameters['TaskIdFilter'] = self._serialize.query("task_id_filter", task_id_filter, 'str')
if state_filter is not None:
query_parameters['StateFilter'] = self._serialize.query("state_filter", state_filter, 'int')
if executor_filter is not None:
query_parameters['ExecutorFilter'] = self._serialize.query("executor_filter", executor_filter, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.FabricErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('[RepairTask]', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized | [
"def",
"get_repair_task_list",
"(",
"self",
",",
"task_id_filter",
"=",
"None",
",",
"state_filter",
"=",
"None",
",",
"executor_filter",
"=",
"None",
",",
"custom_headers",
"=",
"None",
",",
"raw",
"=",
"False",
",",
"*",
"*",
"operation_config",
")",
":",
"api_version",
"=",
"\"6.0\"",
"# Construct URL",
"url",
"=",
"self",
".",
"get_repair_task_list",
".",
"metadata",
"[",
"'url'",
"]",
"# Construct parameters",
"query_parameters",
"=",
"{",
"}",
"query_parameters",
"[",
"'api-version'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"\"api_version\"",
",",
"api_version",
",",
"'str'",
")",
"if",
"task_id_filter",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'TaskIdFilter'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"\"task_id_filter\"",
",",
"task_id_filter",
",",
"'str'",
")",
"if",
"state_filter",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'StateFilter'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"\"state_filter\"",
",",
"state_filter",
",",
"'int'",
")",
"if",
"executor_filter",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'ExecutorFilter'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"\"executor_filter\"",
",",
"executor_filter",
",",
"'str'",
")",
"# Construct headers",
"header_parameters",
"=",
"{",
"}",
"header_parameters",
"[",
"'Accept'",
"]",
"=",
"'application/json'",
"if",
"custom_headers",
":",
"header_parameters",
".",
"update",
"(",
"custom_headers",
")",
"# Construct and send request",
"request",
"=",
"self",
".",
"_client",
".",
"get",
"(",
"url",
",",
"query_parameters",
",",
"header_parameters",
")",
"response",
"=",
"self",
".",
"_client",
".",
"send",
"(",
"request",
",",
"stream",
"=",
"False",
",",
"*",
"*",
"operation_config",
")",
"if",
"response",
".",
"status_code",
"not",
"in",
"[",
"200",
"]",
":",
"raise",
"models",
".",
"FabricErrorException",
"(",
"self",
".",
"_deserialize",
",",
"response",
")",
"deserialized",
"=",
"None",
"if",
"response",
".",
"status_code",
"==",
"200",
":",
"deserialized",
"=",
"self",
".",
"_deserialize",
"(",
"'[RepairTask]'",
",",
"response",
")",
"if",
"raw",
":",
"client_raw_response",
"=",
"ClientRawResponse",
"(",
"deserialized",
",",
"response",
")",
"return",
"client_raw_response",
"return",
"deserialized"
] | 42.070423 | 24.112676 |
def parse_line(self, line):
"""Parses a single line of a GPI.
Return a tuple `(processed_line, entities)`. Typically
there will be a single entity, but in some cases there
may be none (invalid line) or multiple (disjunctive clause in
annotation extensions)
Note: most applications will only need to call this directly if they require fine-grained control of parsing. For most purposes,
:method:`parse_file` can be used over the whole file
Arguments
---------
line : str
A single tab-seperated line from a GPAD file
"""
vals = line.split("\t")
if len(vals) < 7:
self.report.error(line, assocparser.Report.WRONG_NUMBER_OF_COLUMNS, "")
return line, []
if len(vals) < 10 and len(vals) >= 7:
missing_columns = 10 - len(vals)
vals += ["" for i in range(missing_columns)]
[
db,
db_object_id,
db_object_symbol,
db_object_name,
db_object_synonym,
db_object_type,
taxon,
parent_object_id,
xrefs,
properties
] = vals
split_line = assocparser.SplitLine(line=line, values=vals, taxon=taxon)
## --
## db + db_object_id. CARD=1
## --
id = self._pair_to_id(db, db_object_id)
if not self._validate_id(id, split_line, context=assocparser.Report):
return line, []
## --
## db_object_synonym CARD=0..*
## --
synonyms = db_object_synonym.split("|")
if db_object_synonym == "":
synonyms = []
# TODO: DRY
parents = parent_object_id.split("|")
if parent_object_id == "":
parents = []
else:
parents = [self._normalize_id(x) for x in parents]
for p in parents:
self._validate_id(p, split_line, context=assocparser.Report)
xref_ids = xrefs.split("|")
if xrefs == "":
xref_ids = []
obj = {
'id': id,
'label': db_object_symbol,
'full_name': db_object_name,
'synonyms': synonyms,
'type': db_object_type,
'parents': parents,
'xrefs': xref_ids,
'taxon': {
'id': self._taxon_id(taxon, split_line)
}
}
return line, [obj] | [
"def",
"parse_line",
"(",
"self",
",",
"line",
")",
":",
"vals",
"=",
"line",
".",
"split",
"(",
"\"\\t\"",
")",
"if",
"len",
"(",
"vals",
")",
"<",
"7",
":",
"self",
".",
"report",
".",
"error",
"(",
"line",
",",
"assocparser",
".",
"Report",
".",
"WRONG_NUMBER_OF_COLUMNS",
",",
"\"\"",
")",
"return",
"line",
",",
"[",
"]",
"if",
"len",
"(",
"vals",
")",
"<",
"10",
"and",
"len",
"(",
"vals",
")",
">=",
"7",
":",
"missing_columns",
"=",
"10",
"-",
"len",
"(",
"vals",
")",
"vals",
"+=",
"[",
"\"\"",
"for",
"i",
"in",
"range",
"(",
"missing_columns",
")",
"]",
"[",
"db",
",",
"db_object_id",
",",
"db_object_symbol",
",",
"db_object_name",
",",
"db_object_synonym",
",",
"db_object_type",
",",
"taxon",
",",
"parent_object_id",
",",
"xrefs",
",",
"properties",
"]",
"=",
"vals",
"split_line",
"=",
"assocparser",
".",
"SplitLine",
"(",
"line",
"=",
"line",
",",
"values",
"=",
"vals",
",",
"taxon",
"=",
"taxon",
")",
"## --",
"## db + db_object_id. CARD=1",
"## --",
"id",
"=",
"self",
".",
"_pair_to_id",
"(",
"db",
",",
"db_object_id",
")",
"if",
"not",
"self",
".",
"_validate_id",
"(",
"id",
",",
"split_line",
",",
"context",
"=",
"assocparser",
".",
"Report",
")",
":",
"return",
"line",
",",
"[",
"]",
"## --",
"## db_object_synonym CARD=0..*",
"## --",
"synonyms",
"=",
"db_object_synonym",
".",
"split",
"(",
"\"|\"",
")",
"if",
"db_object_synonym",
"==",
"\"\"",
":",
"synonyms",
"=",
"[",
"]",
"# TODO: DRY",
"parents",
"=",
"parent_object_id",
".",
"split",
"(",
"\"|\"",
")",
"if",
"parent_object_id",
"==",
"\"\"",
":",
"parents",
"=",
"[",
"]",
"else",
":",
"parents",
"=",
"[",
"self",
".",
"_normalize_id",
"(",
"x",
")",
"for",
"x",
"in",
"parents",
"]",
"for",
"p",
"in",
"parents",
":",
"self",
".",
"_validate_id",
"(",
"p",
",",
"split_line",
",",
"context",
"=",
"assocparser",
".",
"Report",
")",
"xref_ids",
"=",
"xrefs",
".",
"split",
"(",
"\"|\"",
")",
"if",
"xrefs",
"==",
"\"\"",
":",
"xref_ids",
"=",
"[",
"]",
"obj",
"=",
"{",
"'id'",
":",
"id",
",",
"'label'",
":",
"db_object_symbol",
",",
"'full_name'",
":",
"db_object_name",
",",
"'synonyms'",
":",
"synonyms",
",",
"'type'",
":",
"db_object_type",
",",
"'parents'",
":",
"parents",
",",
"'xrefs'",
":",
"xref_ids",
",",
"'taxon'",
":",
"{",
"'id'",
":",
"self",
".",
"_taxon_id",
"(",
"taxon",
",",
"split_line",
")",
"}",
"}",
"return",
"line",
",",
"[",
"obj",
"]"
] | 29.243902 | 21.170732 |
def build_recursive_gcs_delocalize_env(source, outputs):
"""Return a multi-line string with export statements for the variables.
Arguments:
source: Folder with the data.
For example /mnt/data
outputs: a list of OutputFileParam
Returns:
a multi-line string with a shell script that sets environment variables
corresponding to the outputs.
"""
filtered_outs = [
var for var in outputs
if var.recursive and var.file_provider == job_model.P_GCS
]
return '\n'.join([
'export {0}={1}/{2}'.format(var.name,
source.rstrip('/'),
var.docker_path.rstrip('/'))
for var in filtered_outs
]) | [
"def",
"build_recursive_gcs_delocalize_env",
"(",
"source",
",",
"outputs",
")",
":",
"filtered_outs",
"=",
"[",
"var",
"for",
"var",
"in",
"outputs",
"if",
"var",
".",
"recursive",
"and",
"var",
".",
"file_provider",
"==",
"job_model",
".",
"P_GCS",
"]",
"return",
"'\\n'",
".",
"join",
"(",
"[",
"'export {0}={1}/{2}'",
".",
"format",
"(",
"var",
".",
"name",
",",
"source",
".",
"rstrip",
"(",
"'/'",
")",
",",
"var",
".",
"docker_path",
".",
"rstrip",
"(",
"'/'",
")",
")",
"for",
"var",
"in",
"filtered_outs",
"]",
")"
] | 31.5 | 18.590909 |
def model_sizes(m:nn.Module, size:tuple=(64,64))->Tuple[Sizes,Tensor,Hooks]:
"Pass a dummy input through the model `m` to get the various sizes of activations."
with hook_outputs(m) as hooks:
x = dummy_eval(m, size)
return [o.stored.shape for o in hooks] | [
"def",
"model_sizes",
"(",
"m",
":",
"nn",
".",
"Module",
",",
"size",
":",
"tuple",
"=",
"(",
"64",
",",
"64",
")",
")",
"->",
"Tuple",
"[",
"Sizes",
",",
"Tensor",
",",
"Hooks",
"]",
":",
"with",
"hook_outputs",
"(",
"m",
")",
"as",
"hooks",
":",
"x",
"=",
"dummy_eval",
"(",
"m",
",",
"size",
")",
"return",
"[",
"o",
".",
"stored",
".",
"shape",
"for",
"o",
"in",
"hooks",
"]"
] | 54.8 | 20.8 |
def handle_write(self):
"""
Handle the 'channel writable' state. E.g. send buffered data via a
socket.
"""
with self.lock:
logger.debug("handle_write: queue: {0!r}".format(self._write_queue))
try:
job = self._write_queue.popleft()
except IndexError:
return
if isinstance(job, WriteData):
self._do_write(job.data) # pylint: disable=E1101
elif isinstance(job, ContinueConnect):
self._continue_connect()
elif isinstance(job, StartTLS):
self._initiate_starttls(**job.kwargs)
elif isinstance(job, TLSHandshake):
self._continue_tls_handshake()
else:
raise ValueError("Unrecognized job in the write queue: "
"{0!r}".format(job)) | [
"def",
"handle_write",
"(",
"self",
")",
":",
"with",
"self",
".",
"lock",
":",
"logger",
".",
"debug",
"(",
"\"handle_write: queue: {0!r}\"",
".",
"format",
"(",
"self",
".",
"_write_queue",
")",
")",
"try",
":",
"job",
"=",
"self",
".",
"_write_queue",
".",
"popleft",
"(",
")",
"except",
"IndexError",
":",
"return",
"if",
"isinstance",
"(",
"job",
",",
"WriteData",
")",
":",
"self",
".",
"_do_write",
"(",
"job",
".",
"data",
")",
"# pylint: disable=E1101",
"elif",
"isinstance",
"(",
"job",
",",
"ContinueConnect",
")",
":",
"self",
".",
"_continue_connect",
"(",
")",
"elif",
"isinstance",
"(",
"job",
",",
"StartTLS",
")",
":",
"self",
".",
"_initiate_starttls",
"(",
"*",
"*",
"job",
".",
"kwargs",
")",
"elif",
"isinstance",
"(",
"job",
",",
"TLSHandshake",
")",
":",
"self",
".",
"_continue_tls_handshake",
"(",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Unrecognized job in the write queue: \"",
"\"{0!r}\"",
".",
"format",
"(",
"job",
")",
")"
] | 40.363636 | 15.181818 |
def resize_volume(self, volumeObj, sizeInGb, bsize=1000):
"""
Resize a volume to new GB size, must be larger than original.
:param volumeObj: ScaleIO Volume Object
:param sizeInGb: New size in GB (have to be larger than original)
:param bsize: 1000
:return: POST request response
:rtype: Requests POST response object
"""
current_vol = self.get_volume_by_id(volumeObj.id)
if current_vol.size_kb > (sizeInGb * bsize * bsize):
raise RuntimeError(
"resize_volume() - New size needs to be bigger than: %d KBs" % current_vol.size_kb)
resizeDict = { 'sizeInGB' : str(sizeInGb) }
response = self.conn.connection._do_post("{}/{}{}/{}".format(
self.conn.connection._api_url, "instances/Volume::", volumeObj.id, 'action/setVolumeSize'), json=resizeDict)
return response | [
"def",
"resize_volume",
"(",
"self",
",",
"volumeObj",
",",
"sizeInGb",
",",
"bsize",
"=",
"1000",
")",
":",
"current_vol",
"=",
"self",
".",
"get_volume_by_id",
"(",
"volumeObj",
".",
"id",
")",
"if",
"current_vol",
".",
"size_kb",
">",
"(",
"sizeInGb",
"*",
"bsize",
"*",
"bsize",
")",
":",
"raise",
"RuntimeError",
"(",
"\"resize_volume() - New size needs to be bigger than: %d KBs\"",
"%",
"current_vol",
".",
"size_kb",
")",
"resizeDict",
"=",
"{",
"'sizeInGB'",
":",
"str",
"(",
"sizeInGb",
")",
"}",
"response",
"=",
"self",
".",
"conn",
".",
"connection",
".",
"_do_post",
"(",
"\"{}/{}{}/{}\"",
".",
"format",
"(",
"self",
".",
"conn",
".",
"connection",
".",
"_api_url",
",",
"\"instances/Volume::\"",
",",
"volumeObj",
".",
"id",
",",
"'action/setVolumeSize'",
")",
",",
"json",
"=",
"resizeDict",
")",
"return",
"response"
] | 49.722222 | 21.166667 |
def add_team(name,
description=None,
repo_names=None,
privacy=None,
permission=None,
profile="github"):
'''
Create a new Github team within an organization.
name
The name of the team to be created.
description
The description of the team.
repo_names
The names of repositories to add the team to.
privacy
The level of privacy for the team, can be 'secret' or 'closed'.
permission
The default permission for new repositories added to the team, can be
'pull', 'push' or 'admin'.
profile
The name of the profile configuration to use. Defaults to ``github``.
CLI Example:
.. code-block:: bash
salt myminion github.add_team 'team_name'
.. versionadded:: 2016.11.0
'''
try:
client = _get_client(profile)
organization = client.get_organization(
_get_config_value(profile, 'org_name')
)
parameters = {}
parameters['name'] = name
if description is not None:
parameters['description'] = description
if repo_names is not None:
parameters['repo_names'] = repo_names
if permission is not None:
parameters['permission'] = permission
if privacy is not None:
parameters['privacy'] = privacy
organization._requester.requestJsonAndCheck(
'POST',
organization.url + '/teams',
input=parameters
)
list_teams(ignore_cache=True) # Refresh cache
return True
except github.GithubException:
log.exception('Error creating a team')
return False | [
"def",
"add_team",
"(",
"name",
",",
"description",
"=",
"None",
",",
"repo_names",
"=",
"None",
",",
"privacy",
"=",
"None",
",",
"permission",
"=",
"None",
",",
"profile",
"=",
"\"github\"",
")",
":",
"try",
":",
"client",
"=",
"_get_client",
"(",
"profile",
")",
"organization",
"=",
"client",
".",
"get_organization",
"(",
"_get_config_value",
"(",
"profile",
",",
"'org_name'",
")",
")",
"parameters",
"=",
"{",
"}",
"parameters",
"[",
"'name'",
"]",
"=",
"name",
"if",
"description",
"is",
"not",
"None",
":",
"parameters",
"[",
"'description'",
"]",
"=",
"description",
"if",
"repo_names",
"is",
"not",
"None",
":",
"parameters",
"[",
"'repo_names'",
"]",
"=",
"repo_names",
"if",
"permission",
"is",
"not",
"None",
":",
"parameters",
"[",
"'permission'",
"]",
"=",
"permission",
"if",
"privacy",
"is",
"not",
"None",
":",
"parameters",
"[",
"'privacy'",
"]",
"=",
"privacy",
"organization",
".",
"_requester",
".",
"requestJsonAndCheck",
"(",
"'POST'",
",",
"organization",
".",
"url",
"+",
"'/teams'",
",",
"input",
"=",
"parameters",
")",
"list_teams",
"(",
"ignore_cache",
"=",
"True",
")",
"# Refresh cache",
"return",
"True",
"except",
"github",
".",
"GithubException",
":",
"log",
".",
"exception",
"(",
"'Error creating a team'",
")",
"return",
"False"
] | 26.460317 | 20.619048 |
def do_global_lock(self, args):
'''read (or clear) the global lock'''
if args.purge:
self.task_master.registry.force_clear_lock()
else:
owner = self.task_master.registry.read_lock()
if owner:
heartbeat = self.task_master.get_heartbeat(owner)
if 'hostname' in heartbeat:
self.stdout.write('{0} ({1})\n'.format(owner,
heartbeat['hostname']))
else:
self.stdout.write('{0}\n'.format(owner))
else:
self.stdout.write('(unlocked)\n') | [
"def",
"do_global_lock",
"(",
"self",
",",
"args",
")",
":",
"if",
"args",
".",
"purge",
":",
"self",
".",
"task_master",
".",
"registry",
".",
"force_clear_lock",
"(",
")",
"else",
":",
"owner",
"=",
"self",
".",
"task_master",
".",
"registry",
".",
"read_lock",
"(",
")",
"if",
"owner",
":",
"heartbeat",
"=",
"self",
".",
"task_master",
".",
"get_heartbeat",
"(",
"owner",
")",
"if",
"'hostname'",
"in",
"heartbeat",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"'{0} ({1})\\n'",
".",
"format",
"(",
"owner",
",",
"heartbeat",
"[",
"'hostname'",
"]",
")",
")",
"else",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"'{0}\\n'",
".",
"format",
"(",
"owner",
")",
")",
"else",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"'(unlocked)\\n'",
")"
] | 43 | 18.333333 |
def asdict(self):
"""Return a recursive dict representation of self
"""
d = dict(self._odict)
for k,v in d.items():
if isinstance(v, Struct):
d[k] = v.asdict()
return d | [
"def",
"asdict",
"(",
"self",
")",
":",
"d",
"=",
"dict",
"(",
"self",
".",
"_odict",
")",
"for",
"k",
",",
"v",
"in",
"d",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"v",
",",
"Struct",
")",
":",
"d",
"[",
"k",
"]",
"=",
"v",
".",
"asdict",
"(",
")",
"return",
"d"
] | 22.9 | 15.9 |
def create_job_queue(self, queue_name, priority, state, compute_env_order):
"""
Create a job queue
:param queue_name: Queue name
:type queue_name: str
:param priority: Queue priority
:type priority: int
:param state: Queue state
:type state: string
:param compute_env_order: Compute environment list
:type compute_env_order: list of dict
:return: Tuple of Name, ARN
:rtype: tuple of str
"""
for variable, var_name in ((queue_name, 'jobQueueName'), (priority, 'priority'), (state, 'state'), (compute_env_order, 'computeEnvironmentOrder')):
if variable is None:
raise ClientException('{0} must be provided'.format(var_name))
if state not in ('ENABLED', 'DISABLED'):
raise ClientException('state {0} must be one of ENABLED | DISABLED'.format(state))
if self.get_job_queue_by_name(queue_name) is not None:
raise ClientException('Job queue {0} already exists'.format(queue_name))
if len(compute_env_order) == 0:
raise ClientException('At least 1 compute environment must be provided')
try:
# orders and extracts computeEnvironment names
ordered_compute_environments = [item['computeEnvironment'] for item in sorted(compute_env_order, key=lambda x: x['order'])]
env_objects = []
# Check each ARN exists, then make a list of compute env's
for arn in ordered_compute_environments:
env = self.get_compute_environment_by_arn(arn)
if env is None:
raise ClientException('Compute environment {0} does not exist'.format(arn))
env_objects.append(env)
except Exception:
raise ClientException('computeEnvironmentOrder is malformed')
# Create new Job Queue
queue = JobQueue(queue_name, priority, state, env_objects, compute_env_order, self.region_name)
self._job_queues[queue.arn] = queue
return queue_name, queue.arn | [
"def",
"create_job_queue",
"(",
"self",
",",
"queue_name",
",",
"priority",
",",
"state",
",",
"compute_env_order",
")",
":",
"for",
"variable",
",",
"var_name",
"in",
"(",
"(",
"queue_name",
",",
"'jobQueueName'",
")",
",",
"(",
"priority",
",",
"'priority'",
")",
",",
"(",
"state",
",",
"'state'",
")",
",",
"(",
"compute_env_order",
",",
"'computeEnvironmentOrder'",
")",
")",
":",
"if",
"variable",
"is",
"None",
":",
"raise",
"ClientException",
"(",
"'{0} must be provided'",
".",
"format",
"(",
"var_name",
")",
")",
"if",
"state",
"not",
"in",
"(",
"'ENABLED'",
",",
"'DISABLED'",
")",
":",
"raise",
"ClientException",
"(",
"'state {0} must be one of ENABLED | DISABLED'",
".",
"format",
"(",
"state",
")",
")",
"if",
"self",
".",
"get_job_queue_by_name",
"(",
"queue_name",
")",
"is",
"not",
"None",
":",
"raise",
"ClientException",
"(",
"'Job queue {0} already exists'",
".",
"format",
"(",
"queue_name",
")",
")",
"if",
"len",
"(",
"compute_env_order",
")",
"==",
"0",
":",
"raise",
"ClientException",
"(",
"'At least 1 compute environment must be provided'",
")",
"try",
":",
"# orders and extracts computeEnvironment names",
"ordered_compute_environments",
"=",
"[",
"item",
"[",
"'computeEnvironment'",
"]",
"for",
"item",
"in",
"sorted",
"(",
"compute_env_order",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"'order'",
"]",
")",
"]",
"env_objects",
"=",
"[",
"]",
"# Check each ARN exists, then make a list of compute env's",
"for",
"arn",
"in",
"ordered_compute_environments",
":",
"env",
"=",
"self",
".",
"get_compute_environment_by_arn",
"(",
"arn",
")",
"if",
"env",
"is",
"None",
":",
"raise",
"ClientException",
"(",
"'Compute environment {0} does not exist'",
".",
"format",
"(",
"arn",
")",
")",
"env_objects",
".",
"append",
"(",
"env",
")",
"except",
"Exception",
":",
"raise",
"ClientException",
"(",
"'computeEnvironmentOrder is malformed'",
")",
"# Create new Job Queue",
"queue",
"=",
"JobQueue",
"(",
"queue_name",
",",
"priority",
",",
"state",
",",
"env_objects",
",",
"compute_env_order",
",",
"self",
".",
"region_name",
")",
"self",
".",
"_job_queues",
"[",
"queue",
".",
"arn",
"]",
"=",
"queue",
"return",
"queue_name",
",",
"queue",
".",
"arn"
] | 46.568182 | 24.568182 |
def _clean_files_only(self, files):
''' if a user only wants to process one or more specific files, instead of a full sosreport '''
try:
if not (os.path.exists(self.origin_path)):
self.logger.info("Creating Origin Path - %s" % self.origin_path)
os.makedirs(self.origin_path) # create the origin_path directory
if not (os.path.exists(self.dir_path)):
self.logger.info("Creating Directory Path - %s" % self.dir_path)
os.makedirs(self.dir_path) # create the dir_path directory
self._add_extra_files(files)
except OSError as e: # pragma: no cover
if e.errno == errno.EEXIST:
pass
else: # pragma: no cover
self.logger.exception(e)
raise e
except Exception as e: # pragma: no cover
self.logger.exception(e)
raise Exception("CleanFilesOnlyError: unable to process") | [
"def",
"_clean_files_only",
"(",
"self",
",",
"files",
")",
":",
"try",
":",
"if",
"not",
"(",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"origin_path",
")",
")",
":",
"self",
".",
"logger",
".",
"info",
"(",
"\"Creating Origin Path - %s\"",
"%",
"self",
".",
"origin_path",
")",
"os",
".",
"makedirs",
"(",
"self",
".",
"origin_path",
")",
"# create the origin_path directory",
"if",
"not",
"(",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"dir_path",
")",
")",
":",
"self",
".",
"logger",
".",
"info",
"(",
"\"Creating Directory Path - %s\"",
"%",
"self",
".",
"dir_path",
")",
"os",
".",
"makedirs",
"(",
"self",
".",
"dir_path",
")",
"# create the dir_path directory",
"self",
".",
"_add_extra_files",
"(",
"files",
")",
"except",
"OSError",
"as",
"e",
":",
"# pragma: no cover",
"if",
"e",
".",
"errno",
"==",
"errno",
".",
"EEXIST",
":",
"pass",
"else",
":",
"# pragma: no cover",
"self",
".",
"logger",
".",
"exception",
"(",
"e",
")",
"raise",
"e",
"except",
"Exception",
"as",
"e",
":",
"# pragma: no cover",
"self",
".",
"logger",
".",
"exception",
"(",
"e",
")",
"raise",
"Exception",
"(",
"\"CleanFilesOnlyError: unable to process\"",
")"
] | 46.47619 | 21.428571 |
async def help(self, command_name=None):
"""
Sends a SMTP 'HELP' command.
For further details please check out `RFC 5321 § 4.1.1.8`_.
Args:
command_name (str or None, optional): Name of a command for which
you want help. For example, if you want to get help about the
'*RSET*' command, you'd call ``help('RSET')``.
Raises:
ConnectionResetError: If the connection with the server is
unexpectedely lost.
SMTPCommandFailedError: If the HELP command fails.
Returns:
Help text as given by the server.
.. _`RFC 5321 § 4.1.1.8`: https://tools.ietf.org/html/rfc5321#section-4.1.1.8
"""
if command_name is None:
command_name = ""
code, message = await self.do_cmd("HELP", command_name)
return message | [
"async",
"def",
"help",
"(",
"self",
",",
"command_name",
"=",
"None",
")",
":",
"if",
"command_name",
"is",
"None",
":",
"command_name",
"=",
"\"\"",
"code",
",",
"message",
"=",
"await",
"self",
".",
"do_cmd",
"(",
"\"HELP\"",
",",
"command_name",
")",
"return",
"message"
] | 32.148148 | 24.074074 |
def _rollback(self):
"""Restore the index in its previous state
This uses values that were indexed/deindexed since the last call
to `_reset_cache`.
This is used when an error is encountered while updating a value,
to return to the previous state
"""
# to avoid using self set that may be updated during the process
indexed_values = set(self._indexed_values)
deindexed_values = set(self._deindexed_values)
for args in indexed_values:
self.remove(*args)
for args in deindexed_values:
self.add(*args, check_uniqueness=False) | [
"def",
"_rollback",
"(",
"self",
")",
":",
"# to avoid using self set that may be updated during the process",
"indexed_values",
"=",
"set",
"(",
"self",
".",
"_indexed_values",
")",
"deindexed_values",
"=",
"set",
"(",
"self",
".",
"_deindexed_values",
")",
"for",
"args",
"in",
"indexed_values",
":",
"self",
".",
"remove",
"(",
"*",
"args",
")",
"for",
"args",
"in",
"deindexed_values",
":",
"self",
".",
"add",
"(",
"*",
"args",
",",
"check_uniqueness",
"=",
"False",
")"
] | 36.470588 | 17.941176 |
def get_proficiency_lookup_session_for_objective_bank(self, objective_bank_id, proxy):
"""Gets the ``OsidSession`` associated with the proficiency lookup service for the given objective bank.
arg: objective_bank_id (osid.id.Id): the ``Id`` of the
obective bank
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.learning.ProficiencyLookupSession) - a
``ProficiencyLookupSession``
raise: NotFound - no ``ObjectiveBank`` found by the given
``Id``
raise: NullArgument - ``objective_bank_id`` or ``proxy`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_proficiency_lookup()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_proficiency_lookup()`` and
``supports_visible_federation()`` are ``true``*
"""
if not self.supports_proficiency_lookup():
raise errors.Unimplemented()
##
# Also include check to see if the catalog Id is found otherwise raise errors.NotFound
##
# pylint: disable=no-member
return sessions.ProficiencyLookupSession(objective_bank_id, proxy, self._runtime) | [
"def",
"get_proficiency_lookup_session_for_objective_bank",
"(",
"self",
",",
"objective_bank_id",
",",
"proxy",
")",
":",
"if",
"not",
"self",
".",
"supports_proficiency_lookup",
"(",
")",
":",
"raise",
"errors",
".",
"Unimplemented",
"(",
")",
"##",
"# Also include check to see if the catalog Id is found otherwise raise errors.NotFound",
"##",
"# pylint: disable=no-member",
"return",
"sessions",
".",
"ProficiencyLookupSession",
"(",
"objective_bank_id",
",",
"proxy",
",",
"self",
".",
"_runtime",
")"
] | 48.925926 | 21.407407 |
def _signal_handler_init(self):
"""Catch interupt signals."""
signal.signal(signal.SIGINT, signal.SIG_IGN)
signal.signal(signal.SIGINT, self._signal_handler)
signal.signal(signal.SIGTERM, self._signal_handler) | [
"def",
"_signal_handler_init",
"(",
"self",
")",
":",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGINT",
",",
"signal",
".",
"SIG_IGN",
")",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGINT",
",",
"self",
".",
"_signal_handler",
")",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGTERM",
",",
"self",
".",
"_signal_handler",
")"
] | 47.4 | 11.6 |
def main():
'''
Bootstrapper CLI
'''
parser = argparse.ArgumentParser(prog='kclboot',
description='kclboot - Kinesis Client Library Bootstrapper')
subparsers = parser.add_subparsers(title='Subcommands', help='Additional help', dest='subparser')
# Common arguments
jar_path_parser = argparse.ArgumentParser(add_help=False)
jar_path_parser.add_argument('--jar-folder', dest='jar_folder', default='./jars',
help='Folder used to store jar files')
prop_path_parser = argparse.ArgumentParser(add_help=False)
prop_path_parser.add_argument('--properties-file', required=True, dest='properties_file',
help='*.properties file with KCL settings')
# Sub-commands
download_parser = subparsers.add_parser('download', parents=[jar_path_parser],
description='Download jars necessary to run KCL\'s MultiLangDaemon')
download_parser.set_defaults(func=_download)
command_parser = subparsers.add_parser('command', parents=[jar_path_parser, prop_path_parser],
description='Output formatted Java invocation with classpath')
command_parser.set_defaults(func=_command)
classpath_parser = subparsers.add_parser('classpath', parents=[jar_path_parser, prop_path_parser],
description='Output classpath, including jars and the folder containing the *.properties file')
classpath_parser.set_defaults(func=_classpath)
properties_parser = subparsers.add_parser('properties-from-env', parents=[prop_path_parser],
description='Generate a *.properties file from environmental variables')
properties_parser.set_defaults(func=_properties_from_env)
args = parser.parse_args()
if args.subparser:
args.func(args)
elif len(vars(args).keys()) == 1:
parser.print_usage() | [
"def",
"main",
"(",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"prog",
"=",
"'kclboot'",
",",
"description",
"=",
"'kclboot - Kinesis Client Library Bootstrapper'",
")",
"subparsers",
"=",
"parser",
".",
"add_subparsers",
"(",
"title",
"=",
"'Subcommands'",
",",
"help",
"=",
"'Additional help'",
",",
"dest",
"=",
"'subparser'",
")",
"# Common arguments",
"jar_path_parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"add_help",
"=",
"False",
")",
"jar_path_parser",
".",
"add_argument",
"(",
"'--jar-folder'",
",",
"dest",
"=",
"'jar_folder'",
",",
"default",
"=",
"'./jars'",
",",
"help",
"=",
"'Folder used to store jar files'",
")",
"prop_path_parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"add_help",
"=",
"False",
")",
"prop_path_parser",
".",
"add_argument",
"(",
"'--properties-file'",
",",
"required",
"=",
"True",
",",
"dest",
"=",
"'properties_file'",
",",
"help",
"=",
"'*.properties file with KCL settings'",
")",
"# Sub-commands",
"download_parser",
"=",
"subparsers",
".",
"add_parser",
"(",
"'download'",
",",
"parents",
"=",
"[",
"jar_path_parser",
"]",
",",
"description",
"=",
"'Download jars necessary to run KCL\\'s MultiLangDaemon'",
")",
"download_parser",
".",
"set_defaults",
"(",
"func",
"=",
"_download",
")",
"command_parser",
"=",
"subparsers",
".",
"add_parser",
"(",
"'command'",
",",
"parents",
"=",
"[",
"jar_path_parser",
",",
"prop_path_parser",
"]",
",",
"description",
"=",
"'Output formatted Java invocation with classpath'",
")",
"command_parser",
".",
"set_defaults",
"(",
"func",
"=",
"_command",
")",
"classpath_parser",
"=",
"subparsers",
".",
"add_parser",
"(",
"'classpath'",
",",
"parents",
"=",
"[",
"jar_path_parser",
",",
"prop_path_parser",
"]",
",",
"description",
"=",
"'Output classpath, including jars and the folder containing the *.properties file'",
")",
"classpath_parser",
".",
"set_defaults",
"(",
"func",
"=",
"_classpath",
")",
"properties_parser",
"=",
"subparsers",
".",
"add_parser",
"(",
"'properties-from-env'",
",",
"parents",
"=",
"[",
"prop_path_parser",
"]",
",",
"description",
"=",
"'Generate a *.properties file from environmental variables'",
")",
"properties_parser",
".",
"set_defaults",
"(",
"func",
"=",
"_properties_from_env",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"if",
"args",
".",
"subparser",
":",
"args",
".",
"func",
"(",
"args",
")",
"elif",
"len",
"(",
"vars",
"(",
"args",
")",
".",
"keys",
"(",
")",
")",
"==",
"1",
":",
"parser",
".",
"print_usage",
"(",
")"
] | 44.025 | 30.775 |
def _set_box(self):
"""
Set the box size for the molecular assembly
"""
net_volume = 0.0
for idx, mol in enumerate(self.mols):
length = max([np.max(mol.cart_coords[:, i])-np.min(mol.cart_coords[:, i])
for i in range(3)]) + 2.0
net_volume += (length**3.0) * float(self.param_list[idx]['number'])
length = net_volume**(1.0/3.0)
for idx, mol in enumerate(self.mols):
self.param_list[idx]['inside box'] = '0.0 0.0 0.0 {} {} {}'.format(
length, length, length) | [
"def",
"_set_box",
"(",
"self",
")",
":",
"net_volume",
"=",
"0.0",
"for",
"idx",
",",
"mol",
"in",
"enumerate",
"(",
"self",
".",
"mols",
")",
":",
"length",
"=",
"max",
"(",
"[",
"np",
".",
"max",
"(",
"mol",
".",
"cart_coords",
"[",
":",
",",
"i",
"]",
")",
"-",
"np",
".",
"min",
"(",
"mol",
".",
"cart_coords",
"[",
":",
",",
"i",
"]",
")",
"for",
"i",
"in",
"range",
"(",
"3",
")",
"]",
")",
"+",
"2.0",
"net_volume",
"+=",
"(",
"length",
"**",
"3.0",
")",
"*",
"float",
"(",
"self",
".",
"param_list",
"[",
"idx",
"]",
"[",
"'number'",
"]",
")",
"length",
"=",
"net_volume",
"**",
"(",
"1.0",
"/",
"3.0",
")",
"for",
"idx",
",",
"mol",
"in",
"enumerate",
"(",
"self",
".",
"mols",
")",
":",
"self",
".",
"param_list",
"[",
"idx",
"]",
"[",
"'inside box'",
"]",
"=",
"'0.0 0.0 0.0 {} {} {}'",
".",
"format",
"(",
"length",
",",
"length",
",",
"length",
")"
] | 44.461538 | 15.076923 |
def handle_initiate_stateful_checkpoint(self, ckptmsg):
"""Called when we get InitiateStatefulCheckpoint message
:param ckptmsg: InitiateStatefulCheckpoint type
"""
self.in_stream.offer(ckptmsg)
if self.my_pplan_helper.is_topology_running():
self.my_instance.py_class.process_incoming_tuples() | [
"def",
"handle_initiate_stateful_checkpoint",
"(",
"self",
",",
"ckptmsg",
")",
":",
"self",
".",
"in_stream",
".",
"offer",
"(",
"ckptmsg",
")",
"if",
"self",
".",
"my_pplan_helper",
".",
"is_topology_running",
"(",
")",
":",
"self",
".",
"my_instance",
".",
"py_class",
".",
"process_incoming_tuples",
"(",
")"
] | 44.714286 | 8.571429 |
def hat_map(vec):
"""Return that hat map of a vector
Inputs:
vec - 3 element vector
Outputs:
skew - 3,3 skew symmetric matrix
"""
vec = np.squeeze(vec)
skew = np.array([
[0, -vec[2], vec[1]],
[vec[2], 0, -vec[0]],
[-vec[1], vec[0], 0]])
return skew | [
"def",
"hat_map",
"(",
"vec",
")",
":",
"vec",
"=",
"np",
".",
"squeeze",
"(",
"vec",
")",
"skew",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"0",
",",
"-",
"vec",
"[",
"2",
"]",
",",
"vec",
"[",
"1",
"]",
"]",
",",
"[",
"vec",
"[",
"2",
"]",
",",
"0",
",",
"-",
"vec",
"[",
"0",
"]",
"]",
",",
"[",
"-",
"vec",
"[",
"1",
"]",
",",
"vec",
"[",
"0",
"]",
",",
"0",
"]",
"]",
")",
"return",
"skew"
] | 20.294118 | 18.117647 |
def pynac_in_sub_directory(num, file_list):
"""
A context manager to create a new directory, move the files listed in ``file_list``
to that directory, and change to that directory before handing control back to
context. The closing action is to change back to the original directory.
The directory name is based on the ``num`` input, and if it already exists, it
will be deleted upon entering the context.
The primary purpose of this function is to enable multiprocess use of Pynac via
the ``multi_process_pynac`` function.
"""
print('Running %d' % num)
new_dir = 'dynacProc_%04d' % num
if os.path.isdir(new_dir):
shutil.rmtree(new_dir)
os.mkdir(new_dir)
for f in file_list:
shutil.copy(f, new_dir)
os.chdir(new_dir)
yield
os.chdir('..') | [
"def",
"pynac_in_sub_directory",
"(",
"num",
",",
"file_list",
")",
":",
"print",
"(",
"'Running %d'",
"%",
"num",
")",
"new_dir",
"=",
"'dynacProc_%04d'",
"%",
"num",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"new_dir",
")",
":",
"shutil",
".",
"rmtree",
"(",
"new_dir",
")",
"os",
".",
"mkdir",
"(",
"new_dir",
")",
"for",
"f",
"in",
"file_list",
":",
"shutil",
".",
"copy",
"(",
"f",
",",
"new_dir",
")",
"os",
".",
"chdir",
"(",
"new_dir",
")",
"yield",
"os",
".",
"chdir",
"(",
"'..'",
")"
] | 36.625 | 21.708333 |
def _list_syntax_error():
"""
If we're going through a syntax error, add the directory of the error to
the watchlist.
"""
_, e, _ = sys.exc_info()
if isinstance(e, SyntaxError) and hasattr(e, 'filename'):
yield path.dirname(e.filename) | [
"def",
"_list_syntax_error",
"(",
")",
":",
"_",
",",
"e",
",",
"_",
"=",
"sys",
".",
"exc_info",
"(",
")",
"if",
"isinstance",
"(",
"e",
",",
"SyntaxError",
")",
"and",
"hasattr",
"(",
"e",
",",
"'filename'",
")",
":",
"yield",
"path",
".",
"dirname",
"(",
"e",
".",
"filename",
")"
] | 28.888889 | 16.444444 |
def setup_application (self):
""" Allows us to use method, injected as dependency earlier to set
up argparser before autocompletion/running the app.
"""
# figure out precise method name, specific to this use
name = 'configure_%s_app' % self.parent.name
# call generic set up method
getattr(self.method, 'configure_app', self._no_op_setup)(self, self.parser)
# call specific set up method
getattr(self.method, name, self._no_op_setup)(self, self.parser) | [
"def",
"setup_application",
"(",
"self",
")",
":",
"# figure out precise method name, specific to this use",
"name",
"=",
"'configure_%s_app'",
"%",
"self",
".",
"parent",
".",
"name",
"# call generic set up method",
"getattr",
"(",
"self",
".",
"method",
",",
"'configure_app'",
",",
"self",
".",
"_no_op_setup",
")",
"(",
"self",
",",
"self",
".",
"parser",
")",
"# call specific set up method",
"getattr",
"(",
"self",
".",
"method",
",",
"name",
",",
"self",
".",
"_no_op_setup",
")",
"(",
"self",
",",
"self",
".",
"parser",
")"
] | 47.9 | 13.4 |
def has(self, querypart_name, value=None):
"""Returns True if `querypart_name` with `value` is set.
For example you can check if you already used condition by `sql.has('where')`.
If you want to check for more information, for example if that condition
also contain ID, you can do this by `sql.has('where', 'id')`.
"""
querypart = self._queryparts.get(querypart_name)
if not querypart:
return False
if not querypart.is_set:
return False
if value:
return querypart.has(value)
return True | [
"def",
"has",
"(",
"self",
",",
"querypart_name",
",",
"value",
"=",
"None",
")",
":",
"querypart",
"=",
"self",
".",
"_queryparts",
".",
"get",
"(",
"querypart_name",
")",
"if",
"not",
"querypart",
":",
"return",
"False",
"if",
"not",
"querypart",
".",
"is_set",
":",
"return",
"False",
"if",
"value",
":",
"return",
"querypart",
".",
"has",
"(",
"value",
")",
"return",
"True"
] | 36.75 | 19.5625 |
def get_page_api(client_access_token, page_id):
"""
You can also skip the above if you get a page token:
http://stackoverflow.com/questions/8231877
and make that long-lived token as in Step 3
"""
graph = GraphAPI(client_access_token)
# Get page token to post as the page. You can skip
# the following if you want to post as yourself.
resp = graph.get('me/accounts')
page_access_token = None
for page in resp['data']:
if page['id'] == page_id:
page_access_token = page['access_token']
break
return GraphAPI(page_access_token) | [
"def",
"get_page_api",
"(",
"client_access_token",
",",
"page_id",
")",
":",
"graph",
"=",
"GraphAPI",
"(",
"client_access_token",
")",
"# Get page token to post as the page. You can skip",
"# the following if you want to post as yourself.",
"resp",
"=",
"graph",
".",
"get",
"(",
"'me/accounts'",
")",
"page_access_token",
"=",
"None",
"for",
"page",
"in",
"resp",
"[",
"'data'",
"]",
":",
"if",
"page",
"[",
"'id'",
"]",
"==",
"page_id",
":",
"page_access_token",
"=",
"page",
"[",
"'access_token'",
"]",
"break",
"return",
"GraphAPI",
"(",
"page_access_token",
")"
] | 32.722222 | 11.944444 |
def _parse_model(topology, scope, model, inputs=None, outputs=None):
    '''
    Delegate function of all top-level parsing functions: inspects the
    model's ``Type`` oneof field and calls the matching parser.
    '''
    inputs = [] if inputs is None else inputs
    outputs = [] if outputs is None else outputs
    model_type = model.WhichOneof('Type')
    if model_type in ('pipeline', 'pipelineClassifier', 'pipelineRegressor'):
        parser = _parse_pipeline_model
    elif model_type in ('neuralNetworkClassifier', 'neuralNetworkRegressor',
                        'neuralNetwork'):
        parser = _parse_neural_network_model
    else:
        parser = _parse_simple_model
    parser(topology, scope, model, inputs, outputs)
"def",
"_parse_model",
"(",
"topology",
",",
"scope",
",",
"model",
",",
"inputs",
"=",
"None",
",",
"outputs",
"=",
"None",
")",
":",
"if",
"inputs",
"is",
"None",
":",
"inputs",
"=",
"list",
"(",
")",
"if",
"outputs",
"is",
"None",
":",
"outputs",
"=",
"list",
"(",
")",
"model_type",
"=",
"model",
".",
"WhichOneof",
"(",
"'Type'",
")",
"if",
"model_type",
"in",
"[",
"'pipeline'",
",",
"'pipelineClassifier'",
",",
"'pipelineRegressor'",
"]",
":",
"_parse_pipeline_model",
"(",
"topology",
",",
"scope",
",",
"model",
",",
"inputs",
",",
"outputs",
")",
"elif",
"model_type",
"in",
"[",
"'neuralNetworkClassifier'",
",",
"'neuralNetworkRegressor'",
",",
"'neuralNetwork'",
"]",
":",
"_parse_neural_network_model",
"(",
"topology",
",",
"scope",
",",
"model",
",",
"inputs",
",",
"outputs",
")",
"else",
":",
"_parse_simple_model",
"(",
"topology",
",",
"scope",
",",
"model",
",",
"inputs",
",",
"outputs",
")"
] | 41.555556 | 30 |
def __stopOpenThreadWpan(self):
    """stop OpenThreadWpan
    Returns:
        True: successfully stop OpenThreadWpan
        False: failed to stop OpenThreadWpan
    Note: returns None (falsy, but not False) when an exception occurs.
    """
    # Python 2 module (print statement / `except Exception, e` syntax).
    print 'call stopOpenThreadWpan'
    try:
        # Issue the wpanctl 'leave' and 'dataset erase' commands; both must
        # succeed (first element of the command reply is not 'Fail').
        if self.__sendCommand(WPANCTL_CMD + 'leave')[0] != 'Fail' and self.__sendCommand(WPANCTL_CMD + 'dataset erase')[0] != 'Fail':
            return True
        else:
            return False
    except Exception, e:
        # Best-effort logging only; no return here, so callers see None.
        ModuleHelper.WriteIntoDebugLogger('stopOpenThreadWpan() Error: ' + str(e))
"def",
"__stopOpenThreadWpan",
"(",
"self",
")",
":",
"print",
"'call stopOpenThreadWpan'",
"try",
":",
"if",
"self",
".",
"__sendCommand",
"(",
"WPANCTL_CMD",
"+",
"'leave'",
")",
"[",
"0",
"]",
"!=",
"'Fail'",
"and",
"self",
".",
"__sendCommand",
"(",
"WPANCTL_CMD",
"+",
"'dataset erase'",
")",
"[",
"0",
"]",
"!=",
"'Fail'",
":",
"return",
"True",
"else",
":",
"return",
"False",
"except",
"Exception",
",",
"e",
":",
"ModuleHelper",
".",
"WriteIntoDebugLogger",
"(",
"'stopOpenThreadWpan() Error: '",
"+",
"str",
"(",
"e",
")",
")"
] | 37.333333 | 21.533333 |
def handle_requires(metadata, pkg_info, key):
    """
    Place the runtime requirements from pkg_info into metadata.

    :param metadata: dict updated in place with 'run_requires' and 'extras'.
    :param pkg_info: object whose ``get_all(key)`` yields requirement
        strings (presumably an ``email.message.Message``-like object —
        confirm at caller).
    :param key: name of the pkg_info field to read.
    """
    # Group requirement strings by their (environment condition, extra) pair.
    may_requires = defaultdict(list)
    for value in pkg_info.get_all(key):
        extra_match = EXTRA_RE.search(value)
        if extra_match:
            groupdict = extra_match.groupdict()
            condition = groupdict['condition']
            extra = groupdict['extra']
            package = groupdict['package']
            if condition.endswith(' and '):
                # Drop the trailing " and " left over when the extra marker
                # was the last clause of the environment condition.
                condition = condition[:-5]
        else:
            # No extra marker: the whole value is the requirement.
            condition, extra = None, None
            package = value
        # NOTE(review): this rebinds the `key` parameter inside the loop;
        # harmless because pkg_info.get_all(key) was evaluated once at loop
        # start, but confusing to read.
        key = MayRequiresKey(condition, extra)
        may_requires[key].append(package)
    if may_requires:
        metadata['run_requires'] = []
        for key, value in may_requires.items():
            may_requirement = {'requires':value}
            if key.extra:
                may_requirement['extra'] = key.extra
            if key.condition:
                may_requirement['environment'] = key.condition
            metadata['run_requires'].append(may_requirement)
        if not 'extras' in metadata:
            metadata['extras'] = []
        metadata['extras'].extend([key.extra for key in may_requires.keys() if key.extra])
"def",
"handle_requires",
"(",
"metadata",
",",
"pkg_info",
",",
"key",
")",
":",
"may_requires",
"=",
"defaultdict",
"(",
"list",
")",
"for",
"value",
"in",
"pkg_info",
".",
"get_all",
"(",
"key",
")",
":",
"extra_match",
"=",
"EXTRA_RE",
".",
"search",
"(",
"value",
")",
"if",
"extra_match",
":",
"groupdict",
"=",
"extra_match",
".",
"groupdict",
"(",
")",
"condition",
"=",
"groupdict",
"[",
"'condition'",
"]",
"extra",
"=",
"groupdict",
"[",
"'extra'",
"]",
"package",
"=",
"groupdict",
"[",
"'package'",
"]",
"if",
"condition",
".",
"endswith",
"(",
"' and '",
")",
":",
"condition",
"=",
"condition",
"[",
":",
"-",
"5",
"]",
"else",
":",
"condition",
",",
"extra",
"=",
"None",
",",
"None",
"package",
"=",
"value",
"key",
"=",
"MayRequiresKey",
"(",
"condition",
",",
"extra",
")",
"may_requires",
"[",
"key",
"]",
".",
"append",
"(",
"package",
")",
"if",
"may_requires",
":",
"metadata",
"[",
"'run_requires'",
"]",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"may_requires",
".",
"items",
"(",
")",
":",
"may_requirement",
"=",
"{",
"'requires'",
":",
"value",
"}",
"if",
"key",
".",
"extra",
":",
"may_requirement",
"[",
"'extra'",
"]",
"=",
"key",
".",
"extra",
"if",
"key",
".",
"condition",
":",
"may_requirement",
"[",
"'environment'",
"]",
"=",
"key",
".",
"condition",
"metadata",
"[",
"'run_requires'",
"]",
".",
"append",
"(",
"may_requirement",
")",
"if",
"not",
"'extras'",
"in",
"metadata",
":",
"metadata",
"[",
"'extras'",
"]",
"=",
"[",
"]",
"metadata",
"[",
"'extras'",
"]",
".",
"extend",
"(",
"[",
"key",
".",
"extra",
"for",
"key",
"in",
"may_requires",
".",
"keys",
"(",
")",
"if",
"key",
".",
"extra",
"]",
")"
] | 37.30303 | 11.545455 |
def get_update_status_brok(self):
    """
    Create an update item brok

    :return: Brok object
    :rtype: alignak.Brok
    """
    payload = {'uuid': self.uuid}
    # Populate the payload from this item's 'full_status' attributes.
    self.fill_data_brok_from(payload, 'full_status')
    brok_type = 'update_' + self.my_type + '_status'
    return Brok({'type': brok_type, 'data': payload})
"def",
"get_update_status_brok",
"(",
"self",
")",
":",
"data",
"=",
"{",
"'uuid'",
":",
"self",
".",
"uuid",
"}",
"self",
".",
"fill_data_brok_from",
"(",
"data",
",",
"'full_status'",
")",
"return",
"Brok",
"(",
"{",
"'type'",
":",
"'update_'",
"+",
"self",
".",
"my_type",
"+",
"'_status'",
",",
"'data'",
":",
"data",
"}",
")"
] | 31.3 | 13.7 |
def get_first_weekday_after(day, weekday):
    """Return the first date on or after ``day`` that falls on ``weekday``.

    ``weekday`` uses :meth:`datetime.date.weekday` numbering (Monday == 0).
    If ``day`` itself falls on ``weekday``, ``day`` is returned unchanged.

    >>> # the first monday after Apr 1 2015
    >>> Calendar.get_first_weekday_after(date(2015, 4, 1), MON)
    datetime.date(2015, 4, 6)
    >>> # the first tuesday after Apr 14 2015
    >>> Calendar.get_first_weekday_after(date(2015, 4, 14), TUE)
    datetime.date(2015, 4, 14)
    """
    offset = (weekday - day.weekday()) % 7
    return day + timedelta(days=offset)
"def",
"get_first_weekday_after",
"(",
"day",
",",
"weekday",
")",
":",
"day_delta",
"=",
"(",
"weekday",
"-",
"day",
".",
"weekday",
"(",
")",
")",
"%",
"7",
"day",
"=",
"day",
"+",
"timedelta",
"(",
"days",
"=",
"day_delta",
")",
"return",
"day"
] | 38.933333 | 13.933333 |
def set_tg(self):
    """Ensure the student's treatment-group number is cached locally and
    load it into ``self.tg_id``.

    If no local cache file exists yet, the student's email is looked up
    and the treatment-group server is queried; failures (no email,
    network error, malformed response) are recorded as -1.

    :return: EMPTY_MISUCOUNT_TGID_PRNTEDMSG when no student email is
        available; otherwise None.
    """
    tg_path = self.current_working_dir + LOCAL_TG_FILE
    # Only contact the server when no cached treatment group number exists.
    if not os.path.isfile(tg_path):
        cur_email = self.assignment.get_student_email()
        log.info("Current email is %s", cur_email)
        if not cur_email:
            self.tg_id = -1
            return EMPTY_MISUCOUNT_TGID_PRNTEDMSG
        tg_url = ("{}{}/{}{}"
                  .format(TGSERVER, cur_email, self.assignment_name,
                          TG_SERVER_ENDING))
        try:
            log.info("Accessing treatment server at %s", tg_url)
            data = requests.get(tg_url, timeout=1).json()
        except IOError:
            data = {"tg": -1}
            log.warning("Failed to communicate to server", exc_info=True)
        if data.get("tg") is None:
            log.warning("Server returned back a bad treatment group ID.")
            data = {"tg": -1}
        with open(tg_path, "w") as fd:
            fd.write(str(data["tg"]))
    # Fix: the original opened this file without ever closing it (leak).
    with open(tg_path, "r") as tg_file:
        self.tg_id = int(tg_file.read())
"def",
"set_tg",
"(",
"self",
")",
":",
"# Checks to see the student currently has a treatment group number.",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"self",
".",
"current_working_dir",
"+",
"LOCAL_TG_FILE",
")",
":",
"cur_email",
"=",
"self",
".",
"assignment",
".",
"get_student_email",
"(",
")",
"log",
".",
"info",
"(",
"\"Current email is %s\"",
",",
"cur_email",
")",
"if",
"not",
"cur_email",
":",
"self",
".",
"tg_id",
"=",
"-",
"1",
"return",
"EMPTY_MISUCOUNT_TGID_PRNTEDMSG",
"tg_url",
"=",
"(",
"\"{}{}/{}{}\"",
".",
"format",
"(",
"TGSERVER",
",",
"cur_email",
",",
"self",
".",
"assignment_name",
",",
"TG_SERVER_ENDING",
")",
")",
"try",
":",
"log",
".",
"info",
"(",
"\"Accessing treatment server at %s\"",
",",
"tg_url",
")",
"data",
"=",
"requests",
".",
"get",
"(",
"tg_url",
",",
"timeout",
"=",
"1",
")",
".",
"json",
"(",
")",
"except",
"IOError",
":",
"data",
"=",
"{",
"\"tg\"",
":",
"-",
"1",
"}",
"log",
".",
"warning",
"(",
"\"Failed to communicate to server\"",
",",
"exc_info",
"=",
"True",
")",
"if",
"data",
".",
"get",
"(",
"\"tg\"",
")",
"is",
"None",
":",
"log",
".",
"warning",
"(",
"\"Server returned back a bad treatment group ID.\"",
")",
"data",
"=",
"{",
"\"tg\"",
":",
"-",
"1",
"}",
"with",
"open",
"(",
"self",
".",
"current_working_dir",
"+",
"LOCAL_TG_FILE",
",",
"\"w\"",
")",
"as",
"fd",
":",
"fd",
".",
"write",
"(",
"str",
"(",
"data",
"[",
"\"tg\"",
"]",
")",
")",
"tg_file",
"=",
"open",
"(",
"self",
".",
"current_working_dir",
"+",
"LOCAL_TG_FILE",
",",
"'r'",
")",
"self",
".",
"tg_id",
"=",
"int",
"(",
"tg_file",
".",
"read",
"(",
")",
")"
] | 42.71875 | 20.21875 |
def load_containers(self, service, configs, use_cache):
    """Validate *service* and delegate loading of its containers.

    :param service: Service instance whose containers should be loaded.
    :param configs: configuration passed through to the loader.
    :param use_cache: whether cached data may be reused.
    :raises TypeError: if *service* is not a Service instance.
    :raises Exception: if the container ship is not healthy/reachable.
    :return None:
    """
    if not isinstance(service, Service):
        message = "service must be and instance of service. {0} was passed.".format(service)
        raise TypeError(message)
    if not self.healthy():
        logger.error("unable to connect to container ship.")
        raise Exception('lost comms with our container ship')
    self._load_service_containers(service, configs, use_cache)
"def",
"load_containers",
"(",
"self",
",",
"service",
",",
"configs",
",",
"use_cache",
")",
":",
"if",
"not",
"isinstance",
"(",
"service",
",",
"Service",
")",
":",
"raise",
"TypeError",
"(",
"\"service must be and instance of service. {0} was passed.\"",
".",
"format",
"(",
"service",
")",
")",
"if",
"not",
"self",
".",
"healthy",
"(",
")",
":",
"logger",
".",
"error",
"(",
"\"unable to connect to container ship.\"",
")",
"raise",
"Exception",
"(",
"'lost comms with our container ship'",
")",
"self",
".",
"_load_service_containers",
"(",
"service",
",",
"configs",
",",
"use_cache",
")"
] | 38.384615 | 21.461538 |
def transaction_effects(self, tx_hash, cursor=None, order='asc', limit=10):
    """Return all effects that occurred as a result of a given transaction.

    `GET /transactions/{hash}/effects{?cursor,limit,order}
    <https://www.stellar.org/developers/horizon/reference/endpoints/effects-for-transaction.html>`_

    :param str tx_hash: The hex-encoded transaction hash.
    :param int cursor: A paging token, specifying where to start returning records from.
    :param str order: The order in which to return rows, "asc" or "desc".
    :param int limit: Maximum number of records to return.
    :return: A single transaction's effects.
    :rtype: dict
    """
    params = self.__query_params(cursor=cursor, order=order, limit=limit)
    endpoint = '/transactions/{tx_hash}/effects'.format(tx_hash=tx_hash)
    return self.query(endpoint, params)
"def",
"transaction_effects",
"(",
"self",
",",
"tx_hash",
",",
"cursor",
"=",
"None",
",",
"order",
"=",
"'asc'",
",",
"limit",
"=",
"10",
")",
":",
"endpoint",
"=",
"'/transactions/{tx_hash}/effects'",
".",
"format",
"(",
"tx_hash",
"=",
"tx_hash",
")",
"params",
"=",
"self",
".",
"__query_params",
"(",
"cursor",
"=",
"cursor",
",",
"order",
"=",
"order",
",",
"limit",
"=",
"limit",
")",
"return",
"self",
".",
"query",
"(",
"endpoint",
",",
"params",
")"
] | 50.611111 | 27.222222 |
def set_colors(self, text='black', background='white'):
    """
    Sets the text and background colors of the text area.
    """
    # Multi-line widgets are QTextEdit; single-line ones are QLineEdit.
    widget_kind = "QTextEdit" if self._multiline else "QLineEdit"
    style = widget_kind + " {background-color: " + str(background) + "; color: " + str(text) + "}"
    self._widget.setStyleSheet(style)
"def",
"set_colors",
"(",
"self",
",",
"text",
"=",
"'black'",
",",
"background",
"=",
"'white'",
")",
":",
"if",
"self",
".",
"_multiline",
":",
"self",
".",
"_widget",
".",
"setStyleSheet",
"(",
"\"QTextEdit {background-color: \"",
"+",
"str",
"(",
"background",
")",
"+",
"\"; color: \"",
"+",
"str",
"(",
"text",
")",
"+",
"\"}\"",
")",
"else",
":",
"self",
".",
"_widget",
".",
"setStyleSheet",
"(",
"\"QLineEdit {background-color: \"",
"+",
"str",
"(",
"background",
")",
"+",
"\"; color: \"",
"+",
"str",
"(",
"text",
")",
"+",
"\"}\"",
")"
] | 62.666667 | 32.333333 |
def list_jobs(self, limit=None, skip=None):
    """List replication jobs.

    Includes replications created via the ``/_replicate`` endpoint as
    well as those created from replication documents, but not
    replications which have completed or failed to start because the
    replication documents were malformed.  Each job description includes
    source and target information, the replication id, a history of
    recent events, and a few other things.

    :param limit: How many results to return.
    :param skip: How many results to skip starting at the beginning, if
        ordered by document ID.
    """
    params = {
        name: value
        for name, value in (("limit", limit), ("skip", skip))
        if value is not None
    }
    resp = self._r_session.get('/'.join([self._scheduler, 'jobs']), params=params)
    resp.raise_for_status()
    return response_to_json_dict(resp)
"def",
"list_jobs",
"(",
"self",
",",
"limit",
"=",
"None",
",",
"skip",
"=",
"None",
")",
":",
"params",
"=",
"dict",
"(",
")",
"if",
"limit",
"is",
"not",
"None",
":",
"params",
"[",
"\"limit\"",
"]",
"=",
"limit",
"if",
"skip",
"is",
"not",
"None",
":",
"params",
"[",
"\"skip\"",
"]",
"=",
"skip",
"resp",
"=",
"self",
".",
"_r_session",
".",
"get",
"(",
"'/'",
".",
"join",
"(",
"[",
"self",
".",
"_scheduler",
",",
"'jobs'",
"]",
")",
",",
"params",
"=",
"params",
")",
"resp",
".",
"raise_for_status",
"(",
")",
"return",
"response_to_json_dict",
"(",
"resp",
")"
] | 45.380952 | 19.190476 |
def qr_code(self, instance):
    """
    Display picture of QR-code from used secret
    """
    try:
        return self._qr_code(instance)
    except Exception:
        # In DEBUG mode surface the traceback in the admin; otherwise the
        # failure is swallowed and None is returned.
        if settings.DEBUG:
            import traceback
            return "<pre>%s</pre>" % traceback.format_exc()
"def",
"qr_code",
"(",
"self",
",",
"instance",
")",
":",
"try",
":",
"return",
"self",
".",
"_qr_code",
"(",
"instance",
")",
"except",
"Exception",
"as",
"err",
":",
"if",
"settings",
".",
"DEBUG",
":",
"import",
"traceback",
"return",
"\"<pre>%s</pre>\"",
"%",
"traceback",
".",
"format_exc",
"(",
")"
] | 31.2 | 10.2 |
def commit_transaction(self):
    """
    Commit the currently active transaction (Pipeline).  If no
    transaction is active in the current thread, an exception
    will be raised.

    :returns: The return value of executing the Pipeline.
    :raises: ``ValueError`` if no transaction is active.
    """
    with self._transaction_lock:
        state = self._transaction_local
        if not state.pipes:
            raise ValueError('No transaction is currently active.')
        return state.commit()
"def",
"commit_transaction",
"(",
"self",
")",
":",
"with",
"self",
".",
"_transaction_lock",
":",
"local",
"=",
"self",
".",
"_transaction_local",
"if",
"not",
"local",
".",
"pipes",
":",
"raise",
"ValueError",
"(",
"'No transaction is currently active.'",
")",
"return",
"local",
".",
"commit",
"(",
")"
] | 38.5 | 15.214286 |
def get_node_dict(self, return_internal=False, return_nodes=False):
    """
    Return node labels as a dictionary mapping {idx: name} where idx is
    the order of nodes in 'preorder' traversal. Used internally by the
    func .get_node_values() to return values in proper order.
    return_internal: if True all nodes are returned, if False only tips.
    return_nodes: if True returns TreeNodes, if False return node names.
    """
    mapping = {}
    for node in self.treenode.traverse("preorder"):
        # Skip internal nodes unless explicitly requested.
        if not return_internal and not node.is_leaf():
            continue
        mapping[node.idx] = node if return_nodes else node.name
    return mapping
"def",
"get_node_dict",
"(",
"self",
",",
"return_internal",
"=",
"False",
",",
"return_nodes",
"=",
"False",
")",
":",
"if",
"return_internal",
":",
"if",
"return_nodes",
":",
"return",
"{",
"i",
".",
"idx",
":",
"i",
"for",
"i",
"in",
"self",
".",
"treenode",
".",
"traverse",
"(",
"\"preorder\"",
")",
"}",
"else",
":",
"return",
"{",
"i",
".",
"idx",
":",
"i",
".",
"name",
"for",
"i",
"in",
"self",
".",
"treenode",
".",
"traverse",
"(",
"\"preorder\"",
")",
"}",
"else",
":",
"if",
"return_nodes",
":",
"return",
"{",
"i",
".",
"idx",
":",
"i",
"for",
"i",
"in",
"self",
".",
"treenode",
".",
"traverse",
"(",
"\"preorder\"",
")",
"if",
"i",
".",
"is_leaf",
"(",
")",
"}",
"else",
":",
"return",
"{",
"i",
".",
"idx",
":",
"i",
".",
"name",
"for",
"i",
"in",
"self",
".",
"treenode",
".",
"traverse",
"(",
"\"preorder\"",
")",
"if",
"i",
".",
"is_leaf",
"(",
")",
"}"
] | 38.517241 | 22.448276 |
def generateBatches(tasks, givens):
    """
    Generate batches of commands to run in an order that satisfies all of
    each command's dependencies: commands with no dependencies form the
    first batch, and the most deeply dependent commands end up last.
    """
    _removeGivensFromTasks(tasks, givens)
    batches = []
    while tasks:
        # Everything whose dependency list is empty is ready to run now.
        ready = {name for name, deps in tasks.items() if not deps}
        if not ready:
            # No progress possible (e.g. unsatisfiable dependencies).
            _batchErrorProcessing(tasks)
        for name in ready:
            del tasks[name]
        # Completed tasks no longer count as outstanding dependencies.
        for name, deps in tasks.items():
            for done in ready:
                if done in deps:
                    tasks[name].remove(done)
        batches.append(ready)
    return batches
"def",
"generateBatches",
"(",
"tasks",
",",
"givens",
")",
":",
"_removeGivensFromTasks",
"(",
"tasks",
",",
"givens",
")",
"batches",
"=",
"[",
"]",
"while",
"tasks",
":",
"batch",
"=",
"set",
"(",
")",
"for",
"task",
",",
"dependencies",
"in",
"tasks",
".",
"items",
"(",
")",
":",
"if",
"not",
"dependencies",
":",
"batch",
".",
"add",
"(",
"task",
")",
"if",
"not",
"batch",
":",
"_batchErrorProcessing",
"(",
"tasks",
")",
"for",
"task",
"in",
"batch",
":",
"del",
"tasks",
"[",
"task",
"]",
"for",
"task",
",",
"dependencies",
"in",
"tasks",
".",
"items",
"(",
")",
":",
"for",
"item",
"in",
"batch",
":",
"if",
"item",
"in",
"dependencies",
":",
"tasks",
"[",
"task",
"]",
".",
"remove",
"(",
"item",
")",
"batches",
".",
"append",
"(",
"batch",
")",
"return",
"batches"
] | 28.724138 | 17.827586 |
def _is_instance(self, triple):
    """helper, returns the class type of subj"""
    subj, pred, obj = triple
    input_pred_ns = self._namespace_from_uri(self._expand_qname(pred))
    type_pred = rt.URIRef(self.schema_def.lexicon['type'])
    # All "type" triples for this subject.
    candidates = self.graph.triples((subj, type_pred, None))
    if candidates:
        for candidate in candidates:
            candidate_ns = self._namespace_from_uri(
                self._expand_qname(candidate[2]))
            if candidate_ns == input_pred_ns:  # match namespaces
                return candidate[2]
"def",
"_is_instance",
"(",
"self",
",",
"triple",
")",
":",
"subj",
",",
"pred",
",",
"obj",
"=",
"triple",
"input_pred_ns",
"=",
"self",
".",
"_namespace_from_uri",
"(",
"self",
".",
"_expand_qname",
"(",
"pred",
")",
")",
"triples",
"=",
"self",
".",
"graph",
".",
"triples",
"(",
"(",
"subj",
",",
"rt",
".",
"URIRef",
"(",
"self",
".",
"schema_def",
".",
"lexicon",
"[",
"'type'",
"]",
")",
",",
"None",
")",
")",
"if",
"triples",
":",
"for",
"tr",
"in",
"triples",
":",
"triple_obj_ns",
"=",
"self",
".",
"_namespace_from_uri",
"(",
"self",
".",
"_expand_qname",
"(",
"tr",
"[",
"2",
"]",
")",
")",
"if",
"input_pred_ns",
"==",
"triple_obj_ns",
":",
"# match namespaces",
"return",
"tr",
"[",
"2",
"]"
] | 42.846154 | 15.769231 |
def process_md5(self, md5_output, pattern=r"^([a-fA-F0-9]+)$"):
    """Extract the MD5 hash from device command output.

    IOS-XR defaults with timestamps enabled::

        # show md5 file /bootflash:/boot/grub/grub.cfg
        Sat Mar 3 17:49:03.596 UTC
        c84843f0030efd44b01343fdb8c2e801

    :raises ValueError: if no line of the output matches ``pattern``.
    """
    match = re.search(pattern, md5_output, flags=re.M)
    if not match:
        raise ValueError("Invalid output from MD5 command: {}".format(md5_output))
    return match.group(1)
"def",
"process_md5",
"(",
"self",
",",
"md5_output",
",",
"pattern",
"=",
"r\"^([a-fA-F0-9]+)$\"",
")",
":",
"match",
"=",
"re",
".",
"search",
"(",
"pattern",
",",
"md5_output",
",",
"flags",
"=",
"re",
".",
"M",
")",
"if",
"match",
":",
"return",
"match",
".",
"group",
"(",
"1",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Invalid output from MD5 command: {}\"",
".",
"format",
"(",
"md5_output",
")",
")"
] | 36 | 16.153846 |
def parse_list(self, entries):
    """Parse a JSON array into a SearchableList of model instances."""
    parse = self.instance.parse
    requester = self.requester
    results = SearchableList()
    for raw_entry in entries:
        results.append(parse(requester, raw_entry))
    return results
"def",
"parse_list",
"(",
"self",
",",
"entries",
")",
":",
"result_entries",
"=",
"SearchableList",
"(",
")",
"for",
"entry",
"in",
"entries",
":",
"result_entries",
".",
"append",
"(",
"self",
".",
"instance",
".",
"parse",
"(",
"self",
".",
"requester",
",",
"entry",
")",
")",
"return",
"result_entries"
] | 45 | 11.666667 |
def transform(self, X):
    """
    Delete all features which were not relevant in the fit phase.

    :param X: data sample with all features, which will be reduced to only those that are relevant
    :type X: pandas.DataSeries or numpy.array
    :return: same data sample as X, but with only the relevant features
    :rtype: pandas.DataFrame or numpy.array
    :raises RuntimeError: if fit was not called first.
    """
    if self.relevant_features is None:
        raise RuntimeError("You have to call fit before.")
    if isinstance(X, pd.DataFrame):
        return X.copy().loc[:, self.relevant_features]
    return X[:, self.relevant_features]
"def",
"transform",
"(",
"self",
",",
"X",
")",
":",
"if",
"self",
".",
"relevant_features",
"is",
"None",
":",
"raise",
"RuntimeError",
"(",
"\"You have to call fit before.\"",
")",
"if",
"isinstance",
"(",
"X",
",",
"pd",
".",
"DataFrame",
")",
":",
"return",
"X",
".",
"copy",
"(",
")",
".",
"loc",
"[",
":",
",",
"self",
".",
"relevant_features",
"]",
"else",
":",
"return",
"X",
"[",
":",
",",
"self",
".",
"relevant_features",
"]"
] | 38.176471 | 21 |
def apps_location_installations_reorder(self, data, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/core/app_location_installations#reorder-app-installations-for-location"
    return self.call(
        "/api/v2/apps/location_installations/reorder.json",
        method="POST",
        data=data,
        **kwargs
    )
"def",
"apps_location_installations_reorder",
"(",
"self",
",",
"data",
",",
"*",
"*",
"kwargs",
")",
":",
"api_path",
"=",
"\"/api/v2/apps/location_installations/reorder.json\"",
"return",
"self",
".",
"call",
"(",
"api_path",
",",
"method",
"=",
"\"POST\"",
",",
"data",
"=",
"data",
",",
"*",
"*",
"kwargs",
")"
] | 81.25 | 41.25 |
def filter(self, condition):
    """Filters rows using the given condition.
    :func:`where` is an alias for :func:`filter`.
    :param condition: a :class:`Column` of :class:`types.BooleanType`
        or a string of SQL expression.
    >>> df.filter(df.age > 3).collect()
    [Row(age=5, name=u'Bob')]
    >>> df.where(df.age == 2).collect()
    [Row(age=2, name=u'Alice')]
    >>> df.filter("age > 3").collect()
    [Row(age=5, name=u'Bob')]
    >>> df.where("age = 2").collect()
    [Row(age=2, name=u'Alice')]
    """
    if isinstance(condition, basestring):
        return DataFrame(self._jdf.filter(condition), self.sql_ctx)
    if isinstance(condition, Column):
        return DataFrame(self._jdf.filter(condition._jc), self.sql_ctx)
    raise TypeError("condition should be string or Column")
"def",
"filter",
"(",
"self",
",",
"condition",
")",
":",
"if",
"isinstance",
"(",
"condition",
",",
"basestring",
")",
":",
"jdf",
"=",
"self",
".",
"_jdf",
".",
"filter",
"(",
"condition",
")",
"elif",
"isinstance",
"(",
"condition",
",",
"Column",
")",
":",
"jdf",
"=",
"self",
".",
"_jdf",
".",
"filter",
"(",
"condition",
".",
"_jc",
")",
"else",
":",
"raise",
"TypeError",
"(",
"\"condition should be string or Column\"",
")",
"return",
"DataFrame",
"(",
"jdf",
",",
"self",
".",
"sql_ctx",
")"
] | 34.68 | 13.28 |
def deselect_nodenames(self, *substrings: str) -> 'Selection':
    """Restrict the current selection to all nodes with a name
    not containing at least one of the given substrings (does not
    affect any elements).

    See the documentation on method |Selection.search_nodenames| for
    additional information.
    """
    matches = self.search_nodenames(*substrings)
    self.nodes -= matches.nodes
    return self
"def",
"deselect_nodenames",
"(",
"self",
",",
"*",
"substrings",
":",
"str",
")",
"->",
"'Selection'",
":",
"self",
".",
"nodes",
"-=",
"self",
".",
"search_nodenames",
"(",
"*",
"substrings",
")",
".",
"nodes",
"return",
"self"
] | 42.1 | 18.6 |
def run(self, format=None, reduce=False, *args, **kwargs):
    """Generate the underlying graph and print it to stdout.

    When ``reduce`` is true a transitive reduction is applied first,
    producing fewer edges -- mostly useful for the "dot" format when
    converting to PNG, as it yields a prettier/cleaner dependency graph.
    """
    plan = self._generate_plan()
    if reduce:
        plan.graph.transitive_reduction()
    formatter = FORMATTERS[format]
    formatter(sys.stdout, plan.graph)
    sys.stdout.flush()
"def",
"run",
"(",
"self",
",",
"format",
"=",
"None",
",",
"reduce",
"=",
"False",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"plan",
"=",
"self",
".",
"_generate_plan",
"(",
")",
"if",
"reduce",
":",
"# This will performa a transitive reduction on the underlying",
"# graph, producing less edges. Mostly useful for the \"dot\" format,",
"# when converting to PNG, so it creates a prettier/cleaner",
"# dependency graph.",
"plan",
".",
"graph",
".",
"transitive_reduction",
"(",
")",
"fn",
"=",
"FORMATTERS",
"[",
"format",
"]",
"fn",
"(",
"sys",
".",
"stdout",
",",
"plan",
".",
"graph",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")"
] | 37.8 | 17.866667 |
def accept(self):
        """accept a connection on the host/port to which the socket is bound
        .. note::
            if there is no connection attempt already queued, this method will
            block until a connection is made
        :returns:
            a two-tuple of ``(socket, address)`` where the socket is connected,
            and the address is the ``(ip_address, port)`` of the remote end
        """
        # Python 2 module (`except socket.error, exc` syntax).
        # Register read-event interest for the lifetime of the accept.
        with self._registered('re'):
            while 1:
                try:
                    client, addr = self._sock.accept()
                except socket.error, exc:
                    # Re-raise real errors; error codes listed in
                    # _BLOCKING_OP presumably mean "would block" -- retry.
                    if not self._blocking or exc[0] not in _BLOCKING_OP:
                        raise
                    sys.exc_clear()
                    # Cooperatively wait until the socket becomes readable;
                    # a truthy result from wait() means the wait timed out.
                    if self._readable.wait(self.gettimeout()):
                        raise socket.timeout("timed out")
                    if scheduler.state.interrupted:
                        raise IOError(errno.EINTR,
                                "interrupted system call")
                    continue
                # Wrap the accepted fd in this socket class before returning.
                return type(self)(fromsock=client), addr
"def",
"accept",
"(",
"self",
")",
":",
"with",
"self",
".",
"_registered",
"(",
"'re'",
")",
":",
"while",
"1",
":",
"try",
":",
"client",
",",
"addr",
"=",
"self",
".",
"_sock",
".",
"accept",
"(",
")",
"except",
"socket",
".",
"error",
",",
"exc",
":",
"if",
"not",
"self",
".",
"_blocking",
"or",
"exc",
"[",
"0",
"]",
"not",
"in",
"_BLOCKING_OP",
":",
"raise",
"sys",
".",
"exc_clear",
"(",
")",
"if",
"self",
".",
"_readable",
".",
"wait",
"(",
"self",
".",
"gettimeout",
"(",
")",
")",
":",
"raise",
"socket",
".",
"timeout",
"(",
"\"timed out\"",
")",
"if",
"scheduler",
".",
"state",
".",
"interrupted",
":",
"raise",
"IOError",
"(",
"errno",
".",
"EINTR",
",",
"\"interrupted system call\"",
")",
"continue",
"return",
"type",
"(",
"self",
")",
"(",
"fromsock",
"=",
"client",
")",
",",
"addr"
] | 40.111111 | 19.185185 |
def problem(problem_name, **kwargs):
  """Get possibly copied/reversed problem in `base_registry` or `env_registry`.

  Args:
    problem_name: string problem name. See `parse_problem_name`.
    **kwargs: forwarded to env problem's initialize method.

  Returns:
    possibly reversed/copied version of base problem registered in the given
    registry.
  """
  spec = parse_problem_name(problem_name)
  try:
    base_problem_cls = Registries.problems[spec.base_name]
    return base_problem_cls(was_copy=spec.was_copy,
                            was_reversed=spec.was_reversed)
  except KeyError:
    # Name not found among base problems -- fall back to an env problem.
    return env_problem(problem_name, **kwargs)
"def",
"problem",
"(",
"problem_name",
",",
"*",
"*",
"kwargs",
")",
":",
"spec",
"=",
"parse_problem_name",
"(",
"problem_name",
")",
"try",
":",
"return",
"Registries",
".",
"problems",
"[",
"spec",
".",
"base_name",
"]",
"(",
"was_copy",
"=",
"spec",
".",
"was_copy",
",",
"was_reversed",
"=",
"spec",
".",
"was_reversed",
")",
"except",
"KeyError",
":",
"# If name is not found in base problems then try creating an env problem",
"return",
"env_problem",
"(",
"problem_name",
",",
"*",
"*",
"kwargs",
")"
] | 35.888889 | 21.222222 |
def print_table(table, title='', delim='|', centering='center', col_padding=2,
header=True, headerchar='-'):
"""Print a table from a list of lists representing the rows of a table.
Parameters
----------
table : list
list of lists, e.g. a table with 3 columns and 2 rows could be
[ ['0,0', '0,1', '0,2'], ['1,0', '1,1', '1,2'] ]
title : string
Printed centered above the table
delim : string
character to delimit columns
centering : {'left', 'right', 'center'}
chooses justification for columns
col_padding : int
number of blank spaces to add to each column
header : {True, False}
Does the first entry of table contain column headers?
headerchar : {string}
character to separate column headers from rest of table
Returns
-------
string representing table that's ready to be printed
Notes
-----
The string for the table will have correctly justified columns
with extra padding added into each column entry to ensure columns align.
The characters to delimit the columns can be user defined. This
should be useful for printing convergence data from tests.
Examples
--------
>>> from pyamg.util.utils import print_table
>>> table = [ ['cos(0)', 'cos(pi/2)', 'cos(pi)'], ['0.0', '1.0', '0.0'] ]
>>> table1 = print_table(table) # string to print
>>> table2 = print_table(table, delim='||')
>>> table3 = print_table(table, headerchar='*')
>>> table4 = print_table(table, col_padding=6, centering='left')
"""
table_str = '\n'
# sometimes, the table will be passed in as (title, table)
if isinstance(table, tuple):
title = table[0]
table = table[1]
# Calculate each column's width
colwidths = []
for i in range(len(table)):
# extend colwidths for row i
for k in range(len(table[i]) - len(colwidths)):
colwidths.append(-1)
# Update colwidths if table[i][j] is wider than colwidth[j]
for j in range(len(table[i])):
if len(table[i][j]) > colwidths[j]:
colwidths[j] = len(table[i][j])
# Factor in extra column padding
for i in range(len(colwidths)):
colwidths[i] += col_padding
# Total table width
ttwidth = sum(colwidths) + len(delim)*(len(colwidths)-1)
# Print Title
if len(title) > 0:
title = title.split("\n")
for i in range(len(title)):
table_str += str.center(title[i], ttwidth) + '\n'
table_str += "\n"
# Choose centering scheme
centering = centering.lower()
if centering == 'center':
centering = str.center
if centering == 'right':
centering = str.rjust
if centering == 'left':
centering = str.ljust
if header:
# Append Column Headers
for elmt, elmtwidth in zip(table[0], colwidths):
table_str += centering(str(elmt), elmtwidth) + delim
if table[0] != []:
table_str = table_str[:-len(delim)] + '\n'
# Append Header Separator
# Total Column Width Total Col Delimiter Widths
if len(headerchar) == 0:
headerchar = ' '
table_str += headerchar *\
int(sp.ceil(float(ttwidth)/float(len(headerchar)))) + '\n'
table = table[1:]
for row in table:
for elmt, elmtwidth in zip(row, colwidths):
table_str += centering(str(elmt), elmtwidth) + delim
if row != []:
table_str = table_str[:-len(delim)] + '\n'
else:
table_str += '\n'
return table_str | [
"def",
"print_table",
"(",
"table",
",",
"title",
"=",
"''",
",",
"delim",
"=",
"'|'",
",",
"centering",
"=",
"'center'",
",",
"col_padding",
"=",
"2",
",",
"header",
"=",
"True",
",",
"headerchar",
"=",
"'-'",
")",
":",
"table_str",
"=",
"'\\n'",
"# sometimes, the table will be passed in as (title, table)",
"if",
"isinstance",
"(",
"table",
",",
"tuple",
")",
":",
"title",
"=",
"table",
"[",
"0",
"]",
"table",
"=",
"table",
"[",
"1",
"]",
"# Calculate each column's width",
"colwidths",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"table",
")",
")",
":",
"# extend colwidths for row i",
"for",
"k",
"in",
"range",
"(",
"len",
"(",
"table",
"[",
"i",
"]",
")",
"-",
"len",
"(",
"colwidths",
")",
")",
":",
"colwidths",
".",
"append",
"(",
"-",
"1",
")",
"# Update colwidths if table[i][j] is wider than colwidth[j]",
"for",
"j",
"in",
"range",
"(",
"len",
"(",
"table",
"[",
"i",
"]",
")",
")",
":",
"if",
"len",
"(",
"table",
"[",
"i",
"]",
"[",
"j",
"]",
")",
">",
"colwidths",
"[",
"j",
"]",
":",
"colwidths",
"[",
"j",
"]",
"=",
"len",
"(",
"table",
"[",
"i",
"]",
"[",
"j",
"]",
")",
"# Factor in extra column padding",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"colwidths",
")",
")",
":",
"colwidths",
"[",
"i",
"]",
"+=",
"col_padding",
"# Total table width",
"ttwidth",
"=",
"sum",
"(",
"colwidths",
")",
"+",
"len",
"(",
"delim",
")",
"*",
"(",
"len",
"(",
"colwidths",
")",
"-",
"1",
")",
"# Print Title",
"if",
"len",
"(",
"title",
")",
">",
"0",
":",
"title",
"=",
"title",
".",
"split",
"(",
"\"\\n\"",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"title",
")",
")",
":",
"table_str",
"+=",
"str",
".",
"center",
"(",
"title",
"[",
"i",
"]",
",",
"ttwidth",
")",
"+",
"'\\n'",
"table_str",
"+=",
"\"\\n\"",
"# Choose centering scheme",
"centering",
"=",
"centering",
".",
"lower",
"(",
")",
"if",
"centering",
"==",
"'center'",
":",
"centering",
"=",
"str",
".",
"center",
"if",
"centering",
"==",
"'right'",
":",
"centering",
"=",
"str",
".",
"rjust",
"if",
"centering",
"==",
"'left'",
":",
"centering",
"=",
"str",
".",
"ljust",
"if",
"header",
":",
"# Append Column Headers",
"for",
"elmt",
",",
"elmtwidth",
"in",
"zip",
"(",
"table",
"[",
"0",
"]",
",",
"colwidths",
")",
":",
"table_str",
"+=",
"centering",
"(",
"str",
"(",
"elmt",
")",
",",
"elmtwidth",
")",
"+",
"delim",
"if",
"table",
"[",
"0",
"]",
"!=",
"[",
"]",
":",
"table_str",
"=",
"table_str",
"[",
":",
"-",
"len",
"(",
"delim",
")",
"]",
"+",
"'\\n'",
"# Append Header Separator",
"# Total Column Width Total Col Delimiter Widths",
"if",
"len",
"(",
"headerchar",
")",
"==",
"0",
":",
"headerchar",
"=",
"' '",
"table_str",
"+=",
"headerchar",
"*",
"int",
"(",
"sp",
".",
"ceil",
"(",
"float",
"(",
"ttwidth",
")",
"/",
"float",
"(",
"len",
"(",
"headerchar",
")",
")",
")",
")",
"+",
"'\\n'",
"table",
"=",
"table",
"[",
"1",
":",
"]",
"for",
"row",
"in",
"table",
":",
"for",
"elmt",
",",
"elmtwidth",
"in",
"zip",
"(",
"row",
",",
"colwidths",
")",
":",
"table_str",
"+=",
"centering",
"(",
"str",
"(",
"elmt",
")",
",",
"elmtwidth",
")",
"+",
"delim",
"if",
"row",
"!=",
"[",
"]",
":",
"table_str",
"=",
"table_str",
"[",
":",
"-",
"len",
"(",
"delim",
")",
"]",
"+",
"'\\n'",
"else",
":",
"table_str",
"+=",
"'\\n'",
"return",
"table_str"
] | 32.198198 | 20.216216 |
def login():
'''Log in as administrator
You can use wither basic auth or form based login (via POST).
:param username: The administrator's username
:type username: string
:param password: The administrator's password
:type password: string
'''
username = None
password = None
next = flask.request.args.get('next')
auth = flask.request.authorization
if flask.request.method == 'POST':
username = flask.request.form['username']
password = flask.request.form['password']
if auth and auth.type == 'basic':
username = auth.username
password = auth.password
if not flogin.current_user.is_active:
error = 'You have to login with proper credentials'
if username and password:
if check_auth(username, password):
user = _users.get(username)
if user:
if flogin.login_user(user):
return flask.redirect(next or flask.url_for("user"))
error = 'Could not log in user.'
else:
error = 'User not found.'
else:
error = 'Wrong username or password.'
else:
error = 'No username or password.'
return flask.Response(
'Could not verify your access level for that URL.\n {}'.format(error),
401,
{str('WWW-Authenticate'): str('Basic realm="Login Required"')})
return flask.redirect(next or flask.url_for("user")) | [
"def",
"login",
"(",
")",
":",
"username",
"=",
"None",
"password",
"=",
"None",
"next",
"=",
"flask",
".",
"request",
".",
"args",
".",
"get",
"(",
"'next'",
")",
"auth",
"=",
"flask",
".",
"request",
".",
"authorization",
"if",
"flask",
".",
"request",
".",
"method",
"==",
"'POST'",
":",
"username",
"=",
"flask",
".",
"request",
".",
"form",
"[",
"'username'",
"]",
"password",
"=",
"flask",
".",
"request",
".",
"form",
"[",
"'password'",
"]",
"if",
"auth",
"and",
"auth",
".",
"type",
"==",
"'basic'",
":",
"username",
"=",
"auth",
".",
"username",
"password",
"=",
"auth",
".",
"password",
"if",
"not",
"flogin",
".",
"current_user",
".",
"is_active",
":",
"error",
"=",
"'You have to login with proper credentials'",
"if",
"username",
"and",
"password",
":",
"if",
"check_auth",
"(",
"username",
",",
"password",
")",
":",
"user",
"=",
"_users",
".",
"get",
"(",
"username",
")",
"if",
"user",
":",
"if",
"flogin",
".",
"login_user",
"(",
"user",
")",
":",
"return",
"flask",
".",
"redirect",
"(",
"next",
"or",
"flask",
".",
"url_for",
"(",
"\"user\"",
")",
")",
"error",
"=",
"'Could not log in user.'",
"else",
":",
"error",
"=",
"'User not found.'",
"else",
":",
"error",
"=",
"'Wrong username or password.'",
"else",
":",
"error",
"=",
"'No username or password.'",
"return",
"flask",
".",
"Response",
"(",
"'Could not verify your access level for that URL.\\n {}'",
".",
"format",
"(",
"error",
")",
",",
"401",
",",
"{",
"str",
"(",
"'WWW-Authenticate'",
")",
":",
"str",
"(",
"'Basic realm=\"Login Required\"'",
")",
"}",
")",
"return",
"flask",
".",
"redirect",
"(",
"next",
"or",
"flask",
".",
"url_for",
"(",
"\"user\"",
")",
")"
] | 34.72093 | 17.511628 |
def cov_params(self, r_matrix=None, column=None, scale=None, cov_p=None,
other=None):
"""
Returns the variance/covariance matrix.
The variance/covariance matrix can be of a linear contrast
of the estimates of params or all params multiplied by scale which
will usually be an estimate of sigma^2. Scale is assumed to be
a scalar.
Parameters
----------
r_matrix : array-like
Can be 1d, or 2d. Can be used alone or with other.
column : array-like, optional
Must be used on its own. Can be 0d or 1d see below.
scale : float, optional
Can be specified or not. Default is None, which means that
the scale argument is taken from the model.
other : array-like, optional
Can be used when r_matrix is specified.
Returns
-------
cov : ndarray
covariance matrix of the parameter estimates or of linear
combination of parameter estimates. See Notes.
Notes
-----
(The below are assumed to be in matrix notation.)
If no argument is specified returns the covariance matrix of a model
``(scale)*(X.T X)^(-1)``
If contrast is specified it pre and post-multiplies as follows
``(scale) * r_matrix (X.T X)^(-1) r_matrix.T``
If contrast and other are specified returns
``(scale) * r_matrix (X.T X)^(-1) other.T``
If column is specified returns
``(scale) * (X.T X)^(-1)[column,column]`` if column is 0d
OR
``(scale) * (X.T X)^(-1)[column][:,column]`` if column is 1d
"""
if (hasattr(self, 'mle_settings') and
self.mle_settings['optimizer'] in ['l1', 'l1_cvxopt_cp']):
dot_fun = nan_dot
else:
dot_fun = np.dot
if (cov_p is None and self.normalized_cov_params is None and
not hasattr(self, 'cov_params_default')):
raise ValueError('need covariance of parameters for computing '
'(unnormalized) covariances')
if column is not None and (r_matrix is not None or other is not None):
raise ValueError('Column should be specified without other '
'arguments.')
if other is not None and r_matrix is None:
raise ValueError('other can only be specified with r_matrix')
if cov_p is None:
if hasattr(self, 'cov_params_default'):
cov_p = self.cov_params_default
else:
if scale is None:
scale = self.scale
cov_p = self.normalized_cov_params * scale
if column is not None:
column = np.asarray(column)
if column.shape == ():
return cov_p[column, column]
else:
# return cov_p[column][:, column]
return cov_p[column[:, None], column]
elif r_matrix is not None:
r_matrix = np.asarray(r_matrix)
if r_matrix.shape == ():
raise ValueError("r_matrix should be 1d or 2d")
if other is None:
other = r_matrix
else:
other = np.asarray(other)
tmp = dot_fun(r_matrix, dot_fun(cov_p, np.transpose(other)))
return tmp
else: # if r_matrix is None and column is None:
return cov_p | [
"def",
"cov_params",
"(",
"self",
",",
"r_matrix",
"=",
"None",
",",
"column",
"=",
"None",
",",
"scale",
"=",
"None",
",",
"cov_p",
"=",
"None",
",",
"other",
"=",
"None",
")",
":",
"if",
"(",
"hasattr",
"(",
"self",
",",
"'mle_settings'",
")",
"and",
"self",
".",
"mle_settings",
"[",
"'optimizer'",
"]",
"in",
"[",
"'l1'",
",",
"'l1_cvxopt_cp'",
"]",
")",
":",
"dot_fun",
"=",
"nan_dot",
"else",
":",
"dot_fun",
"=",
"np",
".",
"dot",
"if",
"(",
"cov_p",
"is",
"None",
"and",
"self",
".",
"normalized_cov_params",
"is",
"None",
"and",
"not",
"hasattr",
"(",
"self",
",",
"'cov_params_default'",
")",
")",
":",
"raise",
"ValueError",
"(",
"'need covariance of parameters for computing '",
"'(unnormalized) covariances'",
")",
"if",
"column",
"is",
"not",
"None",
"and",
"(",
"r_matrix",
"is",
"not",
"None",
"or",
"other",
"is",
"not",
"None",
")",
":",
"raise",
"ValueError",
"(",
"'Column should be specified without other '",
"'arguments.'",
")",
"if",
"other",
"is",
"not",
"None",
"and",
"r_matrix",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'other can only be specified with r_matrix'",
")",
"if",
"cov_p",
"is",
"None",
":",
"if",
"hasattr",
"(",
"self",
",",
"'cov_params_default'",
")",
":",
"cov_p",
"=",
"self",
".",
"cov_params_default",
"else",
":",
"if",
"scale",
"is",
"None",
":",
"scale",
"=",
"self",
".",
"scale",
"cov_p",
"=",
"self",
".",
"normalized_cov_params",
"*",
"scale",
"if",
"column",
"is",
"not",
"None",
":",
"column",
"=",
"np",
".",
"asarray",
"(",
"column",
")",
"if",
"column",
".",
"shape",
"==",
"(",
")",
":",
"return",
"cov_p",
"[",
"column",
",",
"column",
"]",
"else",
":",
"# return cov_p[column][:, column]",
"return",
"cov_p",
"[",
"column",
"[",
":",
",",
"None",
"]",
",",
"column",
"]",
"elif",
"r_matrix",
"is",
"not",
"None",
":",
"r_matrix",
"=",
"np",
".",
"asarray",
"(",
"r_matrix",
")",
"if",
"r_matrix",
".",
"shape",
"==",
"(",
")",
":",
"raise",
"ValueError",
"(",
"\"r_matrix should be 1d or 2d\"",
")",
"if",
"other",
"is",
"None",
":",
"other",
"=",
"r_matrix",
"else",
":",
"other",
"=",
"np",
".",
"asarray",
"(",
"other",
")",
"tmp",
"=",
"dot_fun",
"(",
"r_matrix",
",",
"dot_fun",
"(",
"cov_p",
",",
"np",
".",
"transpose",
"(",
"other",
")",
")",
")",
"return",
"tmp",
"else",
":",
"# if r_matrix is None and column is None:",
"return",
"cov_p"
] | 42.17284 | 17.901235 |
def analysis(self):
"""Return an AnalysisPartition proxy, which wraps this partition to provide acess to
dataframes, shapely shapes and other analysis services"""
if isinstance(self, PartitionProxy):
return AnalysisPartition(self._obj)
else:
return AnalysisPartition(self) | [
"def",
"analysis",
"(",
"self",
")",
":",
"if",
"isinstance",
"(",
"self",
",",
"PartitionProxy",
")",
":",
"return",
"AnalysisPartition",
"(",
"self",
".",
"_obj",
")",
"else",
":",
"return",
"AnalysisPartition",
"(",
"self",
")"
] | 46 | 8.714286 |
def temporal_segmentation(segments, min_time):
""" Segments based on time distant points
Args:
segments (:obj:`list` of :obj:`list` of :obj:`Point`): segment points
min_time (int): minimum required time for segmentation
"""
final_segments = []
for segment in segments:
final_segments.append([])
for point in segment:
if point.dt > min_time:
final_segments.append([])
final_segments[-1].append(point)
return final_segments | [
"def",
"temporal_segmentation",
"(",
"segments",
",",
"min_time",
")",
":",
"final_segments",
"=",
"[",
"]",
"for",
"segment",
"in",
"segments",
":",
"final_segments",
".",
"append",
"(",
"[",
"]",
")",
"for",
"point",
"in",
"segment",
":",
"if",
"point",
".",
"dt",
">",
"min_time",
":",
"final_segments",
".",
"append",
"(",
"[",
"]",
")",
"final_segments",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"point",
")",
"return",
"final_segments"
] | 31.5 | 15.5 |
def get_tags(filesystemid,
keyid=None,
key=None,
profile=None,
region=None,
**kwargs):
'''
Return the tags associated with an EFS instance.
filesystemid
(string) - ID of the file system whose tags to list
returns
(list) - list of tags as key/value pairs
CLI Example:
.. code-block:: bash
salt 'my-minion' boto_efs.get_tags efs-id
'''
client = _get_conn(key=key, keyid=keyid, profile=profile, region=region)
response = client.describe_tags(FileSystemId=filesystemid)
result = response["Tags"]
while "NextMarker" in response:
response = client.describe_tags(FileSystemId=filesystemid,
Marker=response["NextMarker"])
result.extend(response["Tags"])
return result | [
"def",
"get_tags",
"(",
"filesystemid",
",",
"keyid",
"=",
"None",
",",
"key",
"=",
"None",
",",
"profile",
"=",
"None",
",",
"region",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"client",
"=",
"_get_conn",
"(",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
",",
"region",
"=",
"region",
")",
"response",
"=",
"client",
".",
"describe_tags",
"(",
"FileSystemId",
"=",
"filesystemid",
")",
"result",
"=",
"response",
"[",
"\"Tags\"",
"]",
"while",
"\"NextMarker\"",
"in",
"response",
":",
"response",
"=",
"client",
".",
"describe_tags",
"(",
"FileSystemId",
"=",
"filesystemid",
",",
"Marker",
"=",
"response",
"[",
"\"NextMarker\"",
"]",
")",
"result",
".",
"extend",
"(",
"response",
"[",
"\"Tags\"",
"]",
")",
"return",
"result"
] | 26.741935 | 23.709677 |
def project_create_func(self, proj_id, proj=None):
"""Create project given project uuid"""
if self.get_project_name(proj_id):
LOG.info("project %s exists, returning", proj_id)
return
if not proj:
try:
proj = self.keystone_event._service.projects.get(proj_id)
except Exception:
LOG.error("Failed to find project %s.", proj_id)
return
# In the project name, dci_id may be included. Check if this is the
# case and extact the dci_id from the name, and provide dci_id when
# creating the project.
proj_name, dci_id = self._get_dci_id_and_proj_name(proj.name)
if proj_name in reserved_project_name:
proj_name = "_".join((proj_name, self.cfg.dcnm.orchestrator_id))
# The default partition name is 'os' (i.e. openstack) which reflects
# it is created by openstack.
part_name = self.cfg.dcnm.default_partition_name
if len(':'.join((proj_name, part_name))) > 32:
LOG.error('Invalid project name length: %s. The length of '
'org:part name is greater than 32',
len(':'.join((proj_name, part_name))))
return
try:
self.dcnm_client.create_project(self.cfg.dcnm.orchestrator_id,
proj_name, part_name, dci_id,
proj.description)
except dexc.DfaClientRequestFailed:
# Failed to send create project in DCNM.
# Save the info and mark it as failure and retry it later.
self.update_project_info_cache(proj_id, name=proj_name,
dci_id=dci_id,
result=constants.CREATE_FAIL)
LOG.error("Failed to create project %s on DCNM.", proj_name)
else:
self.update_project_info_cache(proj_id, name=proj_name,
dci_id=dci_id)
LOG.debug('project %(name)s %(dci)s %(desc)s', (
{'name': proj_name, 'dci': dci_id, 'desc': proj.description}))
self.project_create_notif(proj_id, proj_name) | [
"def",
"project_create_func",
"(",
"self",
",",
"proj_id",
",",
"proj",
"=",
"None",
")",
":",
"if",
"self",
".",
"get_project_name",
"(",
"proj_id",
")",
":",
"LOG",
".",
"info",
"(",
"\"project %s exists, returning\"",
",",
"proj_id",
")",
"return",
"if",
"not",
"proj",
":",
"try",
":",
"proj",
"=",
"self",
".",
"keystone_event",
".",
"_service",
".",
"projects",
".",
"get",
"(",
"proj_id",
")",
"except",
"Exception",
":",
"LOG",
".",
"error",
"(",
"\"Failed to find project %s.\"",
",",
"proj_id",
")",
"return",
"# In the project name, dci_id may be included. Check if this is the",
"# case and extact the dci_id from the name, and provide dci_id when",
"# creating the project.",
"proj_name",
",",
"dci_id",
"=",
"self",
".",
"_get_dci_id_and_proj_name",
"(",
"proj",
".",
"name",
")",
"if",
"proj_name",
"in",
"reserved_project_name",
":",
"proj_name",
"=",
"\"_\"",
".",
"join",
"(",
"(",
"proj_name",
",",
"self",
".",
"cfg",
".",
"dcnm",
".",
"orchestrator_id",
")",
")",
"# The default partition name is 'os' (i.e. openstack) which reflects",
"# it is created by openstack.",
"part_name",
"=",
"self",
".",
"cfg",
".",
"dcnm",
".",
"default_partition_name",
"if",
"len",
"(",
"':'",
".",
"join",
"(",
"(",
"proj_name",
",",
"part_name",
")",
")",
")",
">",
"32",
":",
"LOG",
".",
"error",
"(",
"'Invalid project name length: %s. The length of '",
"'org:part name is greater than 32'",
",",
"len",
"(",
"':'",
".",
"join",
"(",
"(",
"proj_name",
",",
"part_name",
")",
")",
")",
")",
"return",
"try",
":",
"self",
".",
"dcnm_client",
".",
"create_project",
"(",
"self",
".",
"cfg",
".",
"dcnm",
".",
"orchestrator_id",
",",
"proj_name",
",",
"part_name",
",",
"dci_id",
",",
"proj",
".",
"description",
")",
"except",
"dexc",
".",
"DfaClientRequestFailed",
":",
"# Failed to send create project in DCNM.",
"# Save the info and mark it as failure and retry it later.",
"self",
".",
"update_project_info_cache",
"(",
"proj_id",
",",
"name",
"=",
"proj_name",
",",
"dci_id",
"=",
"dci_id",
",",
"result",
"=",
"constants",
".",
"CREATE_FAIL",
")",
"LOG",
".",
"error",
"(",
"\"Failed to create project %s on DCNM.\"",
",",
"proj_name",
")",
"else",
":",
"self",
".",
"update_project_info_cache",
"(",
"proj_id",
",",
"name",
"=",
"proj_name",
",",
"dci_id",
"=",
"dci_id",
")",
"LOG",
".",
"debug",
"(",
"'project %(name)s %(dci)s %(desc)s'",
",",
"(",
"{",
"'name'",
":",
"proj_name",
",",
"'dci'",
":",
"dci_id",
",",
"'desc'",
":",
"proj",
".",
"description",
"}",
")",
")",
"self",
".",
"project_create_notif",
"(",
"proj_id",
",",
"proj_name",
")"
] | 49.422222 | 22.777778 |
def set_all_attribute_values(self, value):
"""
sets all the attribute values to the value and propagate to any children
"""
for attribute_name, type_instance in inspect.getmembers(self):
if attribute_name.startswith('__') or inspect.ismethod(type_instance):
# Ignore parameters with __ and if they are methods
continue
if isinstance(type_instance, bool):
self.__dict__[attribute_name] = value
elif isinstance(type_instance, self.__class__):
type_instance.set_all_attribute_values(value) | [
"def",
"set_all_attribute_values",
"(",
"self",
",",
"value",
")",
":",
"for",
"attribute_name",
",",
"type_instance",
"in",
"inspect",
".",
"getmembers",
"(",
"self",
")",
":",
"if",
"attribute_name",
".",
"startswith",
"(",
"'__'",
")",
"or",
"inspect",
".",
"ismethod",
"(",
"type_instance",
")",
":",
"# Ignore parameters with __ and if they are methods",
"continue",
"if",
"isinstance",
"(",
"type_instance",
",",
"bool",
")",
":",
"self",
".",
"__dict__",
"[",
"attribute_name",
"]",
"=",
"value",
"elif",
"isinstance",
"(",
"type_instance",
",",
"self",
".",
"__class__",
")",
":",
"type_instance",
".",
"set_all_attribute_values",
"(",
"value",
")"
] | 40.466667 | 22.466667 |
def parse_storage_size(storage_size):
"""
Parses an expression that represents an amount of storage/memory and returns the number of bytes it represents.
Args:
storage_size(str): Size in bytes. The units ``k`` (kibibytes), ``m`` (mebibytes) and ``g``
(gibibytes) are supported, i.e. a ``partition_size`` of ``1g`` equates :math:`2^{30}` bytes.
Returns:
int: Number of bytes.
"""
pattern = re.compile(r'^([0-9]+(\.[0-9]+)?)([gmk])?$', re.I)
units = {
'k': 1024,
'm': 1024 * 1024,
'g': 1024 * 1024 * 1024
}
match = pattern.fullmatch(str(storage_size))
if match is None:
raise ValueError('Invalid partition size: {0}'.format(storage_size))
groups = match.groups()
# no units
if groups[2] is None:
# silently dropping the float, because byte is the smallest unit)
return int(float(groups[0]))
return int(float(groups[0]) * units[groups[2].lower()]) | [
"def",
"parse_storage_size",
"(",
"storage_size",
")",
":",
"pattern",
"=",
"re",
".",
"compile",
"(",
"r'^([0-9]+(\\.[0-9]+)?)([gmk])?$'",
",",
"re",
".",
"I",
")",
"units",
"=",
"{",
"'k'",
":",
"1024",
",",
"'m'",
":",
"1024",
"*",
"1024",
",",
"'g'",
":",
"1024",
"*",
"1024",
"*",
"1024",
"}",
"match",
"=",
"pattern",
".",
"fullmatch",
"(",
"str",
"(",
"storage_size",
")",
")",
"if",
"match",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Invalid partition size: {0}'",
".",
"format",
"(",
"storage_size",
")",
")",
"groups",
"=",
"match",
".",
"groups",
"(",
")",
"# no units",
"if",
"groups",
"[",
"2",
"]",
"is",
"None",
":",
"# silently dropping the float, because byte is the smallest unit)",
"return",
"int",
"(",
"float",
"(",
"groups",
"[",
"0",
"]",
")",
")",
"return",
"int",
"(",
"float",
"(",
"groups",
"[",
"0",
"]",
")",
"*",
"units",
"[",
"groups",
"[",
"2",
"]",
".",
"lower",
"(",
")",
"]",
")"
] | 30.25 | 28.4375 |
def route(self, req, node, path):
'''
Looks up a controller from a node based upon the specified path.
:param node: The node, such as a root controller object.
:param path: The path to look up on this node.
'''
path = path.split('/')[1:]
try:
node, remainder = lookup_controller(node, path, req)
return node, remainder
except NonCanonicalPath as e:
if self.force_canonical and \
not _cfg(e.controller).get('accept_noncanonical', False):
if req.method == 'POST':
raise RuntimeError(
"You have POSTed to a URL '%s' which "
"requires a slash. Most browsers will not maintain "
"POST data when redirected. Please update your code "
"to POST to '%s/' or set force_canonical to False" %
(req.pecan['routing_path'],
req.pecan['routing_path'])
)
redirect(code=302, add_slash=True, request=req)
return e.controller, e.remainder | [
"def",
"route",
"(",
"self",
",",
"req",
",",
"node",
",",
"path",
")",
":",
"path",
"=",
"path",
".",
"split",
"(",
"'/'",
")",
"[",
"1",
":",
"]",
"try",
":",
"node",
",",
"remainder",
"=",
"lookup_controller",
"(",
"node",
",",
"path",
",",
"req",
")",
"return",
"node",
",",
"remainder",
"except",
"NonCanonicalPath",
"as",
"e",
":",
"if",
"self",
".",
"force_canonical",
"and",
"not",
"_cfg",
"(",
"e",
".",
"controller",
")",
".",
"get",
"(",
"'accept_noncanonical'",
",",
"False",
")",
":",
"if",
"req",
".",
"method",
"==",
"'POST'",
":",
"raise",
"RuntimeError",
"(",
"\"You have POSTed to a URL '%s' which \"",
"\"requires a slash. Most browsers will not maintain \"",
"\"POST data when redirected. Please update your code \"",
"\"to POST to '%s/' or set force_canonical to False\"",
"%",
"(",
"req",
".",
"pecan",
"[",
"'routing_path'",
"]",
",",
"req",
".",
"pecan",
"[",
"'routing_path'",
"]",
")",
")",
"redirect",
"(",
"code",
"=",
"302",
",",
"add_slash",
"=",
"True",
",",
"request",
"=",
"req",
")",
"return",
"e",
".",
"controller",
",",
"e",
".",
"remainder"
] | 45.88 | 19.32 |
def set_regs(self):
''' setting unicorn registers '''
uc = self.uc
if self.state.arch.qemu_name == 'x86_64':
fs = self.state.solver.eval(self.state.regs.fs)
gs = self.state.solver.eval(self.state.regs.gs)
self.write_msr(fs, 0xC0000100)
self.write_msr(gs, 0xC0000101)
flags = self._process_value(self.state.regs.eflags, 'reg')
if flags is None:
raise SimValueError('symbolic eflags')
uc.reg_write(self._uc_const.UC_X86_REG_EFLAGS, self.state.solver.eval(flags))
elif self.state.arch.qemu_name == 'i386':
flags = self._process_value(self.state.regs.eflags, 'reg')
if flags is None:
raise SimValueError('symbolic eflags')
uc.reg_write(self._uc_const.UC_X86_REG_EFLAGS, self.state.solver.eval(flags))
fs = self.state.solver.eval(self.state.regs.fs) << 16
gs = self.state.solver.eval(self.state.regs.gs) << 16
self.setup_gdt(fs, gs)
for r, c in self._uc_regs.items():
if r in self.reg_blacklist:
continue
v = self._process_value(getattr(self.state.regs, r), 'reg')
if v is None:
raise SimValueError('setting a symbolic register')
# l.debug('setting $%s = %#x', r, self.state.solver.eval(v))
uc.reg_write(c, self.state.solver.eval(v))
if self.state.arch.name in ('X86', 'AMD64'):
# sync the fp clerical data
c3210 = self.state.solver.eval(self.state.regs.fc3210)
top = self.state.solver.eval(self.state.regs.ftop[2:0])
rm = self.state.solver.eval(self.state.regs.fpround[1:0])
control = 0x037F | (rm << 10)
status = (top << 11) | c3210
uc.reg_write(unicorn.x86_const.UC_X86_REG_FPCW, control)
uc.reg_write(unicorn.x86_const.UC_X86_REG_FPSW, status)
# we gotta convert the 64-bit doubles values to 80-bit extended precision!
uc_offset = unicorn.x86_const.UC_X86_REG_FP0
vex_offset = self.state.arch.registers['fpu_regs'][0]
vex_tag_offset = self.state.arch.registers['fpu_tags'][0]
tag_word = 0
for _ in range(8):
tag = self.state.solver.eval(self.state.registers.load(vex_tag_offset, size=1))
tag_word <<= 2
if tag == 0:
tag_word |= 3 # unicorn doesn't care about any value other than 3 for setting
else:
val = self._process_value(self.state.registers.load(vex_offset, size=8), 'reg')
if val is None:
raise SimValueError('setting a symbolic fp register')
val = self.state.solver.eval(val)
sign = bool(val & 0x8000000000000000)
exponent = (val & 0x7FF0000000000000) >> 52
mantissa = val & 0x000FFFFFFFFFFFFF
if exponent not in (0, 0x7FF): # normal value
exponent = exponent - 1023 + 16383
mantissa <<= 11
mantissa |= 0x8000000000000000 # set integer part bit, implicit to double
elif exponent == 0: # zero or subnormal value
mantissa = 0
elif exponent == 0x7FF: # nan or infinity
exponent = 0x7FFF
if mantissa != 0:
mantissa = 0x8000000000000000
else:
mantissa = 0xFFFFFFFFFFFFFFFF
if sign:
exponent |= 0x8000
uc.reg_write(uc_offset, (exponent, mantissa))
uc_offset += 1
vex_offset += 8
vex_tag_offset += 1
uc.reg_write(unicorn.x86_const.UC_X86_REG_FPTAG, tag_word) | [
"def",
"set_regs",
"(",
"self",
")",
":",
"uc",
"=",
"self",
".",
"uc",
"if",
"self",
".",
"state",
".",
"arch",
".",
"qemu_name",
"==",
"'x86_64'",
":",
"fs",
"=",
"self",
".",
"state",
".",
"solver",
".",
"eval",
"(",
"self",
".",
"state",
".",
"regs",
".",
"fs",
")",
"gs",
"=",
"self",
".",
"state",
".",
"solver",
".",
"eval",
"(",
"self",
".",
"state",
".",
"regs",
".",
"gs",
")",
"self",
".",
"write_msr",
"(",
"fs",
",",
"0xC0000100",
")",
"self",
".",
"write_msr",
"(",
"gs",
",",
"0xC0000101",
")",
"flags",
"=",
"self",
".",
"_process_value",
"(",
"self",
".",
"state",
".",
"regs",
".",
"eflags",
",",
"'reg'",
")",
"if",
"flags",
"is",
"None",
":",
"raise",
"SimValueError",
"(",
"'symbolic eflags'",
")",
"uc",
".",
"reg_write",
"(",
"self",
".",
"_uc_const",
".",
"UC_X86_REG_EFLAGS",
",",
"self",
".",
"state",
".",
"solver",
".",
"eval",
"(",
"flags",
")",
")",
"elif",
"self",
".",
"state",
".",
"arch",
".",
"qemu_name",
"==",
"'i386'",
":",
"flags",
"=",
"self",
".",
"_process_value",
"(",
"self",
".",
"state",
".",
"regs",
".",
"eflags",
",",
"'reg'",
")",
"if",
"flags",
"is",
"None",
":",
"raise",
"SimValueError",
"(",
"'symbolic eflags'",
")",
"uc",
".",
"reg_write",
"(",
"self",
".",
"_uc_const",
".",
"UC_X86_REG_EFLAGS",
",",
"self",
".",
"state",
".",
"solver",
".",
"eval",
"(",
"flags",
")",
")",
"fs",
"=",
"self",
".",
"state",
".",
"solver",
".",
"eval",
"(",
"self",
".",
"state",
".",
"regs",
".",
"fs",
")",
"<<",
"16",
"gs",
"=",
"self",
".",
"state",
".",
"solver",
".",
"eval",
"(",
"self",
".",
"state",
".",
"regs",
".",
"gs",
")",
"<<",
"16",
"self",
".",
"setup_gdt",
"(",
"fs",
",",
"gs",
")",
"for",
"r",
",",
"c",
"in",
"self",
".",
"_uc_regs",
".",
"items",
"(",
")",
":",
"if",
"r",
"in",
"self",
".",
"reg_blacklist",
":",
"continue",
"v",
"=",
"self",
".",
"_process_value",
"(",
"getattr",
"(",
"self",
".",
"state",
".",
"regs",
",",
"r",
")",
",",
"'reg'",
")",
"if",
"v",
"is",
"None",
":",
"raise",
"SimValueError",
"(",
"'setting a symbolic register'",
")",
"# l.debug('setting $%s = %#x', r, self.state.solver.eval(v))",
"uc",
".",
"reg_write",
"(",
"c",
",",
"self",
".",
"state",
".",
"solver",
".",
"eval",
"(",
"v",
")",
")",
"if",
"self",
".",
"state",
".",
"arch",
".",
"name",
"in",
"(",
"'X86'",
",",
"'AMD64'",
")",
":",
"# sync the fp clerical data",
"c3210",
"=",
"self",
".",
"state",
".",
"solver",
".",
"eval",
"(",
"self",
".",
"state",
".",
"regs",
".",
"fc3210",
")",
"top",
"=",
"self",
".",
"state",
".",
"solver",
".",
"eval",
"(",
"self",
".",
"state",
".",
"regs",
".",
"ftop",
"[",
"2",
":",
"0",
"]",
")",
"rm",
"=",
"self",
".",
"state",
".",
"solver",
".",
"eval",
"(",
"self",
".",
"state",
".",
"regs",
".",
"fpround",
"[",
"1",
":",
"0",
"]",
")",
"control",
"=",
"0x037F",
"|",
"(",
"rm",
"<<",
"10",
")",
"status",
"=",
"(",
"top",
"<<",
"11",
")",
"|",
"c3210",
"uc",
".",
"reg_write",
"(",
"unicorn",
".",
"x86_const",
".",
"UC_X86_REG_FPCW",
",",
"control",
")",
"uc",
".",
"reg_write",
"(",
"unicorn",
".",
"x86_const",
".",
"UC_X86_REG_FPSW",
",",
"status",
")",
"# we gotta convert the 64-bit doubles values to 80-bit extended precision!",
"uc_offset",
"=",
"unicorn",
".",
"x86_const",
".",
"UC_X86_REG_FP0",
"vex_offset",
"=",
"self",
".",
"state",
".",
"arch",
".",
"registers",
"[",
"'fpu_regs'",
"]",
"[",
"0",
"]",
"vex_tag_offset",
"=",
"self",
".",
"state",
".",
"arch",
".",
"registers",
"[",
"'fpu_tags'",
"]",
"[",
"0",
"]",
"tag_word",
"=",
"0",
"for",
"_",
"in",
"range",
"(",
"8",
")",
":",
"tag",
"=",
"self",
".",
"state",
".",
"solver",
".",
"eval",
"(",
"self",
".",
"state",
".",
"registers",
".",
"load",
"(",
"vex_tag_offset",
",",
"size",
"=",
"1",
")",
")",
"tag_word",
"<<=",
"2",
"if",
"tag",
"==",
"0",
":",
"tag_word",
"|=",
"3",
"# unicorn doesn't care about any value other than 3 for setting",
"else",
":",
"val",
"=",
"self",
".",
"_process_value",
"(",
"self",
".",
"state",
".",
"registers",
".",
"load",
"(",
"vex_offset",
",",
"size",
"=",
"8",
")",
",",
"'reg'",
")",
"if",
"val",
"is",
"None",
":",
"raise",
"SimValueError",
"(",
"'setting a symbolic fp register'",
")",
"val",
"=",
"self",
".",
"state",
".",
"solver",
".",
"eval",
"(",
"val",
")",
"sign",
"=",
"bool",
"(",
"val",
"&",
"0x8000000000000000",
")",
"exponent",
"=",
"(",
"val",
"&",
"0x7FF0000000000000",
")",
">>",
"52",
"mantissa",
"=",
"val",
"&",
"0x000FFFFFFFFFFFFF",
"if",
"exponent",
"not",
"in",
"(",
"0",
",",
"0x7FF",
")",
":",
"# normal value",
"exponent",
"=",
"exponent",
"-",
"1023",
"+",
"16383",
"mantissa",
"<<=",
"11",
"mantissa",
"|=",
"0x8000000000000000",
"# set integer part bit, implicit to double",
"elif",
"exponent",
"==",
"0",
":",
"# zero or subnormal value",
"mantissa",
"=",
"0",
"elif",
"exponent",
"==",
"0x7FF",
":",
"# nan or infinity",
"exponent",
"=",
"0x7FFF",
"if",
"mantissa",
"!=",
"0",
":",
"mantissa",
"=",
"0x8000000000000000",
"else",
":",
"mantissa",
"=",
"0xFFFFFFFFFFFFFFFF",
"if",
"sign",
":",
"exponent",
"|=",
"0x8000",
"uc",
".",
"reg_write",
"(",
"uc_offset",
",",
"(",
"exponent",
",",
"mantissa",
")",
")",
"uc_offset",
"+=",
"1",
"vex_offset",
"+=",
"8",
"vex_tag_offset",
"+=",
"1",
"uc",
".",
"reg_write",
"(",
"unicorn",
".",
"x86_const",
".",
"UC_X86_REG_FPTAG",
",",
"tag_word",
")"
] | 46.305882 | 21.976471 |
def get_all_items_of_invoice(self, invoice_id):
    """
    Retrieve every item that belongs to the given invoice.

    Walks through all result pages until the full set is collected, so if
    the rate limit is exceeded an exception is raised and nothing is
    returned.

    :param invoice_id: the invoice id
    :return: list
    """
    page_kwargs = {'invoice_id': invoice_id}
    return self._iterate_through_pages(
        get_function=self.get_items_of_invoice_per_page,
        resource=INVOICE_ITEMS,
        **page_kwargs
    )
"def",
"get_all_items_of_invoice",
"(",
"self",
",",
"invoice_id",
")",
":",
"return",
"self",
".",
"_iterate_through_pages",
"(",
"get_function",
"=",
"self",
".",
"get_items_of_invoice_per_page",
",",
"resource",
"=",
"INVOICE_ITEMS",
",",
"*",
"*",
"{",
"'invoice_id'",
":",
"invoice_id",
"}",
")"
] | 36.214286 | 15.071429 |
def call():
    """Execute command line helper."""
    args = get_arguments()

    # Translate verbosity flags into a logging level.
    if args.debug:
        level = logging.DEBUG
    elif args.quiet:
        level = logging.WARN
    else:
        level = logging.INFO
    setup_logging(level)

    skybell = None

    try:
        # Log in and fetch the device list up front.
        skybell = skybellpy.Skybell(username=args.username,
                                    password=args.password,
                                    get_devices=True)

        def _show(dev, suffix=''):
            # One-line device description, optionally with a suffix.
            _LOGGER.info("%s%s",
                         dev.desc, suffix)

        def _each_device(device_ids, action):
            # Resolve each requested id and apply *action*; warn on misses.
            for device_id in device_ids or []:
                device = skybell.get_device(device_id)
                if device:
                    action(device, device_id)
                else:
                    _LOGGER.warning(
                        "Could not find device with id: %s", device_id)

        # Output raw device JSON.
        def _dump_json(device, device_id):
            # pylint: disable=protected-access
            _LOGGER.info(device_id + " JSON:\n" +
                         json.dumps(device._device_json, sort_keys=True,
                                    indent=4, separators=(',', ': ')))
        _each_device(args.json, _dump_json)

        # Print out all devices.
        if args.devices:
            for device in skybell.get_devices():
                _show(device)

        # Print out specific devices by device id.
        _each_device(args.device, lambda dev, _id: _show(dev))

        # Print out last motion event.
        _each_device(
            args.activity_json,
            lambda dev, _id: _LOGGER.info(dev.latest(CONST.EVENT_MOTION)))

        # Print out avatar image.
        _each_device(args.avatar_image,
                     lambda dev, _id: _LOGGER.info(dev.image))

        # Print out last motion event image.
        _each_device(args.activity_image,
                     lambda dev, _id: _LOGGER.info(dev.activity_image))

    except SkybellException as exc:
        _LOGGER.error(exc)
"def",
"call",
"(",
")",
":",
"args",
"=",
"get_arguments",
"(",
")",
"# Set up logging",
"if",
"args",
".",
"debug",
":",
"log_level",
"=",
"logging",
".",
"DEBUG",
"elif",
"args",
".",
"quiet",
":",
"log_level",
"=",
"logging",
".",
"WARN",
"else",
":",
"log_level",
"=",
"logging",
".",
"INFO",
"setup_logging",
"(",
"log_level",
")",
"skybell",
"=",
"None",
"try",
":",
"# Create skybellpy instance.",
"skybell",
"=",
"skybellpy",
".",
"Skybell",
"(",
"username",
"=",
"args",
".",
"username",
",",
"password",
"=",
"args",
".",
"password",
",",
"get_devices",
"=",
"True",
")",
"# # Set setting",
"# for setting in args.set or []:",
"# keyval = setting.split(\"=\")",
"# if skybell.set_setting(keyval[0], keyval[1]):",
"# _LOGGER.info(\"Setting %s changed to %s\", keyval[0], keyval[1])",
"# Output Json",
"for",
"device_id",
"in",
"args",
".",
"json",
"or",
"[",
"]",
":",
"device",
"=",
"skybell",
".",
"get_device",
"(",
"device_id",
")",
"if",
"device",
":",
"# pylint: disable=protected-access",
"_LOGGER",
".",
"info",
"(",
"device_id",
"+",
"\" JSON:\\n\"",
"+",
"json",
".",
"dumps",
"(",
"device",
".",
"_device_json",
",",
"sort_keys",
"=",
"True",
",",
"indent",
"=",
"4",
",",
"separators",
"=",
"(",
"','",
",",
"': '",
")",
")",
")",
"else",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Could not find device with id: %s\"",
",",
"device_id",
")",
"# Print",
"def",
"_device_print",
"(",
"dev",
",",
"append",
"=",
"''",
")",
":",
"_LOGGER",
".",
"info",
"(",
"\"%s%s\"",
",",
"dev",
".",
"desc",
",",
"append",
")",
"# Print out all devices.",
"if",
"args",
".",
"devices",
":",
"for",
"device",
"in",
"skybell",
".",
"get_devices",
"(",
")",
":",
"_device_print",
"(",
"device",
")",
"# Print out specific devices by device id.",
"if",
"args",
".",
"device",
":",
"for",
"device_id",
"in",
"args",
".",
"device",
":",
"device",
"=",
"skybell",
".",
"get_device",
"(",
"device_id",
")",
"if",
"device",
":",
"_device_print",
"(",
"device",
")",
"else",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Could not find device with id: %s\"",
",",
"device_id",
")",
"# Print out last motion event",
"if",
"args",
".",
"activity_json",
":",
"for",
"device_id",
"in",
"args",
".",
"activity_json",
":",
"device",
"=",
"skybell",
".",
"get_device",
"(",
"device_id",
")",
"if",
"device",
":",
"_LOGGER",
".",
"info",
"(",
"device",
".",
"latest",
"(",
"CONST",
".",
"EVENT_MOTION",
")",
")",
"else",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Could not find device with id: %s\"",
",",
"device_id",
")",
"# Print out avatar image",
"if",
"args",
".",
"avatar_image",
":",
"for",
"device_id",
"in",
"args",
".",
"avatar_image",
":",
"device",
"=",
"skybell",
".",
"get_device",
"(",
"device_id",
")",
"if",
"device",
":",
"_LOGGER",
".",
"info",
"(",
"device",
".",
"image",
")",
"else",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Could not find device with id: %s\"",
",",
"device_id",
")",
"# Print out last motion event image",
"if",
"args",
".",
"activity_image",
":",
"for",
"device_id",
"in",
"args",
".",
"activity_image",
":",
"device",
"=",
"skybell",
".",
"get_device",
"(",
"device_id",
")",
"if",
"device",
":",
"_LOGGER",
".",
"info",
"(",
"device",
".",
"activity_image",
")",
"else",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Could not find device with id: %s\"",
",",
"device_id",
")",
"except",
"SkybellException",
"as",
"exc",
":",
"_LOGGER",
".",
"error",
"(",
"exc",
")"
] | 31.666667 | 19.229167 |
def implements(obj, protocol):
    """Return True if instance *obj* implements *protocol* (or AnyType does).

    :raises TypeError: if *obj* is a class rather than an instance.
    """
    if isinstance(obj, type):
        raise TypeError("First argument to implements must be an instance. "
                        "Got %r." % obj)
    if isinstance(obj, protocol):
        return True
    return issubclass(AnyType, protocol)
"def",
"implements",
"(",
"obj",
",",
"protocol",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"type",
")",
":",
"raise",
"TypeError",
"(",
"\"First argument to implements must be an instance. \"",
"\"Got %r.\"",
"%",
"obj",
")",
"return",
"isinstance",
"(",
"obj",
",",
"protocol",
")",
"or",
"issubclass",
"(",
"AnyType",
",",
"protocol",
")"
] | 50.166667 | 14.333333 |
def pretty_list(rtlst, header, sortBy=0, borders=False):
    """Render *rtlst* as an aligned text table that fits the terminal.

    NOTE: sorts the caller's list in place before prepending *header*.

    :param rtlst: list of string tuples, one tuple per data row
    :param header: list holding the header tuple(s), prepended to the rows
    :param sortBy: column index used to sort the data rows (default 0)
    :param borders: if True, use '|' separators and insert a '-' rule line
    :return: the rendered table as a single newline-joined string
    """
    if borders:
        _space = "|"
    else:
        _space = " "
    # Windows has a fat terminal border
    _spacelen = len(_space) * (len(header) - 1) + (10 if WINDOWS else 0)
    _croped = False
    # Sort correctly (in place, by the sortBy column)
    rtlst.sort(key=lambda x: x[sortBy])
    # Append tag
    rtlst = header + rtlst
    # Detect column's width: widest cell per column
    colwidth = [max([len(y) for y in x]) for x in zip(*rtlst)]
    # Make text fit in box (if required)
    width = get_terminal_width()
    if conf.auto_crop_tables and width:
        width = width - _spacelen
        while sum(colwidth) > width:
            _croped = True
            # Needs to be cropped
            # Get the longest row
            i = colwidth.index(max(colwidth))
            # Get all elements of this row
            row = [len(x[i]) for x in rtlst]
            # Get biggest element of this row: biggest of the array
            j = row.index(max(row))
            # Re-build column tuple with the edited element
            t = list(rtlst[j])
            t[i] = t[i][:-2] + "_"  # drop two chars, mark the crop with '_'
            rtlst[j] = tuple(t)
            # Update max size
            row[j] = len(t[i])
            colwidth[i] = max(row)
    if _croped:
        log_runtime.info("Table cropped to fit the terminal (conf.auto_crop_tables==True)")  # noqa: E501
    # Generate padding scheme: one left-justified '%-Nd'-style slot per column
    fmt = _space.join(["%%-%ds" % x for x in colwidth])
    # Append separation line if needed
    if borders:
        rtlst.insert(1, tuple("-" * x for x in colwidth))
    # Compile
    rt = "\n".join(((fmt % x).strip() for x in rtlst))
    return rt
"def",
"pretty_list",
"(",
"rtlst",
",",
"header",
",",
"sortBy",
"=",
"0",
",",
"borders",
"=",
"False",
")",
":",
"if",
"borders",
":",
"_space",
"=",
"\"|\"",
"else",
":",
"_space",
"=",
"\" \"",
"# Windows has a fat terminal border",
"_spacelen",
"=",
"len",
"(",
"_space",
")",
"*",
"(",
"len",
"(",
"header",
")",
"-",
"1",
")",
"+",
"(",
"10",
"if",
"WINDOWS",
"else",
"0",
")",
"_croped",
"=",
"False",
"# Sort correctly",
"rtlst",
".",
"sort",
"(",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"sortBy",
"]",
")",
"# Append tag",
"rtlst",
"=",
"header",
"+",
"rtlst",
"# Detect column's width",
"colwidth",
"=",
"[",
"max",
"(",
"[",
"len",
"(",
"y",
")",
"for",
"y",
"in",
"x",
"]",
")",
"for",
"x",
"in",
"zip",
"(",
"*",
"rtlst",
")",
"]",
"# Make text fit in box (if required)",
"width",
"=",
"get_terminal_width",
"(",
")",
"if",
"conf",
".",
"auto_crop_tables",
"and",
"width",
":",
"width",
"=",
"width",
"-",
"_spacelen",
"while",
"sum",
"(",
"colwidth",
")",
">",
"width",
":",
"_croped",
"=",
"True",
"# Needs to be cropped",
"# Get the longest row",
"i",
"=",
"colwidth",
".",
"index",
"(",
"max",
"(",
"colwidth",
")",
")",
"# Get all elements of this row",
"row",
"=",
"[",
"len",
"(",
"x",
"[",
"i",
"]",
")",
"for",
"x",
"in",
"rtlst",
"]",
"# Get biggest element of this row: biggest of the array",
"j",
"=",
"row",
".",
"index",
"(",
"max",
"(",
"row",
")",
")",
"# Re-build column tuple with the edited element",
"t",
"=",
"list",
"(",
"rtlst",
"[",
"j",
"]",
")",
"t",
"[",
"i",
"]",
"=",
"t",
"[",
"i",
"]",
"[",
":",
"-",
"2",
"]",
"+",
"\"_\"",
"rtlst",
"[",
"j",
"]",
"=",
"tuple",
"(",
"t",
")",
"# Update max size",
"row",
"[",
"j",
"]",
"=",
"len",
"(",
"t",
"[",
"i",
"]",
")",
"colwidth",
"[",
"i",
"]",
"=",
"max",
"(",
"row",
")",
"if",
"_croped",
":",
"log_runtime",
".",
"info",
"(",
"\"Table cropped to fit the terminal (conf.auto_crop_tables==True)\"",
")",
"# noqa: E501",
"# Generate padding scheme",
"fmt",
"=",
"_space",
".",
"join",
"(",
"[",
"\"%%-%ds\"",
"%",
"x",
"for",
"x",
"in",
"colwidth",
"]",
")",
"# Append separation line if needed",
"if",
"borders",
":",
"rtlst",
".",
"insert",
"(",
"1",
",",
"tuple",
"(",
"\"-\"",
"*",
"x",
"for",
"x",
"in",
"colwidth",
")",
")",
"# Compile",
"rt",
"=",
"\"\\n\"",
".",
"join",
"(",
"(",
"(",
"fmt",
"%",
"x",
")",
".",
"strip",
"(",
")",
"for",
"x",
"in",
"rtlst",
")",
")",
"return",
"rt"
] | 36.533333 | 14.422222 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.