code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def post_process(self):
""" Apply last 2D transforms"""
self.image.putdata(self.pixels)
self.image = self.image.transpose(Image.ROTATE_90) | def function[post_process, parameter[self]]:
constant[ Apply last 2D transforms]
call[name[self].image.putdata, parameter[name[self].pixels]]
name[self].image assign[=] call[name[self].image.transpose, parameter[name[Image].ROTATE_90]] | keyword[def] identifier[post_process] ( identifier[self] ):
literal[string]
identifier[self] . identifier[image] . identifier[putdata] ( identifier[self] . identifier[pixels] )
identifier[self] . identifier[image] = identifier[self] . identifier[image] . identifier[transpose] ( identifier[Image] . identifier[ROTATE_90] ) | def post_process(self):
""" Apply last 2D transforms"""
self.image.putdata(self.pixels)
self.image = self.image.transpose(Image.ROTATE_90) |
def requestCheckRegularDocker(origAppliance, registryName, imageName, tag):
"""
Checks to see if an image exists using the requests library.
URL is based on the docker v2 schema described here:
https://docs.docker.com/registry/spec/manifest-v2-2/
This has the following format:
https://{websitehostname}.io/v2/{repo}/manifests/{tag}
Does not work with the official (docker.io) site, because they require an OAuth token, so a
separate check is done for docker.io images.
:param str origAppliance: The full url of the docker image originally
specified by the user (or the default).
e.g. "quay.io/ucsc_cgl/toil:latest"
:param str registryName: The url of a docker image's registry. e.g. "quay.io"
:param str imageName: The image, including path and excluding the tag. e.g. "ucsc_cgl/toil"
:param str tag: The tag used at that docker image's registry. e.g. "latest"
:return: Return True if match found. Raise otherwise.
"""
ioURL = 'https://{webhost}/v2/{pathName}/manifests/{tag}' \
''.format(webhost=registryName, pathName=imageName, tag=tag)
response = requests.head(ioURL)
if not response.ok:
raise ApplianceImageNotFound(origAppliance, ioURL, response.status_code)
else:
return origAppliance | def function[requestCheckRegularDocker, parameter[origAppliance, registryName, imageName, tag]]:
constant[
Checks to see if an image exists using the requests library.
URL is based on the docker v2 schema described here:
https://docs.docker.com/registry/spec/manifest-v2-2/
This has the following format:
https://{websitehostname}.io/v2/{repo}/manifests/{tag}
Does not work with the official (docker.io) site, because they require an OAuth token, so a
separate check is done for docker.io images.
:param str origAppliance: The full url of the docker image originally
specified by the user (or the default).
e.g. "quay.io/ucsc_cgl/toil:latest"
:param str registryName: The url of a docker image's registry. e.g. "quay.io"
:param str imageName: The image, including path and excluding the tag. e.g. "ucsc_cgl/toil"
:param str tag: The tag used at that docker image's registry. e.g. "latest"
:return: Return True if match found. Raise otherwise.
]
variable[ioURL] assign[=] call[constant[https://{webhost}/v2/{pathName}/manifests/{tag}].format, parameter[]]
variable[response] assign[=] call[name[requests].head, parameter[name[ioURL]]]
if <ast.UnaryOp object at 0x7da2054a71c0> begin[:]
<ast.Raise object at 0x7da2054a5510> | keyword[def] identifier[requestCheckRegularDocker] ( identifier[origAppliance] , identifier[registryName] , identifier[imageName] , identifier[tag] ):
literal[string]
identifier[ioURL] = literal[string] literal[string] . identifier[format] ( identifier[webhost] = identifier[registryName] , identifier[pathName] = identifier[imageName] , identifier[tag] = identifier[tag] )
identifier[response] = identifier[requests] . identifier[head] ( identifier[ioURL] )
keyword[if] keyword[not] identifier[response] . identifier[ok] :
keyword[raise] identifier[ApplianceImageNotFound] ( identifier[origAppliance] , identifier[ioURL] , identifier[response] . identifier[status_code] )
keyword[else] :
keyword[return] identifier[origAppliance] | def requestCheckRegularDocker(origAppliance, registryName, imageName, tag):
"""
Checks to see if an image exists using the requests library.
URL is based on the docker v2 schema described here:
https://docs.docker.com/registry/spec/manifest-v2-2/
This has the following format:
https://{websitehostname}.io/v2/{repo}/manifests/{tag}
Does not work with the official (docker.io) site, because they require an OAuth token, so a
separate check is done for docker.io images.
:param str origAppliance: The full url of the docker image originally
specified by the user (or the default).
e.g. "quay.io/ucsc_cgl/toil:latest"
:param str registryName: The url of a docker image's registry. e.g. "quay.io"
:param str imageName: The image, including path and excluding the tag. e.g. "ucsc_cgl/toil"
:param str tag: The tag used at that docker image's registry. e.g. "latest"
:return: Return True if match found. Raise otherwise.
"""
ioURL = 'https://{webhost}/v2/{pathName}/manifests/{tag}'.format(webhost=registryName, pathName=imageName, tag=tag)
response = requests.head(ioURL)
if not response.ok:
raise ApplianceImageNotFound(origAppliance, ioURL, response.status_code) # depends on [control=['if'], data=[]]
else:
return origAppliance |
def draw_tree(ax, tx, rmargin=.3,
treecolor="k", leafcolor="k", supportcolor="k",
outgroup=None, reroot=True, gffdir=None, sizes=None,
trunc_name=None, SH=None, scutoff=0, barcodefile=None,
leafcolorfile=None, leaffont=12):
"""
main function for drawing phylogenetic tree
"""
t = Tree(tx)
if reroot:
if outgroup:
R = t.get_common_ancestor(*outgroup)
else:
# Calculate the midpoint node
R = t.get_midpoint_outgroup()
if R != t:
t.set_outgroup(R)
farthest, max_dist = t.get_farthest_leaf()
margin = .05
xstart = margin
ystart = 1 - margin
canvas = 1 - rmargin - 2 * margin
tip = .005
# scale the tree
scale = canvas / max_dist
num_leaves = len(t.get_leaf_names())
yinterval = canvas / (num_leaves + 1)
# get exons structures, if any
structures = {}
if gffdir:
gffiles = glob("{0}/*.gff*".format(gffdir))
setups, ratio = get_setups(gffiles, canvas=rmargin / 2, noUTR=True)
structures = dict((a, (b, c)) for a, b, c in setups)
if sizes:
sizes = Sizes(sizes).mapping
if barcodefile:
barcodemap = DictFile(barcodefile, delimiter="\t")
if leafcolorfile:
leafcolors = DictFile(leafcolorfile, delimiter="\t")
coords = {}
i = 0
for n in t.traverse("postorder"):
dist = n.get_distance(t)
xx = xstart + scale * dist
if n.is_leaf():
yy = ystart - i * yinterval
i += 1
if trunc_name:
name = truncate_name(n.name, rule=trunc_name)
else:
name = n.name
if barcodefile:
name = decode_name(name, barcodemap)
sname = name.replace("_", "-")
try:
lc = leafcolors[n.name]
except Exception:
lc = leafcolor
else:
# if color is given as "R,G,B"
if "," in lc:
lc = map(float, lc.split(","))
ax.text(xx + tip, yy, sname, va="center",
fontstyle="italic", size=leaffont, color=lc)
gname = n.name.split("_")[0]
if gname in structures:
mrnabed, cdsbeds = structures[gname]
ExonGlyph(ax, 1 - rmargin / 2, yy, mrnabed, cdsbeds,
align="right", ratio=ratio)
if sizes and gname in sizes:
size = sizes[gname]
size = size / 3 - 1 # base pair converted to amino acid
size = "{0}aa".format(size)
ax.text(1 - rmargin / 2 + tip, yy, size, size=leaffont)
else:
children = [coords[x] for x in n.get_children()]
children_x, children_y = zip(*children)
min_y, max_y = min(children_y), max(children_y)
# plot the vertical bar
ax.plot((xx, xx), (min_y, max_y), "-", color=treecolor)
# plot the horizontal bar
for cx, cy in children:
ax.plot((xx, cx), (cy, cy), "-", color=treecolor)
yy = sum(children_y) * 1. / len(children_y)
support = n.support
if support > 1:
support = support / 100.
if not n.is_root():
if support > scutoff / 100.:
ax.text(xx, yy+.005, "{0:d}".format(int(abs(support * 100))),
ha="right", size=leaffont, color=supportcolor)
coords[n] = (xx, yy)
# scale bar
br = .1
x1 = xstart + .1
x2 = x1 + br * scale
yy = ystart - i * yinterval
ax.plot([x1, x1], [yy - tip, yy + tip], "-", color=treecolor)
ax.plot([x2, x2], [yy - tip, yy + tip], "-", color=treecolor)
ax.plot([x1, x2], [yy, yy], "-", color=treecolor)
ax.text((x1 + x2) / 2, yy - tip, "{0:g}".format(br),
va="top", ha="center", size=leaffont, color=treecolor)
if SH is not None:
xs = x1
ys = (margin + yy) / 2.
ax.text(xs, ys, "SH test against ref tree: {0}"
.format(SH), ha="left", size=leaffont, color="g")
normalize_axes(ax) | def function[draw_tree, parameter[ax, tx, rmargin, treecolor, leafcolor, supportcolor, outgroup, reroot, gffdir, sizes, trunc_name, SH, scutoff, barcodefile, leafcolorfile, leaffont]]:
constant[
main function for drawing phylogenetic tree
]
variable[t] assign[=] call[name[Tree], parameter[name[tx]]]
if name[reroot] begin[:]
if name[outgroup] begin[:]
variable[R] assign[=] call[name[t].get_common_ancestor, parameter[<ast.Starred object at 0x7da207f9a260>]]
if compare[name[R] not_equal[!=] name[t]] begin[:]
call[name[t].set_outgroup, parameter[name[R]]]
<ast.Tuple object at 0x7da18f722c50> assign[=] call[name[t].get_farthest_leaf, parameter[]]
variable[margin] assign[=] constant[0.05]
variable[xstart] assign[=] name[margin]
variable[ystart] assign[=] binary_operation[constant[1] - name[margin]]
variable[canvas] assign[=] binary_operation[binary_operation[constant[1] - name[rmargin]] - binary_operation[constant[2] * name[margin]]]
variable[tip] assign[=] constant[0.005]
variable[scale] assign[=] binary_operation[name[canvas] / name[max_dist]]
variable[num_leaves] assign[=] call[name[len], parameter[call[name[t].get_leaf_names, parameter[]]]]
variable[yinterval] assign[=] binary_operation[name[canvas] / binary_operation[name[num_leaves] + constant[1]]]
variable[structures] assign[=] dictionary[[], []]
if name[gffdir] begin[:]
variable[gffiles] assign[=] call[name[glob], parameter[call[constant[{0}/*.gff*].format, parameter[name[gffdir]]]]]
<ast.Tuple object at 0x7da207f98370> assign[=] call[name[get_setups], parameter[name[gffiles]]]
variable[structures] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da207f981c0>]]
if name[sizes] begin[:]
variable[sizes] assign[=] call[name[Sizes], parameter[name[sizes]]].mapping
if name[barcodefile] begin[:]
variable[barcodemap] assign[=] call[name[DictFile], parameter[name[barcodefile]]]
if name[leafcolorfile] begin[:]
variable[leafcolors] assign[=] call[name[DictFile], parameter[name[leafcolorfile]]]
variable[coords] assign[=] dictionary[[], []]
variable[i] assign[=] constant[0]
for taget[name[n]] in starred[call[name[t].traverse, parameter[constant[postorder]]]] begin[:]
variable[dist] assign[=] call[name[n].get_distance, parameter[name[t]]]
variable[xx] assign[=] binary_operation[name[xstart] + binary_operation[name[scale] * name[dist]]]
if call[name[n].is_leaf, parameter[]] begin[:]
variable[yy] assign[=] binary_operation[name[ystart] - binary_operation[name[i] * name[yinterval]]]
<ast.AugAssign object at 0x7da207f98cd0>
if name[trunc_name] begin[:]
variable[name] assign[=] call[name[truncate_name], parameter[name[n].name]]
if name[barcodefile] begin[:]
variable[name] assign[=] call[name[decode_name], parameter[name[name], name[barcodemap]]]
variable[sname] assign[=] call[name[name].replace, parameter[constant[_], constant[-]]]
<ast.Try object at 0x7da207f99c00>
call[name[ax].text, parameter[binary_operation[name[xx] + name[tip]], name[yy], name[sname]]]
variable[gname] assign[=] call[call[name[n].name.split, parameter[constant[_]]]][constant[0]]
if compare[name[gname] in name[structures]] begin[:]
<ast.Tuple object at 0x7da20c76e530> assign[=] call[name[structures]][name[gname]]
call[name[ExonGlyph], parameter[name[ax], binary_operation[constant[1] - binary_operation[name[rmargin] / constant[2]]], name[yy], name[mrnabed], name[cdsbeds]]]
if <ast.BoolOp object at 0x7da20c76c3a0> begin[:]
variable[size] assign[=] call[name[sizes]][name[gname]]
variable[size] assign[=] binary_operation[binary_operation[name[size] / constant[3]] - constant[1]]
variable[size] assign[=] call[constant[{0}aa].format, parameter[name[size]]]
call[name[ax].text, parameter[binary_operation[binary_operation[constant[1] - binary_operation[name[rmargin] / constant[2]]] + name[tip]], name[yy], name[size]]]
call[name[coords]][name[n]] assign[=] tuple[[<ast.Name object at 0x7da20c76f250>, <ast.Name object at 0x7da20c76dcc0>]]
variable[br] assign[=] constant[0.1]
variable[x1] assign[=] binary_operation[name[xstart] + constant[0.1]]
variable[x2] assign[=] binary_operation[name[x1] + binary_operation[name[br] * name[scale]]]
variable[yy] assign[=] binary_operation[name[ystart] - binary_operation[name[i] * name[yinterval]]]
call[name[ax].plot, parameter[list[[<ast.Name object at 0x7da1b09bec20>, <ast.Name object at 0x7da1b09bc790>]], list[[<ast.BinOp object at 0x7da1b09bfb20>, <ast.BinOp object at 0x7da1b09bf730>]], constant[-]]]
call[name[ax].plot, parameter[list[[<ast.Name object at 0x7da1b09be980>, <ast.Name object at 0x7da1b09be6e0>]], list[[<ast.BinOp object at 0x7da1b09bf8e0>, <ast.BinOp object at 0x7da1b09bc490>]], constant[-]]]
call[name[ax].plot, parameter[list[[<ast.Name object at 0x7da1b09be380>, <ast.Name object at 0x7da1b09be860>]], list[[<ast.Name object at 0x7da1b09bc610>, <ast.Name object at 0x7da1b09bd0f0>]], constant[-]]]
call[name[ax].text, parameter[binary_operation[binary_operation[name[x1] + name[x2]] / constant[2]], binary_operation[name[yy] - name[tip]], call[constant[{0:g}].format, parameter[name[br]]]]]
if compare[name[SH] is_not constant[None]] begin[:]
variable[xs] assign[=] name[x1]
variable[ys] assign[=] binary_operation[binary_operation[name[margin] + name[yy]] / constant[2.0]]
call[name[ax].text, parameter[name[xs], name[ys], call[constant[SH test against ref tree: {0}].format, parameter[name[SH]]]]]
call[name[normalize_axes], parameter[name[ax]]] | keyword[def] identifier[draw_tree] ( identifier[ax] , identifier[tx] , identifier[rmargin] = literal[int] ,
identifier[treecolor] = literal[string] , identifier[leafcolor] = literal[string] , identifier[supportcolor] = literal[string] ,
identifier[outgroup] = keyword[None] , identifier[reroot] = keyword[True] , identifier[gffdir] = keyword[None] , identifier[sizes] = keyword[None] ,
identifier[trunc_name] = keyword[None] , identifier[SH] = keyword[None] , identifier[scutoff] = literal[int] , identifier[barcodefile] = keyword[None] ,
identifier[leafcolorfile] = keyword[None] , identifier[leaffont] = literal[int] ):
literal[string]
identifier[t] = identifier[Tree] ( identifier[tx] )
keyword[if] identifier[reroot] :
keyword[if] identifier[outgroup] :
identifier[R] = identifier[t] . identifier[get_common_ancestor] (* identifier[outgroup] )
keyword[else] :
identifier[R] = identifier[t] . identifier[get_midpoint_outgroup] ()
keyword[if] identifier[R] != identifier[t] :
identifier[t] . identifier[set_outgroup] ( identifier[R] )
identifier[farthest] , identifier[max_dist] = identifier[t] . identifier[get_farthest_leaf] ()
identifier[margin] = literal[int]
identifier[xstart] = identifier[margin]
identifier[ystart] = literal[int] - identifier[margin]
identifier[canvas] = literal[int] - identifier[rmargin] - literal[int] * identifier[margin]
identifier[tip] = literal[int]
identifier[scale] = identifier[canvas] / identifier[max_dist]
identifier[num_leaves] = identifier[len] ( identifier[t] . identifier[get_leaf_names] ())
identifier[yinterval] = identifier[canvas] /( identifier[num_leaves] + literal[int] )
identifier[structures] ={}
keyword[if] identifier[gffdir] :
identifier[gffiles] = identifier[glob] ( literal[string] . identifier[format] ( identifier[gffdir] ))
identifier[setups] , identifier[ratio] = identifier[get_setups] ( identifier[gffiles] , identifier[canvas] = identifier[rmargin] / literal[int] , identifier[noUTR] = keyword[True] )
identifier[structures] = identifier[dict] (( identifier[a] ,( identifier[b] , identifier[c] )) keyword[for] identifier[a] , identifier[b] , identifier[c] keyword[in] identifier[setups] )
keyword[if] identifier[sizes] :
identifier[sizes] = identifier[Sizes] ( identifier[sizes] ). identifier[mapping]
keyword[if] identifier[barcodefile] :
identifier[barcodemap] = identifier[DictFile] ( identifier[barcodefile] , identifier[delimiter] = literal[string] )
keyword[if] identifier[leafcolorfile] :
identifier[leafcolors] = identifier[DictFile] ( identifier[leafcolorfile] , identifier[delimiter] = literal[string] )
identifier[coords] ={}
identifier[i] = literal[int]
keyword[for] identifier[n] keyword[in] identifier[t] . identifier[traverse] ( literal[string] ):
identifier[dist] = identifier[n] . identifier[get_distance] ( identifier[t] )
identifier[xx] = identifier[xstart] + identifier[scale] * identifier[dist]
keyword[if] identifier[n] . identifier[is_leaf] ():
identifier[yy] = identifier[ystart] - identifier[i] * identifier[yinterval]
identifier[i] += literal[int]
keyword[if] identifier[trunc_name] :
identifier[name] = identifier[truncate_name] ( identifier[n] . identifier[name] , identifier[rule] = identifier[trunc_name] )
keyword[else] :
identifier[name] = identifier[n] . identifier[name]
keyword[if] identifier[barcodefile] :
identifier[name] = identifier[decode_name] ( identifier[name] , identifier[barcodemap] )
identifier[sname] = identifier[name] . identifier[replace] ( literal[string] , literal[string] )
keyword[try] :
identifier[lc] = identifier[leafcolors] [ identifier[n] . identifier[name] ]
keyword[except] identifier[Exception] :
identifier[lc] = identifier[leafcolor]
keyword[else] :
keyword[if] literal[string] keyword[in] identifier[lc] :
identifier[lc] = identifier[map] ( identifier[float] , identifier[lc] . identifier[split] ( literal[string] ))
identifier[ax] . identifier[text] ( identifier[xx] + identifier[tip] , identifier[yy] , identifier[sname] , identifier[va] = literal[string] ,
identifier[fontstyle] = literal[string] , identifier[size] = identifier[leaffont] , identifier[color] = identifier[lc] )
identifier[gname] = identifier[n] . identifier[name] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] identifier[gname] keyword[in] identifier[structures] :
identifier[mrnabed] , identifier[cdsbeds] = identifier[structures] [ identifier[gname] ]
identifier[ExonGlyph] ( identifier[ax] , literal[int] - identifier[rmargin] / literal[int] , identifier[yy] , identifier[mrnabed] , identifier[cdsbeds] ,
identifier[align] = literal[string] , identifier[ratio] = identifier[ratio] )
keyword[if] identifier[sizes] keyword[and] identifier[gname] keyword[in] identifier[sizes] :
identifier[size] = identifier[sizes] [ identifier[gname] ]
identifier[size] = identifier[size] / literal[int] - literal[int]
identifier[size] = literal[string] . identifier[format] ( identifier[size] )
identifier[ax] . identifier[text] ( literal[int] - identifier[rmargin] / literal[int] + identifier[tip] , identifier[yy] , identifier[size] , identifier[size] = identifier[leaffont] )
keyword[else] :
identifier[children] =[ identifier[coords] [ identifier[x] ] keyword[for] identifier[x] keyword[in] identifier[n] . identifier[get_children] ()]
identifier[children_x] , identifier[children_y] = identifier[zip] (* identifier[children] )
identifier[min_y] , identifier[max_y] = identifier[min] ( identifier[children_y] ), identifier[max] ( identifier[children_y] )
identifier[ax] . identifier[plot] (( identifier[xx] , identifier[xx] ),( identifier[min_y] , identifier[max_y] ), literal[string] , identifier[color] = identifier[treecolor] )
keyword[for] identifier[cx] , identifier[cy] keyword[in] identifier[children] :
identifier[ax] . identifier[plot] (( identifier[xx] , identifier[cx] ),( identifier[cy] , identifier[cy] ), literal[string] , identifier[color] = identifier[treecolor] )
identifier[yy] = identifier[sum] ( identifier[children_y] )* literal[int] / identifier[len] ( identifier[children_y] )
identifier[support] = identifier[n] . identifier[support]
keyword[if] identifier[support] > literal[int] :
identifier[support] = identifier[support] / literal[int]
keyword[if] keyword[not] identifier[n] . identifier[is_root] ():
keyword[if] identifier[support] > identifier[scutoff] / literal[int] :
identifier[ax] . identifier[text] ( identifier[xx] , identifier[yy] + literal[int] , literal[string] . identifier[format] ( identifier[int] ( identifier[abs] ( identifier[support] * literal[int] ))),
identifier[ha] = literal[string] , identifier[size] = identifier[leaffont] , identifier[color] = identifier[supportcolor] )
identifier[coords] [ identifier[n] ]=( identifier[xx] , identifier[yy] )
identifier[br] = literal[int]
identifier[x1] = identifier[xstart] + literal[int]
identifier[x2] = identifier[x1] + identifier[br] * identifier[scale]
identifier[yy] = identifier[ystart] - identifier[i] * identifier[yinterval]
identifier[ax] . identifier[plot] ([ identifier[x1] , identifier[x1] ],[ identifier[yy] - identifier[tip] , identifier[yy] + identifier[tip] ], literal[string] , identifier[color] = identifier[treecolor] )
identifier[ax] . identifier[plot] ([ identifier[x2] , identifier[x2] ],[ identifier[yy] - identifier[tip] , identifier[yy] + identifier[tip] ], literal[string] , identifier[color] = identifier[treecolor] )
identifier[ax] . identifier[plot] ([ identifier[x1] , identifier[x2] ],[ identifier[yy] , identifier[yy] ], literal[string] , identifier[color] = identifier[treecolor] )
identifier[ax] . identifier[text] (( identifier[x1] + identifier[x2] )/ literal[int] , identifier[yy] - identifier[tip] , literal[string] . identifier[format] ( identifier[br] ),
identifier[va] = literal[string] , identifier[ha] = literal[string] , identifier[size] = identifier[leaffont] , identifier[color] = identifier[treecolor] )
keyword[if] identifier[SH] keyword[is] keyword[not] keyword[None] :
identifier[xs] = identifier[x1]
identifier[ys] =( identifier[margin] + identifier[yy] )/ literal[int]
identifier[ax] . identifier[text] ( identifier[xs] , identifier[ys] , literal[string]
. identifier[format] ( identifier[SH] ), identifier[ha] = literal[string] , identifier[size] = identifier[leaffont] , identifier[color] = literal[string] )
identifier[normalize_axes] ( identifier[ax] ) | def draw_tree(ax, tx, rmargin=0.3, treecolor='k', leafcolor='k', supportcolor='k', outgroup=None, reroot=True, gffdir=None, sizes=None, trunc_name=None, SH=None, scutoff=0, barcodefile=None, leafcolorfile=None, leaffont=12):
"""
main function for drawing phylogenetic tree
"""
t = Tree(tx)
if reroot:
if outgroup:
R = t.get_common_ancestor(*outgroup) # depends on [control=['if'], data=[]]
else:
# Calculate the midpoint node
R = t.get_midpoint_outgroup()
if R != t:
t.set_outgroup(R) # depends on [control=['if'], data=['R', 't']] # depends on [control=['if'], data=[]]
(farthest, max_dist) = t.get_farthest_leaf()
margin = 0.05
xstart = margin
ystart = 1 - margin
canvas = 1 - rmargin - 2 * margin
tip = 0.005
# scale the tree
scale = canvas / max_dist
num_leaves = len(t.get_leaf_names())
yinterval = canvas / (num_leaves + 1)
# get exons structures, if any
structures = {}
if gffdir:
gffiles = glob('{0}/*.gff*'.format(gffdir))
(setups, ratio) = get_setups(gffiles, canvas=rmargin / 2, noUTR=True)
structures = dict(((a, (b, c)) for (a, b, c) in setups)) # depends on [control=['if'], data=[]]
if sizes:
sizes = Sizes(sizes).mapping # depends on [control=['if'], data=[]]
if barcodefile:
barcodemap = DictFile(barcodefile, delimiter='\t') # depends on [control=['if'], data=[]]
if leafcolorfile:
leafcolors = DictFile(leafcolorfile, delimiter='\t') # depends on [control=['if'], data=[]]
coords = {}
i = 0
for n in t.traverse('postorder'):
dist = n.get_distance(t)
xx = xstart + scale * dist
if n.is_leaf():
yy = ystart - i * yinterval
i += 1
if trunc_name:
name = truncate_name(n.name, rule=trunc_name) # depends on [control=['if'], data=[]]
else:
name = n.name
if barcodefile:
name = decode_name(name, barcodemap) # depends on [control=['if'], data=[]]
sname = name.replace('_', '-')
try:
lc = leafcolors[n.name] # depends on [control=['try'], data=[]]
except Exception:
lc = leafcolor # depends on [control=['except'], data=[]]
else:
# if color is given as "R,G,B"
if ',' in lc:
lc = map(float, lc.split(',')) # depends on [control=['if'], data=['lc']]
ax.text(xx + tip, yy, sname, va='center', fontstyle='italic', size=leaffont, color=lc)
gname = n.name.split('_')[0]
if gname in structures:
(mrnabed, cdsbeds) = structures[gname]
ExonGlyph(ax, 1 - rmargin / 2, yy, mrnabed, cdsbeds, align='right', ratio=ratio) # depends on [control=['if'], data=['gname', 'structures']]
if sizes and gname in sizes:
size = sizes[gname]
size = size / 3 - 1 # base pair converted to amino acid
size = '{0}aa'.format(size)
ax.text(1 - rmargin / 2 + tip, yy, size, size=leaffont) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
children = [coords[x] for x in n.get_children()]
(children_x, children_y) = zip(*children)
(min_y, max_y) = (min(children_y), max(children_y))
# plot the vertical bar
ax.plot((xx, xx), (min_y, max_y), '-', color=treecolor)
# plot the horizontal bar
for (cx, cy) in children:
ax.plot((xx, cx), (cy, cy), '-', color=treecolor) # depends on [control=['for'], data=[]]
yy = sum(children_y) * 1.0 / len(children_y)
support = n.support
if support > 1:
support = support / 100.0 # depends on [control=['if'], data=['support']]
if not n.is_root():
if support > scutoff / 100.0:
ax.text(xx, yy + 0.005, '{0:d}'.format(int(abs(support * 100))), ha='right', size=leaffont, color=supportcolor) # depends on [control=['if'], data=['support']] # depends on [control=['if'], data=[]]
coords[n] = (xx, yy) # depends on [control=['for'], data=['n']]
# scale bar
br = 0.1
x1 = xstart + 0.1
x2 = x1 + br * scale
yy = ystart - i * yinterval
ax.plot([x1, x1], [yy - tip, yy + tip], '-', color=treecolor)
ax.plot([x2, x2], [yy - tip, yy + tip], '-', color=treecolor)
ax.plot([x1, x2], [yy, yy], '-', color=treecolor)
ax.text((x1 + x2) / 2, yy - tip, '{0:g}'.format(br), va='top', ha='center', size=leaffont, color=treecolor)
if SH is not None:
xs = x1
ys = (margin + yy) / 2.0
ax.text(xs, ys, 'SH test against ref tree: {0}'.format(SH), ha='left', size=leaffont, color='g') # depends on [control=['if'], data=['SH']]
normalize_axes(ax) |
def add_member(self, host_name, hostgroup_name):
"""Add a host string to a hostgroup member
if the host group do not exist, create it
:param host_name: host name
:type host_name: str
:param hostgroup_name:hostgroup name
:type hostgroup_name: str
:return: None
"""
hostgroup = self.find_by_name(hostgroup_name)
if not hostgroup:
hostgroup = Hostgroup({'hostgroup_name': hostgroup_name,
'alias': hostgroup_name,
'members': host_name})
self.add(hostgroup)
else:
hostgroup.add_members(host_name) | def function[add_member, parameter[self, host_name, hostgroup_name]]:
constant[Add a host string to a hostgroup member
if the host group do not exist, create it
:param host_name: host name
:type host_name: str
:param hostgroup_name:hostgroup name
:type hostgroup_name: str
:return: None
]
variable[hostgroup] assign[=] call[name[self].find_by_name, parameter[name[hostgroup_name]]]
if <ast.UnaryOp object at 0x7da18f722380> begin[:]
variable[hostgroup] assign[=] call[name[Hostgroup], parameter[dictionary[[<ast.Constant object at 0x7da18f720850>, <ast.Constant object at 0x7da18f722080>, <ast.Constant object at 0x7da18f721480>], [<ast.Name object at 0x7da18f7214b0>, <ast.Name object at 0x7da18f7221a0>, <ast.Name object at 0x7da18f721ff0>]]]]
call[name[self].add, parameter[name[hostgroup]]] | keyword[def] identifier[add_member] ( identifier[self] , identifier[host_name] , identifier[hostgroup_name] ):
literal[string]
identifier[hostgroup] = identifier[self] . identifier[find_by_name] ( identifier[hostgroup_name] )
keyword[if] keyword[not] identifier[hostgroup] :
identifier[hostgroup] = identifier[Hostgroup] ({ literal[string] : identifier[hostgroup_name] ,
literal[string] : identifier[hostgroup_name] ,
literal[string] : identifier[host_name] })
identifier[self] . identifier[add] ( identifier[hostgroup] )
keyword[else] :
identifier[hostgroup] . identifier[add_members] ( identifier[host_name] ) | def add_member(self, host_name, hostgroup_name):
"""Add a host string to a hostgroup member
if the host group do not exist, create it
:param host_name: host name
:type host_name: str
:param hostgroup_name:hostgroup name
:type hostgroup_name: str
:return: None
"""
hostgroup = self.find_by_name(hostgroup_name)
if not hostgroup:
hostgroup = Hostgroup({'hostgroup_name': hostgroup_name, 'alias': hostgroup_name, 'members': host_name})
self.add(hostgroup) # depends on [control=['if'], data=[]]
else:
hostgroup.add_members(host_name) |
def synchronized(name, source,
delete=False,
force=False,
update=False,
passwordfile=None,
exclude=None,
excludefrom=None,
prepare=False,
dryrun=False,
additional_opts=None):
'''
Guarantees that the source directory is always copied to the target.
name
Name of the target directory.
source
Source directory.
prepare
Create destination directory if it does not exists.
delete
Delete extraneous files from the destination dirs (True or False)
force
Force deletion of dirs even if not empty
update
Skip files that are newer on the receiver (True or False)
passwordfile
Read daemon-access password from the file (path)
exclude
Exclude files, that matches pattern.
excludefrom
Read exclude patterns from the file (path)
dryrun
Perform a trial run with no changes made. Is the same as
doing test=True
.. versionadded:: 2016.3.1
additional_opts
Pass additional options to rsync, should be included as a list.
.. versionadded:: 2018.3.0
'''
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
if not os.path.exists(name) and not force and not prepare:
ret['result'] = False
ret['comment'] = "Destination directory {dest} was not found.".format(dest=name)
else:
if not os.path.exists(name) and prepare:
os.makedirs(name)
if __opts__['test']:
dryrun = True
result = __salt__['rsync.rsync'](source, name, delete=delete,
force=force, update=update,
passwordfile=passwordfile,
exclude=exclude,
excludefrom=excludefrom,
dryrun=dryrun,
additional_opts=additional_opts)
if __opts__['test'] or dryrun:
ret['result'] = None
ret['comment'] = _get_summary(result['stdout'])
return ret
# Failed
if result.get('retcode'):
ret['result'] = False
ret['comment'] = result['stderr']
# Changed
elif _get_changes(result['stdout'])['changed']:
ret['comment'] = _get_summary(result['stdout'])
ret['changes'] = _get_changes(result['stdout'])
del ret['changes']['changed'] # Don't need to print the boolean
# Clean
else:
ret['comment'] = _get_summary(result['stdout'])
ret['changes'] = {}
return ret | def function[synchronized, parameter[name, source, delete, force, update, passwordfile, exclude, excludefrom, prepare, dryrun, additional_opts]]:
constant[
Guarantees that the source directory is always copied to the target.
name
Name of the target directory.
source
Source directory.
prepare
Create destination directory if it does not exists.
delete
Delete extraneous files from the destination dirs (True or False)
force
Force deletion of dirs even if not empty
update
Skip files that are newer on the receiver (True or False)
passwordfile
Read daemon-access password from the file (path)
exclude
Exclude files, that matches pattern.
excludefrom
Read exclude patterns from the file (path)
dryrun
Perform a trial run with no changes made. Is the same as
doing test=True
.. versionadded:: 2016.3.1
additional_opts
Pass additional options to rsync, should be included as a list.
.. versionadded:: 2018.3.0
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da20c7940d0>, <ast.Constant object at 0x7da20c7947f0>, <ast.Constant object at 0x7da20c795930>, <ast.Constant object at 0x7da20c7969b0>], [<ast.Name object at 0x7da20c794940>, <ast.Dict object at 0x7da20c794ee0>, <ast.Constant object at 0x7da20c7965f0>, <ast.Constant object at 0x7da20c796260>]]
if <ast.BoolOp object at 0x7da20c794fa0> begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] call[constant[Destination directory {dest} was not found.].format, parameter[]]
return[name[ret]] | keyword[def] identifier[synchronized] ( identifier[name] , identifier[source] ,
identifier[delete] = keyword[False] ,
identifier[force] = keyword[False] ,
identifier[update] = keyword[False] ,
identifier[passwordfile] = keyword[None] ,
identifier[exclude] = keyword[None] ,
identifier[excludefrom] = keyword[None] ,
identifier[prepare] = keyword[False] ,
identifier[dryrun] = keyword[False] ,
identifier[additional_opts] = keyword[None] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] , literal[string] :{}, literal[string] : keyword[True] , literal[string] : literal[string] }
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[name] ) keyword[and] keyword[not] identifier[force] keyword[and] keyword[not] identifier[prepare] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[dest] = identifier[name] )
keyword[else] :
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[name] ) keyword[and] identifier[prepare] :
identifier[os] . identifier[makedirs] ( identifier[name] )
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[dryrun] = keyword[True]
identifier[result] = identifier[__salt__] [ literal[string] ]( identifier[source] , identifier[name] , identifier[delete] = identifier[delete] ,
identifier[force] = identifier[force] , identifier[update] = identifier[update] ,
identifier[passwordfile] = identifier[passwordfile] ,
identifier[exclude] = identifier[exclude] ,
identifier[excludefrom] = identifier[excludefrom] ,
identifier[dryrun] = identifier[dryrun] ,
identifier[additional_opts] = identifier[additional_opts] )
keyword[if] identifier[__opts__] [ literal[string] ] keyword[or] identifier[dryrun] :
identifier[ret] [ literal[string] ]= keyword[None]
identifier[ret] [ literal[string] ]= identifier[_get_summary] ( identifier[result] [ literal[string] ])
keyword[return] identifier[ret]
keyword[if] identifier[result] . identifier[get] ( literal[string] ):
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= identifier[result] [ literal[string] ]
keyword[elif] identifier[_get_changes] ( identifier[result] [ literal[string] ])[ literal[string] ]:
identifier[ret] [ literal[string] ]= identifier[_get_summary] ( identifier[result] [ literal[string] ])
identifier[ret] [ literal[string] ]= identifier[_get_changes] ( identifier[result] [ literal[string] ])
keyword[del] identifier[ret] [ literal[string] ][ literal[string] ]
keyword[else] :
identifier[ret] [ literal[string] ]= identifier[_get_summary] ( identifier[result] [ literal[string] ])
identifier[ret] [ literal[string] ]={}
keyword[return] identifier[ret] | def synchronized(name, source, delete=False, force=False, update=False, passwordfile=None, exclude=None, excludefrom=None, prepare=False, dryrun=False, additional_opts=None):
"""
Guarantees that the source directory is always copied to the target.
name
Name of the target directory.
source
Source directory.
prepare
Create destination directory if it does not exists.
delete
Delete extraneous files from the destination dirs (True or False)
force
Force deletion of dirs even if not empty
update
Skip files that are newer on the receiver (True or False)
passwordfile
Read daemon-access password from the file (path)
exclude
Exclude files, that matches pattern.
excludefrom
Read exclude patterns from the file (path)
dryrun
Perform a trial run with no changes made. Is the same as
doing test=True
.. versionadded:: 2016.3.1
additional_opts
Pass additional options to rsync, should be included as a list.
.. versionadded:: 2018.3.0
"""
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
if not os.path.exists(name) and (not force) and (not prepare):
ret['result'] = False
ret['comment'] = 'Destination directory {dest} was not found.'.format(dest=name) # depends on [control=['if'], data=[]]
else:
if not os.path.exists(name) and prepare:
os.makedirs(name) # depends on [control=['if'], data=[]]
if __opts__['test']:
dryrun = True # depends on [control=['if'], data=[]]
result = __salt__['rsync.rsync'](source, name, delete=delete, force=force, update=update, passwordfile=passwordfile, exclude=exclude, excludefrom=excludefrom, dryrun=dryrun, additional_opts=additional_opts)
if __opts__['test'] or dryrun:
ret['result'] = None
ret['comment'] = _get_summary(result['stdout'])
return ret # depends on [control=['if'], data=[]]
# Failed
if result.get('retcode'):
ret['result'] = False
ret['comment'] = result['stderr'] # depends on [control=['if'], data=[]]
# Changed
elif _get_changes(result['stdout'])['changed']:
ret['comment'] = _get_summary(result['stdout'])
ret['changes'] = _get_changes(result['stdout'])
del ret['changes']['changed'] # Don't need to print the boolean # depends on [control=['if'], data=[]]
else:
# Clean
ret['comment'] = _get_summary(result['stdout'])
ret['changes'] = {}
return ret |
def wait_until_finished(
self, uuid, refresh_period=DEFAULT_TASK_INSTANCE_WAIT_REFRESH_PERIOD
):
"""Wait until a task instance with the given UUID is finished.
Args:
uuid (str): The UUID of the task instance to wait for.
refresh_period (float, optional): How many seconds to wait
in between checking the task's status. Defaults to 5
seconds.
Returns:
:class:`saltant.models.base_task_instance.BaseTaskInstance`:
A task instance model instance representing the task
instance which we waited for.
"""
# Wait for the task to finish
task_instance = self.get(uuid)
while task_instance.state not in TASK_INSTANCE_FINISH_STATUSES:
# Wait a bit
time.sleep(refresh_period)
# Query again
task_instance = self.get(uuid)
return task_instance | def function[wait_until_finished, parameter[self, uuid, refresh_period]]:
constant[Wait until a task instance with the given UUID is finished.
Args:
uuid (str): The UUID of the task instance to wait for.
refresh_period (float, optional): How many seconds to wait
in between checking the task's status. Defaults to 5
seconds.
Returns:
:class:`saltant.models.base_task_instance.BaseTaskInstance`:
A task instance model instance representing the task
instance which we waited for.
]
variable[task_instance] assign[=] call[name[self].get, parameter[name[uuid]]]
while compare[name[task_instance].state <ast.NotIn object at 0x7da2590d7190> name[TASK_INSTANCE_FINISH_STATUSES]] begin[:]
call[name[time].sleep, parameter[name[refresh_period]]]
variable[task_instance] assign[=] call[name[self].get, parameter[name[uuid]]]
return[name[task_instance]] | keyword[def] identifier[wait_until_finished] (
identifier[self] , identifier[uuid] , identifier[refresh_period] = identifier[DEFAULT_TASK_INSTANCE_WAIT_REFRESH_PERIOD]
):
literal[string]
identifier[task_instance] = identifier[self] . identifier[get] ( identifier[uuid] )
keyword[while] identifier[task_instance] . identifier[state] keyword[not] keyword[in] identifier[TASK_INSTANCE_FINISH_STATUSES] :
identifier[time] . identifier[sleep] ( identifier[refresh_period] )
identifier[task_instance] = identifier[self] . identifier[get] ( identifier[uuid] )
keyword[return] identifier[task_instance] | def wait_until_finished(self, uuid, refresh_period=DEFAULT_TASK_INSTANCE_WAIT_REFRESH_PERIOD):
"""Wait until a task instance with the given UUID is finished.
Args:
uuid (str): The UUID of the task instance to wait for.
refresh_period (float, optional): How many seconds to wait
in between checking the task's status. Defaults to 5
seconds.
Returns:
:class:`saltant.models.base_task_instance.BaseTaskInstance`:
A task instance model instance representing the task
instance which we waited for.
"""
# Wait for the task to finish
task_instance = self.get(uuid)
while task_instance.state not in TASK_INSTANCE_FINISH_STATUSES:
# Wait a bit
time.sleep(refresh_period)
# Query again
task_instance = self.get(uuid) # depends on [control=['while'], data=[]]
return task_instance |
def get_selector(self, name):
"""Find a selector mapped to a style in this or a base style sheet.
Args:
name (str): a style name
Returns:
:class:`.Selector`: the selector mapped to the style `name`
Raises:
KeyError: if the style `name` was not found in this or a base
style sheet
"""
try:
return self.matcher.by_name[name]
except (AttributeError, KeyError):
if self.base is not None:
return self.base.get_selector(name)
else:
raise KeyError("No selector found for style '{}'".format(name)) | def function[get_selector, parameter[self, name]]:
constant[Find a selector mapped to a style in this or a base style sheet.
Args:
name (str): a style name
Returns:
:class:`.Selector`: the selector mapped to the style `name`
Raises:
KeyError: if the style `name` was not found in this or a base
style sheet
]
<ast.Try object at 0x7da18f58d930> | keyword[def] identifier[get_selector] ( identifier[self] , identifier[name] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[matcher] . identifier[by_name] [ identifier[name] ]
keyword[except] ( identifier[AttributeError] , identifier[KeyError] ):
keyword[if] identifier[self] . identifier[base] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[base] . identifier[get_selector] ( identifier[name] )
keyword[else] :
keyword[raise] identifier[KeyError] ( literal[string] . identifier[format] ( identifier[name] )) | def get_selector(self, name):
"""Find a selector mapped to a style in this or a base style sheet.
Args:
name (str): a style name
Returns:
:class:`.Selector`: the selector mapped to the style `name`
Raises:
KeyError: if the style `name` was not found in this or a base
style sheet
"""
try:
return self.matcher.by_name[name] # depends on [control=['try'], data=[]]
except (AttributeError, KeyError):
if self.base is not None:
return self.base.get_selector(name) # depends on [control=['if'], data=[]]
else:
raise KeyError("No selector found for style '{}'".format(name)) # depends on [control=['except'], data=[]] |
def _to_str(s):
"""
Convert a filename to a string (on Python 2, explicitly
a byte string, not Unicode) as distutils checks for the
exact type str.
"""
if sys.version_info[0] == 2 and not isinstance(s, str):
# Assume it's Unicode, as that's what the PEP says
# should be provided.
return s.encode(sys.getfilesystemencoding())
return s | def function[_to_str, parameter[s]]:
constant[
Convert a filename to a string (on Python 2, explicitly
a byte string, not Unicode) as distutils checks for the
exact type str.
]
if <ast.BoolOp object at 0x7da1b1b12ec0> begin[:]
return[call[name[s].encode, parameter[call[name[sys].getfilesystemencoding, parameter[]]]]]
return[name[s]] | keyword[def] identifier[_to_str] ( identifier[s] ):
literal[string]
keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]== literal[int] keyword[and] keyword[not] identifier[isinstance] ( identifier[s] , identifier[str] ):
keyword[return] identifier[s] . identifier[encode] ( identifier[sys] . identifier[getfilesystemencoding] ())
keyword[return] identifier[s] | def _to_str(s):
"""
Convert a filename to a string (on Python 2, explicitly
a byte string, not Unicode) as distutils checks for the
exact type str.
"""
if sys.version_info[0] == 2 and (not isinstance(s, str)):
# Assume it's Unicode, as that's what the PEP says
# should be provided.
return s.encode(sys.getfilesystemencoding()) # depends on [control=['if'], data=[]]
return s |
def valid(a, b):
"""Check whether `a` and `b` are not inf or nan"""
return ~(np.isnan(a) | np.isinf(a) | np.isnan(b) | np.isinf(b)) | def function[valid, parameter[a, b]]:
constant[Check whether `a` and `b` are not inf or nan]
return[<ast.UnaryOp object at 0x7da1b19313f0>] | keyword[def] identifier[valid] ( identifier[a] , identifier[b] ):
literal[string]
keyword[return] ~( identifier[np] . identifier[isnan] ( identifier[a] )| identifier[np] . identifier[isinf] ( identifier[a] )| identifier[np] . identifier[isnan] ( identifier[b] )| identifier[np] . identifier[isinf] ( identifier[b] )) | def valid(a, b):
"""Check whether `a` and `b` are not inf or nan"""
return ~(np.isnan(a) | np.isinf(a) | np.isnan(b) | np.isinf(b)) |
def create_ticket(self, subject, **kwargs):
"""
Creates a ticket
To create ticket with attachments,
pass a key 'attachments' with value as list of fully qualified file paths in string format.
ex: attachments = ('/path/to/attachment1', '/path/to/attachment2')
"""
url = 'tickets'
status = kwargs.get('status', 2)
priority = kwargs.get('priority', 1)
data = {
'subject': subject,
'status': status,
'priority': priority,
}
data.update(kwargs)
if 'attachments' in data:
ticket = self._create_ticket_with_attachment(url, data)
return Ticket(**ticket)
ticket = self._api._post(url, data=json.dumps(data))
return Ticket(**ticket) | def function[create_ticket, parameter[self, subject]]:
constant[
Creates a ticket
To create ticket with attachments,
pass a key 'attachments' with value as list of fully qualified file paths in string format.
ex: attachments = ('/path/to/attachment1', '/path/to/attachment2')
]
variable[url] assign[=] constant[tickets]
variable[status] assign[=] call[name[kwargs].get, parameter[constant[status], constant[2]]]
variable[priority] assign[=] call[name[kwargs].get, parameter[constant[priority], constant[1]]]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b11385b0>, <ast.Constant object at 0x7da1b1139270>, <ast.Constant object at 0x7da1b1139870>], [<ast.Name object at 0x7da1b113a410>, <ast.Name object at 0x7da1b113a740>, <ast.Name object at 0x7da1b113b0a0>]]
call[name[data].update, parameter[name[kwargs]]]
if compare[constant[attachments] in name[data]] begin[:]
variable[ticket] assign[=] call[name[self]._create_ticket_with_attachment, parameter[name[url], name[data]]]
return[call[name[Ticket], parameter[]]]
variable[ticket] assign[=] call[name[self]._api._post, parameter[name[url]]]
return[call[name[Ticket], parameter[]]] | keyword[def] identifier[create_ticket] ( identifier[self] , identifier[subject] ,** identifier[kwargs] ):
literal[string]
identifier[url] = literal[string]
identifier[status] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[priority] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[data] ={
literal[string] : identifier[subject] ,
literal[string] : identifier[status] ,
literal[string] : identifier[priority] ,
}
identifier[data] . identifier[update] ( identifier[kwargs] )
keyword[if] literal[string] keyword[in] identifier[data] :
identifier[ticket] = identifier[self] . identifier[_create_ticket_with_attachment] ( identifier[url] , identifier[data] )
keyword[return] identifier[Ticket] (** identifier[ticket] )
identifier[ticket] = identifier[self] . identifier[_api] . identifier[_post] ( identifier[url] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[data] ))
keyword[return] identifier[Ticket] (** identifier[ticket] ) | def create_ticket(self, subject, **kwargs):
"""
Creates a ticket
To create ticket with attachments,
pass a key 'attachments' with value as list of fully qualified file paths in string format.
ex: attachments = ('/path/to/attachment1', '/path/to/attachment2')
"""
url = 'tickets'
status = kwargs.get('status', 2)
priority = kwargs.get('priority', 1)
data = {'subject': subject, 'status': status, 'priority': priority}
data.update(kwargs)
if 'attachments' in data:
ticket = self._create_ticket_with_attachment(url, data)
return Ticket(**ticket) # depends on [control=['if'], data=['data']]
ticket = self._api._post(url, data=json.dumps(data))
return Ticket(**ticket) |
def ccmodmd_xstep(k):
"""Do the X step of the ccmod stage. The only parameter is the slice
index `k` and there are no return values; all inputs and outputs are
from and to global variables.
"""
YU0 = mp_D_Y0 - mp_D_U0[k]
YU1 = mp_D_Y1[k] + mp_S[k] - mp_D_U1[k]
b = sl.rfftn(YU0, None, mp_cri.axisN) + \
np.conj(mp_Zf[k]) * sl.rfftn(YU1, None, mp_cri.axisN)
Xf = sl.solvedbi_sm(mp_Zf[k], 1.0, b, axis=mp_cri.axisM)
mp_D_X[k] = sl.irfftn(Xf, mp_cri.Nv, mp_cri.axisN)
mp_DX[k] = sl.irfftn(sl.inner(Xf, mp_Zf[k]), mp_cri.Nv, mp_cri.axisN) | def function[ccmodmd_xstep, parameter[k]]:
constant[Do the X step of the ccmod stage. The only parameter is the slice
index `k` and there are no return values; all inputs and outputs are
from and to global variables.
]
variable[YU0] assign[=] binary_operation[name[mp_D_Y0] - call[name[mp_D_U0]][name[k]]]
variable[YU1] assign[=] binary_operation[binary_operation[call[name[mp_D_Y1]][name[k]] + call[name[mp_S]][name[k]]] - call[name[mp_D_U1]][name[k]]]
variable[b] assign[=] binary_operation[call[name[sl].rfftn, parameter[name[YU0], constant[None], name[mp_cri].axisN]] + binary_operation[call[name[np].conj, parameter[call[name[mp_Zf]][name[k]]]] * call[name[sl].rfftn, parameter[name[YU1], constant[None], name[mp_cri].axisN]]]]
variable[Xf] assign[=] call[name[sl].solvedbi_sm, parameter[call[name[mp_Zf]][name[k]], constant[1.0], name[b]]]
call[name[mp_D_X]][name[k]] assign[=] call[name[sl].irfftn, parameter[name[Xf], name[mp_cri].Nv, name[mp_cri].axisN]]
call[name[mp_DX]][name[k]] assign[=] call[name[sl].irfftn, parameter[call[name[sl].inner, parameter[name[Xf], call[name[mp_Zf]][name[k]]]], name[mp_cri].Nv, name[mp_cri].axisN]] | keyword[def] identifier[ccmodmd_xstep] ( identifier[k] ):
literal[string]
identifier[YU0] = identifier[mp_D_Y0] - identifier[mp_D_U0] [ identifier[k] ]
identifier[YU1] = identifier[mp_D_Y1] [ identifier[k] ]+ identifier[mp_S] [ identifier[k] ]- identifier[mp_D_U1] [ identifier[k] ]
identifier[b] = identifier[sl] . identifier[rfftn] ( identifier[YU0] , keyword[None] , identifier[mp_cri] . identifier[axisN] )+ identifier[np] . identifier[conj] ( identifier[mp_Zf] [ identifier[k] ])* identifier[sl] . identifier[rfftn] ( identifier[YU1] , keyword[None] , identifier[mp_cri] . identifier[axisN] )
identifier[Xf] = identifier[sl] . identifier[solvedbi_sm] ( identifier[mp_Zf] [ identifier[k] ], literal[int] , identifier[b] , identifier[axis] = identifier[mp_cri] . identifier[axisM] )
identifier[mp_D_X] [ identifier[k] ]= identifier[sl] . identifier[irfftn] ( identifier[Xf] , identifier[mp_cri] . identifier[Nv] , identifier[mp_cri] . identifier[axisN] )
identifier[mp_DX] [ identifier[k] ]= identifier[sl] . identifier[irfftn] ( identifier[sl] . identifier[inner] ( identifier[Xf] , identifier[mp_Zf] [ identifier[k] ]), identifier[mp_cri] . identifier[Nv] , identifier[mp_cri] . identifier[axisN] ) | def ccmodmd_xstep(k):
"""Do the X step of the ccmod stage. The only parameter is the slice
index `k` and there are no return values; all inputs and outputs are
from and to global variables.
"""
YU0 = mp_D_Y0 - mp_D_U0[k]
YU1 = mp_D_Y1[k] + mp_S[k] - mp_D_U1[k]
b = sl.rfftn(YU0, None, mp_cri.axisN) + np.conj(mp_Zf[k]) * sl.rfftn(YU1, None, mp_cri.axisN)
Xf = sl.solvedbi_sm(mp_Zf[k], 1.0, b, axis=mp_cri.axisM)
mp_D_X[k] = sl.irfftn(Xf, mp_cri.Nv, mp_cri.axisN)
mp_DX[k] = sl.irfftn(sl.inner(Xf, mp_Zf[k]), mp_cri.Nv, mp_cri.axisN) |
def ParseOptions(cls, options, output_module):
"""Parses and validates options.
Args:
options (argparse.Namespace): parser options.
output_module (XLSXOutputModule): output module to configure.
Raises:
BadConfigObject: when the output module object is of the wrong type.
BadConfigOption: when the output filename was not provided.
"""
if not isinstance(output_module, xlsx.XLSXOutputModule):
raise errors.BadConfigObject(
'Output module is not an instance of XLSXOutputModule')
fields = cls._ParseStringOption(
options, 'fields', default_value=cls._DEFAULT_FIELDS)
additional_fields = cls._ParseStringOption(options, 'additional_fields')
if additional_fields:
fields = '{0:s},{1:s}'.format(fields, additional_fields)
filename = getattr(options, 'write', None)
if not filename:
raise errors.BadConfigOption(
'Output filename was not provided use "-w filename" to specify.')
timestamp_format = cls._ParseStringOption(
options, 'timestamp_format',
default_value=cls._DEFAULT_TIMESTAMP_FORMAT)
output_module.SetFields([
field_name.strip() for field_name in fields.split(',')])
output_module.SetFilename(filename)
output_module.SetTimestampFormat(timestamp_format) | def function[ParseOptions, parameter[cls, options, output_module]]:
constant[Parses and validates options.
Args:
options (argparse.Namespace): parser options.
output_module (XLSXOutputModule): output module to configure.
Raises:
BadConfigObject: when the output module object is of the wrong type.
BadConfigOption: when the output filename was not provided.
]
if <ast.UnaryOp object at 0x7da18eb55cf0> begin[:]
<ast.Raise object at 0x7da18eb54ac0>
variable[fields] assign[=] call[name[cls]._ParseStringOption, parameter[name[options], constant[fields]]]
variable[additional_fields] assign[=] call[name[cls]._ParseStringOption, parameter[name[options], constant[additional_fields]]]
if name[additional_fields] begin[:]
variable[fields] assign[=] call[constant[{0:s},{1:s}].format, parameter[name[fields], name[additional_fields]]]
variable[filename] assign[=] call[name[getattr], parameter[name[options], constant[write], constant[None]]]
if <ast.UnaryOp object at 0x7da18eb57f40> begin[:]
<ast.Raise object at 0x7da18eb57130>
variable[timestamp_format] assign[=] call[name[cls]._ParseStringOption, parameter[name[options], constant[timestamp_format]]]
call[name[output_module].SetFields, parameter[<ast.ListComp object at 0x7da20c7942b0>]]
call[name[output_module].SetFilename, parameter[name[filename]]]
call[name[output_module].SetTimestampFormat, parameter[name[timestamp_format]]] | keyword[def] identifier[ParseOptions] ( identifier[cls] , identifier[options] , identifier[output_module] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[output_module] , identifier[xlsx] . identifier[XLSXOutputModule] ):
keyword[raise] identifier[errors] . identifier[BadConfigObject] (
literal[string] )
identifier[fields] = identifier[cls] . identifier[_ParseStringOption] (
identifier[options] , literal[string] , identifier[default_value] = identifier[cls] . identifier[_DEFAULT_FIELDS] )
identifier[additional_fields] = identifier[cls] . identifier[_ParseStringOption] ( identifier[options] , literal[string] )
keyword[if] identifier[additional_fields] :
identifier[fields] = literal[string] . identifier[format] ( identifier[fields] , identifier[additional_fields] )
identifier[filename] = identifier[getattr] ( identifier[options] , literal[string] , keyword[None] )
keyword[if] keyword[not] identifier[filename] :
keyword[raise] identifier[errors] . identifier[BadConfigOption] (
literal[string] )
identifier[timestamp_format] = identifier[cls] . identifier[_ParseStringOption] (
identifier[options] , literal[string] ,
identifier[default_value] = identifier[cls] . identifier[_DEFAULT_TIMESTAMP_FORMAT] )
identifier[output_module] . identifier[SetFields] ([
identifier[field_name] . identifier[strip] () keyword[for] identifier[field_name] keyword[in] identifier[fields] . identifier[split] ( literal[string] )])
identifier[output_module] . identifier[SetFilename] ( identifier[filename] )
identifier[output_module] . identifier[SetTimestampFormat] ( identifier[timestamp_format] ) | def ParseOptions(cls, options, output_module):
"""Parses and validates options.
Args:
options (argparse.Namespace): parser options.
output_module (XLSXOutputModule): output module to configure.
Raises:
BadConfigObject: when the output module object is of the wrong type.
BadConfigOption: when the output filename was not provided.
"""
if not isinstance(output_module, xlsx.XLSXOutputModule):
raise errors.BadConfigObject('Output module is not an instance of XLSXOutputModule') # depends on [control=['if'], data=[]]
fields = cls._ParseStringOption(options, 'fields', default_value=cls._DEFAULT_FIELDS)
additional_fields = cls._ParseStringOption(options, 'additional_fields')
if additional_fields:
fields = '{0:s},{1:s}'.format(fields, additional_fields) # depends on [control=['if'], data=[]]
filename = getattr(options, 'write', None)
if not filename:
raise errors.BadConfigOption('Output filename was not provided use "-w filename" to specify.') # depends on [control=['if'], data=[]]
timestamp_format = cls._ParseStringOption(options, 'timestamp_format', default_value=cls._DEFAULT_TIMESTAMP_FORMAT)
output_module.SetFields([field_name.strip() for field_name in fields.split(',')])
output_module.SetFilename(filename)
output_module.SetTimestampFormat(timestamp_format) |
def add_filter(self, func, rules):
"""
为 BaseRoBot 添加一个 ``filter handler``。
:param func: 如果 rules 通过,则处理该消息的 handler。
:param rules: 一个 list,包含要匹配的字符串或者正则表达式。
:return: None
"""
if not callable(func):
raise ValueError("{} is not callable".format(func))
if not isinstance(rules, list):
raise ValueError("{} is not list".format(rules))
if len(rules) > 1:
for x in rules:
self.add_filter(func, [x])
else:
target_content = rules[0]
if isinstance(target_content, six.string_types):
target_content = to_text(target_content)
def _check_content(message):
return message.content == target_content
elif is_regex(target_content):
def _check_content(message):
return target_content.match(message.content)
else:
raise TypeError("%s is not a valid rule" % target_content)
argc = len(signature(func).parameters.keys())
@self.text
def _f(message, session=None):
_check_result = _check_content(message)
if _check_result:
if isinstance(_check_result, bool):
_check_result = None
return func(*[message, session, _check_result][:argc]) | def function[add_filter, parameter[self, func, rules]]:
constant[
为 BaseRoBot 添加一个 ``filter handler``。
:param func: 如果 rules 通过,则处理该消息的 handler。
:param rules: 一个 list,包含要匹配的字符串或者正则表达式。
:return: None
]
if <ast.UnaryOp object at 0x7da1b1c2ba00> begin[:]
<ast.Raise object at 0x7da1b1c2b070>
if <ast.UnaryOp object at 0x7da18dc9a680> begin[:]
<ast.Raise object at 0x7da18dc99090>
if compare[call[name[len], parameter[name[rules]]] greater[>] constant[1]] begin[:]
for taget[name[x]] in starred[name[rules]] begin[:]
call[name[self].add_filter, parameter[name[func], list[[<ast.Name object at 0x7da1b1c2a2c0>]]]] | keyword[def] identifier[add_filter] ( identifier[self] , identifier[func] , identifier[rules] ):
literal[string]
keyword[if] keyword[not] identifier[callable] ( identifier[func] ):
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[func] ))
keyword[if] keyword[not] identifier[isinstance] ( identifier[rules] , identifier[list] ):
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[rules] ))
keyword[if] identifier[len] ( identifier[rules] )> literal[int] :
keyword[for] identifier[x] keyword[in] identifier[rules] :
identifier[self] . identifier[add_filter] ( identifier[func] ,[ identifier[x] ])
keyword[else] :
identifier[target_content] = identifier[rules] [ literal[int] ]
keyword[if] identifier[isinstance] ( identifier[target_content] , identifier[six] . identifier[string_types] ):
identifier[target_content] = identifier[to_text] ( identifier[target_content] )
keyword[def] identifier[_check_content] ( identifier[message] ):
keyword[return] identifier[message] . identifier[content] == identifier[target_content]
keyword[elif] identifier[is_regex] ( identifier[target_content] ):
keyword[def] identifier[_check_content] ( identifier[message] ):
keyword[return] identifier[target_content] . identifier[match] ( identifier[message] . identifier[content] )
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] % identifier[target_content] )
identifier[argc] = identifier[len] ( identifier[signature] ( identifier[func] ). identifier[parameters] . identifier[keys] ())
@ identifier[self] . identifier[text]
keyword[def] identifier[_f] ( identifier[message] , identifier[session] = keyword[None] ):
identifier[_check_result] = identifier[_check_content] ( identifier[message] )
keyword[if] identifier[_check_result] :
keyword[if] identifier[isinstance] ( identifier[_check_result] , identifier[bool] ):
identifier[_check_result] = keyword[None]
keyword[return] identifier[func] (*[ identifier[message] , identifier[session] , identifier[_check_result] ][: identifier[argc] ]) | def add_filter(self, func, rules):
"""
为 BaseRoBot 添加一个 ``filter handler``。
:param func: 如果 rules 通过,则处理该消息的 handler。
:param rules: 一个 list,包含要匹配的字符串或者正则表达式。
:return: None
"""
if not callable(func):
raise ValueError('{} is not callable'.format(func)) # depends on [control=['if'], data=[]]
if not isinstance(rules, list):
raise ValueError('{} is not list'.format(rules)) # depends on [control=['if'], data=[]]
if len(rules) > 1:
for x in rules:
self.add_filter(func, [x]) # depends on [control=['for'], data=['x']] # depends on [control=['if'], data=[]]
else:
target_content = rules[0]
if isinstance(target_content, six.string_types):
target_content = to_text(target_content)
def _check_content(message):
return message.content == target_content # depends on [control=['if'], data=[]]
elif is_regex(target_content):
def _check_content(message):
return target_content.match(message.content) # depends on [control=['if'], data=[]]
else:
raise TypeError('%s is not a valid rule' % target_content)
argc = len(signature(func).parameters.keys())
@self.text
def _f(message, session=None):
_check_result = _check_content(message)
if _check_result:
if isinstance(_check_result, bool):
_check_result = None # depends on [control=['if'], data=[]]
return func(*[message, session, _check_result][:argc]) # depends on [control=['if'], data=[]] |
def render_templates_generator(*files, **template_map):
"""Render jinja templates according to template_map.
Yields (path, result)
"""
for path in files:
if not os.path.isfile(path):
raise ValueError("Template file %s not found"
% os.path.relpath(path))
else:
try:
with codecs.open(path, encoding='utf-8') as f:
text = f.read()
template = JINJA_ENV.from_string(text)
except jinja2.TemplateSyntaxError as err:
msg = ("Error rendering jinja2 template for file %s "
"on line %s. Error: %s"
% (path, err.lineno, err.message))
raise type(err)(
msg, err.lineno, filename=os.path.basename(path))
result = template.render(**template_map)
if not result.endswith('\n'):
result += '\n'
yield path, result | def function[render_templates_generator, parameter[]]:
constant[Render jinja templates according to template_map.
Yields (path, result)
]
for taget[name[path]] in starred[name[files]] begin[:]
if <ast.UnaryOp object at 0x7da20c6aa4d0> begin[:]
<ast.Raise object at 0x7da20c6aa650> | keyword[def] identifier[render_templates_generator] (* identifier[files] ,** identifier[template_map] ):
literal[string]
keyword[for] identifier[path] keyword[in] identifier[files] :
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[path] ):
keyword[raise] identifier[ValueError] ( literal[string]
% identifier[os] . identifier[path] . identifier[relpath] ( identifier[path] ))
keyword[else] :
keyword[try] :
keyword[with] identifier[codecs] . identifier[open] ( identifier[path] , identifier[encoding] = literal[string] ) keyword[as] identifier[f] :
identifier[text] = identifier[f] . identifier[read] ()
identifier[template] = identifier[JINJA_ENV] . identifier[from_string] ( identifier[text] )
keyword[except] identifier[jinja2] . identifier[TemplateSyntaxError] keyword[as] identifier[err] :
identifier[msg] =( literal[string]
literal[string]
%( identifier[path] , identifier[err] . identifier[lineno] , identifier[err] . identifier[message] ))
keyword[raise] identifier[type] ( identifier[err] )(
identifier[msg] , identifier[err] . identifier[lineno] , identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[path] ))
identifier[result] = identifier[template] . identifier[render] (** identifier[template_map] )
keyword[if] keyword[not] identifier[result] . identifier[endswith] ( literal[string] ):
identifier[result] += literal[string]
keyword[yield] identifier[path] , identifier[result] | def render_templates_generator(*files, **template_map):
"""Render jinja templates according to template_map.
Yields (path, result)
"""
for path in files:
if not os.path.isfile(path):
raise ValueError('Template file %s not found' % os.path.relpath(path)) # depends on [control=['if'], data=[]]
else:
try:
with codecs.open(path, encoding='utf-8') as f:
text = f.read() # depends on [control=['with'], data=['f']]
template = JINJA_ENV.from_string(text) # depends on [control=['try'], data=[]]
except jinja2.TemplateSyntaxError as err:
msg = 'Error rendering jinja2 template for file %s on line %s. Error: %s' % (path, err.lineno, err.message)
raise type(err)(msg, err.lineno, filename=os.path.basename(path)) # depends on [control=['except'], data=['err']]
result = template.render(**template_map)
if not result.endswith('\n'):
result += '\n' # depends on [control=['if'], data=[]]
yield (path, result) # depends on [control=['for'], data=['path']] |
def get_foldrate_at_temp(ref_rate, new_temp, ref_temp=37.0):
"""Scale the predicted kinetic folding rate of a protein to temperature T, based on the relationship ln(k_f)∝1/T
Args:
ref_rate (float): Kinetic folding rate calculated from the function :func:`~ssbio.protein.sequence.properties.kinetic_folding_rate.get_foldrate`
new_temp (float): Temperature in degrees C
ref_temp (float): Reference temperature, default to 37 C
Returns:
float: Kinetic folding rate k_f at temperature T
"""
# Not much data available on this slope value, however its effect on growth rate in a model is very small
slope = 22000
# Get folding rate for the reference temperature
preFactor = float(ref_rate) + slope / (float(ref_temp) + 273.15)
# Calculate folding rate at desired temperature
rate = math.exp(preFactor - slope / (float(new_temp) + 273.15))
return rate | def function[get_foldrate_at_temp, parameter[ref_rate, new_temp, ref_temp]]:
constant[Scale the predicted kinetic folding rate of a protein to temperature T, based on the relationship ln(k_f)∝1/T
Args:
ref_rate (float): Kinetic folding rate calculated from the function :func:`~ssbio.protein.sequence.properties.kinetic_folding_rate.get_foldrate`
new_temp (float): Temperature in degrees C
ref_temp (float): Reference temperature, default to 37 C
Returns:
float: Kinetic folding rate k_f at temperature T
]
variable[slope] assign[=] constant[22000]
variable[preFactor] assign[=] binary_operation[call[name[float], parameter[name[ref_rate]]] + binary_operation[name[slope] / binary_operation[call[name[float], parameter[name[ref_temp]]] + constant[273.15]]]]
variable[rate] assign[=] call[name[math].exp, parameter[binary_operation[name[preFactor] - binary_operation[name[slope] / binary_operation[call[name[float], parameter[name[new_temp]]] + constant[273.15]]]]]]
return[name[rate]] | keyword[def] identifier[get_foldrate_at_temp] ( identifier[ref_rate] , identifier[new_temp] , identifier[ref_temp] = literal[int] ):
literal[string]
identifier[slope] = literal[int]
identifier[preFactor] = identifier[float] ( identifier[ref_rate] )+ identifier[slope] /( identifier[float] ( identifier[ref_temp] )+ literal[int] )
identifier[rate] = identifier[math] . identifier[exp] ( identifier[preFactor] - identifier[slope] /( identifier[float] ( identifier[new_temp] )+ literal[int] ))
keyword[return] identifier[rate] | def get_foldrate_at_temp(ref_rate, new_temp, ref_temp=37.0):
"""Scale the predicted kinetic folding rate of a protein to temperature T, based on the relationship ln(k_f)∝1/T
Args:
ref_rate (float): Kinetic folding rate calculated from the function :func:`~ssbio.protein.sequence.properties.kinetic_folding_rate.get_foldrate`
new_temp (float): Temperature in degrees C
ref_temp (float): Reference temperature, default to 37 C
Returns:
float: Kinetic folding rate k_f at temperature T
"""
# Not much data available on this slope value, however its effect on growth rate in a model is very small
slope = 22000
# Get folding rate for the reference temperature
preFactor = float(ref_rate) + slope / (float(ref_temp) + 273.15)
# Calculate folding rate at desired temperature
rate = math.exp(preFactor - slope / (float(new_temp) + 273.15))
return rate |
def triples_to_graph(self, triples, top=None):
"""
Create a Graph from *triples* considering codec configuration.
The Graph class does not know about information in the codec,
so if Graph instantiation depends on special `TYPE_REL` or
`TOP_VAR` values, use this function instead of instantiating
a Graph object directly. This is also where edge
normalization (de-inversion) and value type conversion occur
(via handle_triple()).
Args:
triples: an iterable of (lhs, relation, rhs) triples
top: node identifier of the top node
Returns:
a Graph object
"""
inferred_top = triples[0][0] if triples else None
ts = []
for triple in triples:
if triple[0] == self.TOP_VAR and triple[1] == self.TOP_REL:
inferred_top = triple[2]
else:
ts.append(self.handle_triple(*triple))
top = self.handle_triple(self.TOP_VAR, self.TOP_REL, top).target
return Graph(ts, top=top or inferred_top) | def function[triples_to_graph, parameter[self, triples, top]]:
constant[
Create a Graph from *triples* considering codec configuration.
The Graph class does not know about information in the codec,
so if Graph instantiation depends on special `TYPE_REL` or
`TOP_VAR` values, use this function instead of instantiating
a Graph object directly. This is also where edge
normalization (de-inversion) and value type conversion occur
(via handle_triple()).
Args:
triples: an iterable of (lhs, relation, rhs) triples
top: node identifier of the top node
Returns:
a Graph object
]
variable[inferred_top] assign[=] <ast.IfExp object at 0x7da18dc057e0>
variable[ts] assign[=] list[[]]
for taget[name[triple]] in starred[name[triples]] begin[:]
if <ast.BoolOp object at 0x7da18dc06b60> begin[:]
variable[inferred_top] assign[=] call[name[triple]][constant[2]]
variable[top] assign[=] call[name[self].handle_triple, parameter[name[self].TOP_VAR, name[self].TOP_REL, name[top]]].target
return[call[name[Graph], parameter[name[ts]]]] | keyword[def] identifier[triples_to_graph] ( identifier[self] , identifier[triples] , identifier[top] = keyword[None] ):
literal[string]
identifier[inferred_top] = identifier[triples] [ literal[int] ][ literal[int] ] keyword[if] identifier[triples] keyword[else] keyword[None]
identifier[ts] =[]
keyword[for] identifier[triple] keyword[in] identifier[triples] :
keyword[if] identifier[triple] [ literal[int] ]== identifier[self] . identifier[TOP_VAR] keyword[and] identifier[triple] [ literal[int] ]== identifier[self] . identifier[TOP_REL] :
identifier[inferred_top] = identifier[triple] [ literal[int] ]
keyword[else] :
identifier[ts] . identifier[append] ( identifier[self] . identifier[handle_triple] (* identifier[triple] ))
identifier[top] = identifier[self] . identifier[handle_triple] ( identifier[self] . identifier[TOP_VAR] , identifier[self] . identifier[TOP_REL] , identifier[top] ). identifier[target]
keyword[return] identifier[Graph] ( identifier[ts] , identifier[top] = identifier[top] keyword[or] identifier[inferred_top] ) | def triples_to_graph(self, triples, top=None):
"""
Create a Graph from *triples* considering codec configuration.
The Graph class does not know about information in the codec,
so if Graph instantiation depends on special `TYPE_REL` or
`TOP_VAR` values, use this function instead of instantiating
a Graph object directly. This is also where edge
normalization (de-inversion) and value type conversion occur
(via handle_triple()).
Args:
triples: an iterable of (lhs, relation, rhs) triples
top: node identifier of the top node
Returns:
a Graph object
"""
inferred_top = triples[0][0] if triples else None
ts = []
for triple in triples:
if triple[0] == self.TOP_VAR and triple[1] == self.TOP_REL:
inferred_top = triple[2] # depends on [control=['if'], data=[]]
else:
ts.append(self.handle_triple(*triple)) # depends on [control=['for'], data=['triple']]
top = self.handle_triple(self.TOP_VAR, self.TOP_REL, top).target
return Graph(ts, top=top or inferred_top) |
def rm(device, minor): # pylint: disable=C0103
'''
Removes the partition with number <minor>.
CLI Example:
.. code-block:: bash
salt '*' partition.rm /dev/sda 5
'''
_validate_device(device)
try:
int(minor)
except Exception:
raise CommandExecutionError(
'Invalid minor number passed to partition.rm'
)
cmd = 'parted -m -s {0} rm {1}'.format(device, minor)
out = __salt__['cmd.run'](cmd).splitlines()
return out | def function[rm, parameter[device, minor]]:
constant[
Removes the partition with number <minor>.
CLI Example:
.. code-block:: bash
salt '*' partition.rm /dev/sda 5
]
call[name[_validate_device], parameter[name[device]]]
<ast.Try object at 0x7da18f812e00>
variable[cmd] assign[=] call[constant[parted -m -s {0} rm {1}].format, parameter[name[device], name[minor]]]
variable[out] assign[=] call[call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]].splitlines, parameter[]]
return[name[out]] | keyword[def] identifier[rm] ( identifier[device] , identifier[minor] ):
literal[string]
identifier[_validate_device] ( identifier[device] )
keyword[try] :
identifier[int] ( identifier[minor] )
keyword[except] identifier[Exception] :
keyword[raise] identifier[CommandExecutionError] (
literal[string]
)
identifier[cmd] = literal[string] . identifier[format] ( identifier[device] , identifier[minor] )
identifier[out] = identifier[__salt__] [ literal[string] ]( identifier[cmd] ). identifier[splitlines] ()
keyword[return] identifier[out] | def rm(device, minor): # pylint: disable=C0103
"\n Removes the partition with number <minor>.\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' partition.rm /dev/sda 5\n "
_validate_device(device)
try:
int(minor) # depends on [control=['try'], data=[]]
except Exception:
raise CommandExecutionError('Invalid minor number passed to partition.rm') # depends on [control=['except'], data=[]]
cmd = 'parted -m -s {0} rm {1}'.format(device, minor)
out = __salt__['cmd.run'](cmd).splitlines()
return out |
def find_and_replace_userids(self, text):
'''Finds occurrences of Slack userids and attempts to replace them with
display names.
Args:
text (string): The message text
Returns:
string: The message text with userids replaced.
'''
match = True
pattern = re.compile('<@([A-Z0-9]{9})>')
while match:
match = pattern.search(text)
if match:
name = self.get_user_display_name(match.group(1))
text = re.sub(re.compile(match.group(0)), '@' + name, text)
return text | def function[find_and_replace_userids, parameter[self, text]]:
constant[Finds occurrences of Slack userids and attempts to replace them with
display names.
Args:
text (string): The message text
Returns:
string: The message text with userids replaced.
]
variable[match] assign[=] constant[True]
variable[pattern] assign[=] call[name[re].compile, parameter[constant[<@([A-Z0-9]{9})>]]]
while name[match] begin[:]
variable[match] assign[=] call[name[pattern].search, parameter[name[text]]]
if name[match] begin[:]
variable[name] assign[=] call[name[self].get_user_display_name, parameter[call[name[match].group, parameter[constant[1]]]]]
variable[text] assign[=] call[name[re].sub, parameter[call[name[re].compile, parameter[call[name[match].group, parameter[constant[0]]]]], binary_operation[constant[@] + name[name]], name[text]]]
return[name[text]] | keyword[def] identifier[find_and_replace_userids] ( identifier[self] , identifier[text] ):
literal[string]
identifier[match] = keyword[True]
identifier[pattern] = identifier[re] . identifier[compile] ( literal[string] )
keyword[while] identifier[match] :
identifier[match] = identifier[pattern] . identifier[search] ( identifier[text] )
keyword[if] identifier[match] :
identifier[name] = identifier[self] . identifier[get_user_display_name] ( identifier[match] . identifier[group] ( literal[int] ))
identifier[text] = identifier[re] . identifier[sub] ( identifier[re] . identifier[compile] ( identifier[match] . identifier[group] ( literal[int] )), literal[string] + identifier[name] , identifier[text] )
keyword[return] identifier[text] | def find_and_replace_userids(self, text):
"""Finds occurrences of Slack userids and attempts to replace them with
display names.
Args:
text (string): The message text
Returns:
string: The message text with userids replaced.
"""
match = True
pattern = re.compile('<@([A-Z0-9]{9})>')
while match:
match = pattern.search(text)
if match:
name = self.get_user_display_name(match.group(1))
text = re.sub(re.compile(match.group(0)), '@' + name, text) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
return text |
def delete_unused(self, keyword_ids=None):
"""
Removes all instances that are not assigned to any object. Limits
processing to ``keyword_ids`` if given.
"""
if keyword_ids is None:
keywords = self.all()
else:
keywords = self.filter(id__in=keyword_ids)
keywords.filter(assignments__isnull=True).delete() | def function[delete_unused, parameter[self, keyword_ids]]:
constant[
Removes all instances that are not assigned to any object. Limits
processing to ``keyword_ids`` if given.
]
if compare[name[keyword_ids] is constant[None]] begin[:]
variable[keywords] assign[=] call[name[self].all, parameter[]]
call[call[name[keywords].filter, parameter[]].delete, parameter[]] | keyword[def] identifier[delete_unused] ( identifier[self] , identifier[keyword_ids] = keyword[None] ):
literal[string]
keyword[if] identifier[keyword_ids] keyword[is] keyword[None] :
identifier[keywords] = identifier[self] . identifier[all] ()
keyword[else] :
identifier[keywords] = identifier[self] . identifier[filter] ( identifier[id__in] = identifier[keyword_ids] )
identifier[keywords] . identifier[filter] ( identifier[assignments__isnull] = keyword[True] ). identifier[delete] () | def delete_unused(self, keyword_ids=None):
"""
Removes all instances that are not assigned to any object. Limits
processing to ``keyword_ids`` if given.
"""
if keyword_ids is None:
keywords = self.all() # depends on [control=['if'], data=[]]
else:
keywords = self.filter(id__in=keyword_ids)
keywords.filter(assignments__isnull=True).delete() |
def extend_children(self, parent, wanted_children, child_class, child_glossary=None):
"""
Extend the number of children so that the parent object contains wanted children.
No child will be removed if wanted_children is smaller than the current number of children.
"""
from cms.api import add_plugin
current_children = parent.get_num_children()
for _ in range(current_children, wanted_children):
child = add_plugin(parent.placeholder, child_class, parent.language, target=parent)
if isinstance(child_glossary, dict):
child.glossary.update(child_glossary)
child.save() | def function[extend_children, parameter[self, parent, wanted_children, child_class, child_glossary]]:
constant[
Extend the number of children so that the parent object contains wanted children.
No child will be removed if wanted_children is smaller than the current number of children.
]
from relative_module[cms.api] import module[add_plugin]
variable[current_children] assign[=] call[name[parent].get_num_children, parameter[]]
for taget[name[_]] in starred[call[name[range], parameter[name[current_children], name[wanted_children]]]] begin[:]
variable[child] assign[=] call[name[add_plugin], parameter[name[parent].placeholder, name[child_class], name[parent].language]]
if call[name[isinstance], parameter[name[child_glossary], name[dict]]] begin[:]
call[name[child].glossary.update, parameter[name[child_glossary]]]
call[name[child].save, parameter[]] | keyword[def] identifier[extend_children] ( identifier[self] , identifier[parent] , identifier[wanted_children] , identifier[child_class] , identifier[child_glossary] = keyword[None] ):
literal[string]
keyword[from] identifier[cms] . identifier[api] keyword[import] identifier[add_plugin]
identifier[current_children] = identifier[parent] . identifier[get_num_children] ()
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[current_children] , identifier[wanted_children] ):
identifier[child] = identifier[add_plugin] ( identifier[parent] . identifier[placeholder] , identifier[child_class] , identifier[parent] . identifier[language] , identifier[target] = identifier[parent] )
keyword[if] identifier[isinstance] ( identifier[child_glossary] , identifier[dict] ):
identifier[child] . identifier[glossary] . identifier[update] ( identifier[child_glossary] )
identifier[child] . identifier[save] () | def extend_children(self, parent, wanted_children, child_class, child_glossary=None):
"""
Extend the number of children so that the parent object contains wanted children.
No child will be removed if wanted_children is smaller than the current number of children.
"""
from cms.api import add_plugin
current_children = parent.get_num_children()
for _ in range(current_children, wanted_children):
child = add_plugin(parent.placeholder, child_class, parent.language, target=parent)
if isinstance(child_glossary, dict):
child.glossary.update(child_glossary) # depends on [control=['if'], data=[]]
child.save() # depends on [control=['for'], data=[]] |
def get_netconf_client_capabilities_output_session_time(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_netconf_client_capabilities = ET.Element("get_netconf_client_capabilities")
config = get_netconf_client_capabilities
output = ET.SubElement(get_netconf_client_capabilities, "output")
session = ET.SubElement(output, "session")
time = ET.SubElement(session, "time")
time.text = kwargs.pop('time')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[get_netconf_client_capabilities_output_session_time, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[get_netconf_client_capabilities] assign[=] call[name[ET].Element, parameter[constant[get_netconf_client_capabilities]]]
variable[config] assign[=] name[get_netconf_client_capabilities]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_netconf_client_capabilities], constant[output]]]
variable[session] assign[=] call[name[ET].SubElement, parameter[name[output], constant[session]]]
variable[time] assign[=] call[name[ET].SubElement, parameter[name[session], constant[time]]]
name[time].text assign[=] call[name[kwargs].pop, parameter[constant[time]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[get_netconf_client_capabilities_output_session_time] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[get_netconf_client_capabilities] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[get_netconf_client_capabilities]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_netconf_client_capabilities] , literal[string] )
identifier[session] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[time] = identifier[ET] . identifier[SubElement] ( identifier[session] , literal[string] )
identifier[time] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def get_netconf_client_capabilities_output_session_time(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
get_netconf_client_capabilities = ET.Element('get_netconf_client_capabilities')
config = get_netconf_client_capabilities
output = ET.SubElement(get_netconf_client_capabilities, 'output')
session = ET.SubElement(output, 'session')
time = ET.SubElement(session, 'time')
time.text = kwargs.pop('time')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def curvature_matrix_from_blurred_mapping_matrix_jit(blurred_mapping_matrix, noise_map_1d, flist, iflist):
"""Compute the curvature matrix *F* from a blurred mapping matrix *f* and the 1D noise-map *\sigma* \
(see Warren & Dye 2003).
Parameters
-----------
blurred_mapping_matrix : ndarray
The matrix representing the blurred mappings between sub-grid pixels and pixelization pixels.
noise_map_1d : ndarray
Flattened 1D array of the noise-map used by the inversion during the fit.
flist : ndarray
NumPy array of floats used to store mappings for efficienctly calculation.
iflist : ndarray
NumPy array of integers used to store mappings for efficienctly calculation.
"""
curvature_matrix = np.zeros((blurred_mapping_matrix.shape[1], blurred_mapping_matrix.shape[1]))
for image_index in range(blurred_mapping_matrix.shape[0]):
index = 0
for pixel_index in range(blurred_mapping_matrix.shape[1]):
if blurred_mapping_matrix[image_index, pixel_index] > 0.0:
flist[index] = blurred_mapping_matrix[image_index, pixel_index] / noise_map_1d[image_index]
iflist[index] = pixel_index
index += 1
if index > 0:
for i1 in range(index):
for j1 in range(index):
ix = iflist[i1]
iy = iflist[j1]
curvature_matrix[ix, iy] += flist[i1] * flist[j1]
for i in range(blurred_mapping_matrix.shape[1]):
for j in range(blurred_mapping_matrix.shape[1]):
curvature_matrix[i, j] = curvature_matrix[j, i]
return curvature_matrix | def function[curvature_matrix_from_blurred_mapping_matrix_jit, parameter[blurred_mapping_matrix, noise_map_1d, flist, iflist]]:
constant[Compute the curvature matrix *F* from a blurred mapping matrix *f* and the 1D noise-map *\sigma* (see Warren & Dye 2003).
Parameters
-----------
blurred_mapping_matrix : ndarray
The matrix representing the blurred mappings between sub-grid pixels and pixelization pixels.
noise_map_1d : ndarray
Flattened 1D array of the noise-map used by the inversion during the fit.
flist : ndarray
NumPy array of floats used to store mappings for efficienctly calculation.
iflist : ndarray
NumPy array of integers used to store mappings for efficienctly calculation.
]
variable[curvature_matrix] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Subscript object at 0x7da20c76ed10>, <ast.Subscript object at 0x7da20c76e2c0>]]]]
for taget[name[image_index]] in starred[call[name[range], parameter[call[name[blurred_mapping_matrix].shape][constant[0]]]]] begin[:]
variable[index] assign[=] constant[0]
for taget[name[pixel_index]] in starred[call[name[range], parameter[call[name[blurred_mapping_matrix].shape][constant[1]]]]] begin[:]
if compare[call[name[blurred_mapping_matrix]][tuple[[<ast.Name object at 0x7da20c76caf0>, <ast.Name object at 0x7da20c76e0e0>]]] greater[>] constant[0.0]] begin[:]
call[name[flist]][name[index]] assign[=] binary_operation[call[name[blurred_mapping_matrix]][tuple[[<ast.Name object at 0x7da20c76ed70>, <ast.Name object at 0x7da20c76cca0>]]] / call[name[noise_map_1d]][name[image_index]]]
call[name[iflist]][name[index]] assign[=] name[pixel_index]
<ast.AugAssign object at 0x7da20c76ca30>
if compare[name[index] greater[>] constant[0]] begin[:]
for taget[name[i1]] in starred[call[name[range], parameter[name[index]]]] begin[:]
for taget[name[j1]] in starred[call[name[range], parameter[name[index]]]] begin[:]
variable[ix] assign[=] call[name[iflist]][name[i1]]
variable[iy] assign[=] call[name[iflist]][name[j1]]
<ast.AugAssign object at 0x7da20c76f4f0>
for taget[name[i]] in starred[call[name[range], parameter[call[name[blurred_mapping_matrix].shape][constant[1]]]]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[call[name[blurred_mapping_matrix].shape][constant[1]]]]] begin[:]
call[name[curvature_matrix]][tuple[[<ast.Name object at 0x7da20c76da20>, <ast.Name object at 0x7da20c76cdc0>]]] assign[=] call[name[curvature_matrix]][tuple[[<ast.Name object at 0x7da20c76d420>, <ast.Name object at 0x7da20c76f520>]]]
return[name[curvature_matrix]] | keyword[def] identifier[curvature_matrix_from_blurred_mapping_matrix_jit] ( identifier[blurred_mapping_matrix] , identifier[noise_map_1d] , identifier[flist] , identifier[iflist] ):
literal[string]
identifier[curvature_matrix] = identifier[np] . identifier[zeros] (( identifier[blurred_mapping_matrix] . identifier[shape] [ literal[int] ], identifier[blurred_mapping_matrix] . identifier[shape] [ literal[int] ]))
keyword[for] identifier[image_index] keyword[in] identifier[range] ( identifier[blurred_mapping_matrix] . identifier[shape] [ literal[int] ]):
identifier[index] = literal[int]
keyword[for] identifier[pixel_index] keyword[in] identifier[range] ( identifier[blurred_mapping_matrix] . identifier[shape] [ literal[int] ]):
keyword[if] identifier[blurred_mapping_matrix] [ identifier[image_index] , identifier[pixel_index] ]> literal[int] :
identifier[flist] [ identifier[index] ]= identifier[blurred_mapping_matrix] [ identifier[image_index] , identifier[pixel_index] ]/ identifier[noise_map_1d] [ identifier[image_index] ]
identifier[iflist] [ identifier[index] ]= identifier[pixel_index]
identifier[index] += literal[int]
keyword[if] identifier[index] > literal[int] :
keyword[for] identifier[i1] keyword[in] identifier[range] ( identifier[index] ):
keyword[for] identifier[j1] keyword[in] identifier[range] ( identifier[index] ):
identifier[ix] = identifier[iflist] [ identifier[i1] ]
identifier[iy] = identifier[iflist] [ identifier[j1] ]
identifier[curvature_matrix] [ identifier[ix] , identifier[iy] ]+= identifier[flist] [ identifier[i1] ]* identifier[flist] [ identifier[j1] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[blurred_mapping_matrix] . identifier[shape] [ literal[int] ]):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[blurred_mapping_matrix] . identifier[shape] [ literal[int] ]):
identifier[curvature_matrix] [ identifier[i] , identifier[j] ]= identifier[curvature_matrix] [ identifier[j] , identifier[i] ]
keyword[return] identifier[curvature_matrix] | def curvature_matrix_from_blurred_mapping_matrix_jit(blurred_mapping_matrix, noise_map_1d, flist, iflist):
"""Compute the curvature matrix *F* from a blurred mapping matrix *f* and the 1D noise-map *\\sigma* (see Warren & Dye 2003).
Parameters
-----------
blurred_mapping_matrix : ndarray
The matrix representing the blurred mappings between sub-grid pixels and pixelization pixels.
noise_map_1d : ndarray
Flattened 1D array of the noise-map used by the inversion during the fit.
flist : ndarray
NumPy array of floats used to store mappings for efficienctly calculation.
iflist : ndarray
NumPy array of integers used to store mappings for efficienctly calculation.
"""
curvature_matrix = np.zeros((blurred_mapping_matrix.shape[1], blurred_mapping_matrix.shape[1]))
for image_index in range(blurred_mapping_matrix.shape[0]):
index = 0
for pixel_index in range(blurred_mapping_matrix.shape[1]):
if blurred_mapping_matrix[image_index, pixel_index] > 0.0:
flist[index] = blurred_mapping_matrix[image_index, pixel_index] / noise_map_1d[image_index]
iflist[index] = pixel_index
index += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pixel_index']]
if index > 0:
for i1 in range(index):
for j1 in range(index):
ix = iflist[i1]
iy = iflist[j1]
curvature_matrix[ix, iy] += flist[i1] * flist[j1] # depends on [control=['for'], data=['j1']] # depends on [control=['for'], data=['i1']] # depends on [control=['if'], data=['index']] # depends on [control=['for'], data=['image_index']]
for i in range(blurred_mapping_matrix.shape[1]):
for j in range(blurred_mapping_matrix.shape[1]):
curvature_matrix[i, j] = curvature_matrix[j, i] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
return curvature_matrix |
def upsert_document(self, document, database_name=None, collection_name=None, document_id=None):
"""
Inserts a new document (or updates an existing one) into an existing
collection in the CosmosDB database.
"""
# Assign unique ID if one isn't provided
if document_id is None:
document_id = str(uuid.uuid4())
if document is None:
raise AirflowBadRequest("You cannot insert a None document")
# Add document id if isn't found
if 'id' in document:
if document['id'] is None:
document['id'] = document_id
else:
document['id'] = document_id
created_document = self.get_conn().CreateItem(
get_collection_link(
self.__get_database_name(database_name),
self.__get_collection_name(collection_name)),
document)
return created_document | def function[upsert_document, parameter[self, document, database_name, collection_name, document_id]]:
constant[
Inserts a new document (or updates an existing one) into an existing
collection in the CosmosDB database.
]
if compare[name[document_id] is constant[None]] begin[:]
variable[document_id] assign[=] call[name[str], parameter[call[name[uuid].uuid4, parameter[]]]]
if compare[name[document] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0595330>
if compare[constant[id] in name[document]] begin[:]
if compare[call[name[document]][constant[id]] is constant[None]] begin[:]
call[name[document]][constant[id]] assign[=] name[document_id]
variable[created_document] assign[=] call[call[name[self].get_conn, parameter[]].CreateItem, parameter[call[name[get_collection_link], parameter[call[name[self].__get_database_name, parameter[name[database_name]]], call[name[self].__get_collection_name, parameter[name[collection_name]]]]], name[document]]]
return[name[created_document]] | keyword[def] identifier[upsert_document] ( identifier[self] , identifier[document] , identifier[database_name] = keyword[None] , identifier[collection_name] = keyword[None] , identifier[document_id] = keyword[None] ):
literal[string]
keyword[if] identifier[document_id] keyword[is] keyword[None] :
identifier[document_id] = identifier[str] ( identifier[uuid] . identifier[uuid4] ())
keyword[if] identifier[document] keyword[is] keyword[None] :
keyword[raise] identifier[AirflowBadRequest] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[document] :
keyword[if] identifier[document] [ literal[string] ] keyword[is] keyword[None] :
identifier[document] [ literal[string] ]= identifier[document_id]
keyword[else] :
identifier[document] [ literal[string] ]= identifier[document_id]
identifier[created_document] = identifier[self] . identifier[get_conn] (). identifier[CreateItem] (
identifier[get_collection_link] (
identifier[self] . identifier[__get_database_name] ( identifier[database_name] ),
identifier[self] . identifier[__get_collection_name] ( identifier[collection_name] )),
identifier[document] )
keyword[return] identifier[created_document] | def upsert_document(self, document, database_name=None, collection_name=None, document_id=None):
"""
Inserts a new document (or updates an existing one) into an existing
collection in the CosmosDB database.
"""
# Assign unique ID if one isn't provided
if document_id is None:
document_id = str(uuid.uuid4()) # depends on [control=['if'], data=['document_id']]
if document is None:
raise AirflowBadRequest('You cannot insert a None document') # depends on [control=['if'], data=[]]
# Add document id if isn't found
if 'id' in document:
if document['id'] is None:
document['id'] = document_id # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['document']]
else:
document['id'] = document_id
created_document = self.get_conn().CreateItem(get_collection_link(self.__get_database_name(database_name), self.__get_collection_name(collection_name)), document)
return created_document |
def session(self):
"""A context manager for this client's session.
This function closes the current session when this client goes out of
scope.
"""
self._session = requests.session()
yield
self._session.close()
self._session = None | def function[session, parameter[self]]:
constant[A context manager for this client's session.
This function closes the current session when this client goes out of
scope.
]
name[self]._session assign[=] call[name[requests].session, parameter[]]
<ast.Yield object at 0x7da18f721420>
call[name[self]._session.close, parameter[]]
name[self]._session assign[=] constant[None] | keyword[def] identifier[session] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_session] = identifier[requests] . identifier[session] ()
keyword[yield]
identifier[self] . identifier[_session] . identifier[close] ()
identifier[self] . identifier[_session] = keyword[None] | def session(self):
"""A context manager for this client's session.
This function closes the current session when this client goes out of
scope.
"""
self._session = requests.session()
yield
self._session.close()
self._session = None |
def _CompareFunction(self, control: 'Control', depth: int) -> bool:
"""
Define how to search.
control: `Control` or its subclass.
depth: int, tree depth from searchFromControl.
Return bool.
"""
for key, value in self.searchProperties.items():
if 'ControlType' == key:
if value != control.ControlType:
return False
elif 'ClassName' == key:
if value != control.ClassName:
return False
elif 'AutomationId' == key:
if value != control.AutomationId:
return False
elif 'Name' == key:
if value != control.Name:
return False
elif 'SubName' == key:
if value not in control.Name:
return False
elif 'RegexName' == key:
if not self.regexName.match(control.Name):
return False
elif 'Depth' == key:
if value != depth:
return False
elif 'Compare' == key:
if not value(control, depth):
return False
return True | def function[_CompareFunction, parameter[self, control, depth]]:
constant[
Define how to search.
control: `Control` or its subclass.
depth: int, tree depth from searchFromControl.
Return bool.
]
for taget[tuple[[<ast.Name object at 0x7da20c6c71c0>, <ast.Name object at 0x7da20c6c7ca0>]]] in starred[call[name[self].searchProperties.items, parameter[]]] begin[:]
if compare[constant[ControlType] equal[==] name[key]] begin[:]
if compare[name[value] not_equal[!=] name[control].ControlType] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[_CompareFunction] ( identifier[self] , identifier[control] : literal[string] , identifier[depth] : identifier[int] )-> identifier[bool] :
literal[string]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[self] . identifier[searchProperties] . identifier[items] ():
keyword[if] literal[string] == identifier[key] :
keyword[if] identifier[value] != identifier[control] . identifier[ControlType] :
keyword[return] keyword[False]
keyword[elif] literal[string] == identifier[key] :
keyword[if] identifier[value] != identifier[control] . identifier[ClassName] :
keyword[return] keyword[False]
keyword[elif] literal[string] == identifier[key] :
keyword[if] identifier[value] != identifier[control] . identifier[AutomationId] :
keyword[return] keyword[False]
keyword[elif] literal[string] == identifier[key] :
keyword[if] identifier[value] != identifier[control] . identifier[Name] :
keyword[return] keyword[False]
keyword[elif] literal[string] == identifier[key] :
keyword[if] identifier[value] keyword[not] keyword[in] identifier[control] . identifier[Name] :
keyword[return] keyword[False]
keyword[elif] literal[string] == identifier[key] :
keyword[if] keyword[not] identifier[self] . identifier[regexName] . identifier[match] ( identifier[control] . identifier[Name] ):
keyword[return] keyword[False]
keyword[elif] literal[string] == identifier[key] :
keyword[if] identifier[value] != identifier[depth] :
keyword[return] keyword[False]
keyword[elif] literal[string] == identifier[key] :
keyword[if] keyword[not] identifier[value] ( identifier[control] , identifier[depth] ):
keyword[return] keyword[False]
keyword[return] keyword[True] | def _CompareFunction(self, control: 'Control', depth: int) -> bool:
"""
Define how to search.
control: `Control` or its subclass.
depth: int, tree depth from searchFromControl.
Return bool.
"""
for (key, value) in self.searchProperties.items():
if 'ControlType' == key:
if value != control.ControlType:
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif 'ClassName' == key:
if value != control.ClassName:
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif 'AutomationId' == key:
if value != control.AutomationId:
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif 'Name' == key:
if value != control.Name:
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif 'SubName' == key:
if value not in control.Name:
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif 'RegexName' == key:
if not self.regexName.match(control.Name):
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif 'Depth' == key:
if value != depth:
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif 'Compare' == key:
if not value(control, depth):
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return True |
def _entity_list_as_bel(entities: Iterable[BaseEntity]) -> str:
"""Stringify a list of BEL entities."""
return ', '.join(
e.as_bel()
for e in entities
) | def function[_entity_list_as_bel, parameter[entities]]:
constant[Stringify a list of BEL entities.]
return[call[constant[, ].join, parameter[<ast.GeneratorExp object at 0x7da1b0cb22f0>]]] | keyword[def] identifier[_entity_list_as_bel] ( identifier[entities] : identifier[Iterable] [ identifier[BaseEntity] ])-> identifier[str] :
literal[string]
keyword[return] literal[string] . identifier[join] (
identifier[e] . identifier[as_bel] ()
keyword[for] identifier[e] keyword[in] identifier[entities]
) | def _entity_list_as_bel(entities: Iterable[BaseEntity]) -> str:
"""Stringify a list of BEL entities."""
return ', '.join((e.as_bel() for e in entities)) |
def handle_var(value, context):
"""
Handle template tag variable
"""
# Resolve FilterExpression and Variable immediately
if isinstance(value, FilterExpression) or isinstance(value, Variable):
return value.resolve(context)
# Return quoted strings unquoted
# http://djangosnippets.org/snippets/886
stringval = QUOTED_STRING.search(value)
if stringval:
return stringval.group("noquotes")
# Resolve variable or return string value
try:
return Variable(value).resolve(context)
except VariableDoesNotExist:
return value | def function[handle_var, parameter[value, context]]:
constant[
Handle template tag variable
]
if <ast.BoolOp object at 0x7da1b22630d0> begin[:]
return[call[name[value].resolve, parameter[name[context]]]]
variable[stringval] assign[=] call[name[QUOTED_STRING].search, parameter[name[value]]]
if name[stringval] begin[:]
return[call[name[stringval].group, parameter[constant[noquotes]]]]
<ast.Try object at 0x7da1b2260f40> | keyword[def] identifier[handle_var] ( identifier[value] , identifier[context] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[FilterExpression] ) keyword[or] identifier[isinstance] ( identifier[value] , identifier[Variable] ):
keyword[return] identifier[value] . identifier[resolve] ( identifier[context] )
identifier[stringval] = identifier[QUOTED_STRING] . identifier[search] ( identifier[value] )
keyword[if] identifier[stringval] :
keyword[return] identifier[stringval] . identifier[group] ( literal[string] )
keyword[try] :
keyword[return] identifier[Variable] ( identifier[value] ). identifier[resolve] ( identifier[context] )
keyword[except] identifier[VariableDoesNotExist] :
keyword[return] identifier[value] | def handle_var(value, context):
"""
Handle template tag variable
"""
# Resolve FilterExpression and Variable immediately
if isinstance(value, FilterExpression) or isinstance(value, Variable):
return value.resolve(context) # depends on [control=['if'], data=[]]
# Return quoted strings unquoted
# http://djangosnippets.org/snippets/886
stringval = QUOTED_STRING.search(value)
if stringval:
return stringval.group('noquotes') # depends on [control=['if'], data=[]]
# Resolve variable or return string value
try:
return Variable(value).resolve(context) # depends on [control=['try'], data=[]]
except VariableDoesNotExist:
return value # depends on [control=['except'], data=[]] |
def isValue(self):
"""Indicate that |ASN.1| object represents ASN.1 value.
If *isValue* is `False` then this object represents just ASN.1 schema.
If *isValue* is `True` then, in addition to its ASN.1 schema features,
this object can also be used like a Python built-in object (e.g. `int`,
`str`, `dict` etc.).
Returns
-------
: :class:`bool`
:class:`False` if object represents just ASN.1 schema.
:class:`True` if object represents ASN.1 schema and can be used as a normal value.
Note
----
There is an important distinction between PyASN1 schema and value objects.
The PyASN1 schema objects can only participate in ASN.1 schema-related
operations (e.g. defining or testing the structure of the data). Most
obvious uses of ASN.1 schema is to guide serialisation codecs whilst
encoding/decoding serialised ASN.1 contents.
The PyASN1 value objects can **additionally** participate in many operations
involving regular Python objects (e.g. arithmetic, comprehension etc).
"""
if self._currentIdx is None:
return False
componentValue = self._componentValues[self._currentIdx]
return componentValue is not noValue and componentValue.isValue | def function[isValue, parameter[self]]:
constant[Indicate that |ASN.1| object represents ASN.1 value.
If *isValue* is `False` then this object represents just ASN.1 schema.
If *isValue* is `True` then, in addition to its ASN.1 schema features,
this object can also be used like a Python built-in object (e.g. `int`,
`str`, `dict` etc.).
Returns
-------
: :class:`bool`
:class:`False` if object represents just ASN.1 schema.
:class:`True` if object represents ASN.1 schema and can be used as a normal value.
Note
----
There is an important distinction between PyASN1 schema and value objects.
The PyASN1 schema objects can only participate in ASN.1 schema-related
operations (e.g. defining or testing the structure of the data). Most
obvious uses of ASN.1 schema is to guide serialisation codecs whilst
encoding/decoding serialised ASN.1 contents.
The PyASN1 value objects can **additionally** participate in many operations
involving regular Python objects (e.g. arithmetic, comprehension etc).
]
if compare[name[self]._currentIdx is constant[None]] begin[:]
return[constant[False]]
variable[componentValue] assign[=] call[name[self]._componentValues][name[self]._currentIdx]
return[<ast.BoolOp object at 0x7da204566b30>] | keyword[def] identifier[isValue] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_currentIdx] keyword[is] keyword[None] :
keyword[return] keyword[False]
identifier[componentValue] = identifier[self] . identifier[_componentValues] [ identifier[self] . identifier[_currentIdx] ]
keyword[return] identifier[componentValue] keyword[is] keyword[not] identifier[noValue] keyword[and] identifier[componentValue] . identifier[isValue] | def isValue(self):
"""Indicate that |ASN.1| object represents ASN.1 value.
If *isValue* is `False` then this object represents just ASN.1 schema.
If *isValue* is `True` then, in addition to its ASN.1 schema features,
this object can also be used like a Python built-in object (e.g. `int`,
`str`, `dict` etc.).
Returns
-------
: :class:`bool`
:class:`False` if object represents just ASN.1 schema.
:class:`True` if object represents ASN.1 schema and can be used as a normal value.
Note
----
There is an important distinction between PyASN1 schema and value objects.
The PyASN1 schema objects can only participate in ASN.1 schema-related
operations (e.g. defining or testing the structure of the data). Most
obvious uses of ASN.1 schema is to guide serialisation codecs whilst
encoding/decoding serialised ASN.1 contents.
The PyASN1 value objects can **additionally** participate in many operations
involving regular Python objects (e.g. arithmetic, comprehension etc).
"""
if self._currentIdx is None:
return False # depends on [control=['if'], data=[]]
componentValue = self._componentValues[self._currentIdx]
return componentValue is not noValue and componentValue.isValue |
def write_file(self, file_name, vasp4_compatible=False):
"""
Write the VolumetricData object to a vasp compatible file.
Args:
file_name (str): Path to a file
vasp4_compatible (bool): True if the format is vasp4 compatible
"""
def _print_fortran_float(f):
"""
Fortran codes print floats with a leading zero in scientific
notation. When writing CHGCAR files, we adopt this convention
to ensure written CHGCAR files are byte-to-byte identical to
their input files as far as possible.
:param f: float
:return: str
"""
s = "{:.10E}".format(f)
if f >= 0:
return "0." + s[0] + s[2:12] + 'E' + "{:+03}".format(int(s[13:]) + 1)
else:
return "-." + s[1] + s[3:13] + 'E' + "{:+03}".format(int(s[14:]) + 1)
with zopen(file_name, "wt") as f:
p = Poscar(self.structure)
# use original name if it's been set (e.g. from Chgcar)
comment = getattr(self, 'name', p.comment)
lines = comment + "\n"
lines += " 1.00000000000000\n"
latt = self.structure.lattice.matrix
lines += " %12.6f%12.6f%12.6f\n" % tuple(latt[0, :])
lines += " %12.6f%12.6f%12.6f\n" % tuple(latt[1, :])
lines += " %12.6f%12.6f%12.6f\n" % tuple(latt[2, :])
if not vasp4_compatible:
lines += "".join(["%5s" % s for s in p.site_symbols]) + "\n"
lines += "".join(["%6d" % x for x in p.natoms]) + "\n"
lines += "Direct\n"
for site in self.structure:
lines += "%10.6f%10.6f%10.6f\n" % tuple(site.frac_coords)
lines += " \n"
f.write(lines)
a = self.dim
def write_spin(data_type):
lines = []
count = 0
f.write(" {} {} {}\n".format(a[0], a[1], a[2]))
for (k, j, i) in itertools.product(list(range(a[2])),
list(range(a[1])),
list(range(a[0]))):
lines.append(_print_fortran_float(self.data[data_type][i, j, k]))
count += 1
if count % 5 == 0:
f.write(" " + "".join(lines) + "\n")
lines = []
else:
lines.append(" ")
f.write(" " + "".join(lines) + " \n")
f.write("".join(self.data_aug.get(data_type, [])))
write_spin("total")
if self.is_spin_polarized and self.is_soc:
write_spin("diff_x")
write_spin("diff_y")
write_spin("diff_z")
elif self.is_spin_polarized:
write_spin("diff") | def function[write_file, parameter[self, file_name, vasp4_compatible]]:
constant[
Write the VolumetricData object to a vasp compatible file.
Args:
file_name (str): Path to a file
vasp4_compatible (bool): True if the format is vasp4 compatible
]
def function[_print_fortran_float, parameter[f]]:
constant[
Fortran codes print floats with a leading zero in scientific
notation. When writing CHGCAR files, we adopt this convention
to ensure written CHGCAR files are byte-to-byte identical to
their input files as far as possible.
:param f: float
:return: str
]
variable[s] assign[=] call[constant[{:.10E}].format, parameter[name[f]]]
if compare[name[f] greater_or_equal[>=] constant[0]] begin[:]
return[binary_operation[binary_operation[binary_operation[binary_operation[constant[0.] + call[name[s]][constant[0]]] + call[name[s]][<ast.Slice object at 0x7da1b26ad960>]] + constant[E]] + call[constant[{:+03}].format, parameter[binary_operation[call[name[int], parameter[call[name[s]][<ast.Slice object at 0x7da1b26ae6b0>]]] + constant[1]]]]]]
with call[name[zopen], parameter[name[file_name], constant[wt]]] begin[:]
variable[p] assign[=] call[name[Poscar], parameter[name[self].structure]]
variable[comment] assign[=] call[name[getattr], parameter[name[self], constant[name], name[p].comment]]
variable[lines] assign[=] binary_operation[name[comment] + constant[
]]
<ast.AugAssign object at 0x7da1b26afc40>
variable[latt] assign[=] name[self].structure.lattice.matrix
<ast.AugAssign object at 0x7da1b26ac580>
<ast.AugAssign object at 0x7da1b26ad540>
<ast.AugAssign object at 0x7da1b26ad150>
if <ast.UnaryOp object at 0x7da1b26afbe0> begin[:]
<ast.AugAssign object at 0x7da1b26ae6e0>
<ast.AugAssign object at 0x7da1b26adc60>
<ast.AugAssign object at 0x7da1b26afdc0>
for taget[name[site]] in starred[name[self].structure] begin[:]
<ast.AugAssign object at 0x7da1b26af7f0>
<ast.AugAssign object at 0x7da1b26acc70>
call[name[f].write, parameter[name[lines]]]
variable[a] assign[=] name[self].dim
def function[write_spin, parameter[data_type]]:
variable[lines] assign[=] list[[]]
variable[count] assign[=] constant[0]
call[name[f].write, parameter[call[constant[ {} {} {}
].format, parameter[call[name[a]][constant[0]], call[name[a]][constant[1]], call[name[a]][constant[2]]]]]]
for taget[tuple[[<ast.Name object at 0x7da18f810070>, <ast.Name object at 0x7da18f810b20>, <ast.Name object at 0x7da18f811bd0>]]] in starred[call[name[itertools].product, parameter[call[name[list], parameter[call[name[range], parameter[call[name[a]][constant[2]]]]]], call[name[list], parameter[call[name[range], parameter[call[name[a]][constant[1]]]]]], call[name[list], parameter[call[name[range], parameter[call[name[a]][constant[0]]]]]]]]] begin[:]
call[name[lines].append, parameter[call[name[_print_fortran_float], parameter[call[call[name[self].data][name[data_type]]][tuple[[<ast.Name object at 0x7da18f811c90>, <ast.Name object at 0x7da18f813310>, <ast.Name object at 0x7da18f8119f0>]]]]]]]
<ast.AugAssign object at 0x7da18f812b00>
if compare[binary_operation[name[count] <ast.Mod object at 0x7da2590d6920> constant[5]] equal[==] constant[0]] begin[:]
call[name[f].write, parameter[binary_operation[binary_operation[constant[ ] + call[constant[].join, parameter[name[lines]]]] + constant[
]]]]
variable[lines] assign[=] list[[]]
call[name[f].write, parameter[binary_operation[binary_operation[constant[ ] + call[constant[].join, parameter[name[lines]]]] + constant[
]]]]
call[name[f].write, parameter[call[constant[].join, parameter[call[name[self].data_aug.get, parameter[name[data_type], list[[]]]]]]]]
call[name[write_spin], parameter[constant[total]]]
if <ast.BoolOp object at 0x7da18bc72d40> begin[:]
call[name[write_spin], parameter[constant[diff_x]]]
call[name[write_spin], parameter[constant[diff_y]]]
call[name[write_spin], parameter[constant[diff_z]]] | keyword[def] identifier[write_file] ( identifier[self] , identifier[file_name] , identifier[vasp4_compatible] = keyword[False] ):
literal[string]
keyword[def] identifier[_print_fortran_float] ( identifier[f] ):
literal[string]
identifier[s] = literal[string] . identifier[format] ( identifier[f] )
keyword[if] identifier[f] >= literal[int] :
keyword[return] literal[string] + identifier[s] [ literal[int] ]+ identifier[s] [ literal[int] : literal[int] ]+ literal[string] + literal[string] . identifier[format] ( identifier[int] ( identifier[s] [ literal[int] :])+ literal[int] )
keyword[else] :
keyword[return] literal[string] + identifier[s] [ literal[int] ]+ identifier[s] [ literal[int] : literal[int] ]+ literal[string] + literal[string] . identifier[format] ( identifier[int] ( identifier[s] [ literal[int] :])+ literal[int] )
keyword[with] identifier[zopen] ( identifier[file_name] , literal[string] ) keyword[as] identifier[f] :
identifier[p] = identifier[Poscar] ( identifier[self] . identifier[structure] )
identifier[comment] = identifier[getattr] ( identifier[self] , literal[string] , identifier[p] . identifier[comment] )
identifier[lines] = identifier[comment] + literal[string]
identifier[lines] += literal[string]
identifier[latt] = identifier[self] . identifier[structure] . identifier[lattice] . identifier[matrix]
identifier[lines] += literal[string] % identifier[tuple] ( identifier[latt] [ literal[int] ,:])
identifier[lines] += literal[string] % identifier[tuple] ( identifier[latt] [ literal[int] ,:])
identifier[lines] += literal[string] % identifier[tuple] ( identifier[latt] [ literal[int] ,:])
keyword[if] keyword[not] identifier[vasp4_compatible] :
identifier[lines] += literal[string] . identifier[join] ([ literal[string] % identifier[s] keyword[for] identifier[s] keyword[in] identifier[p] . identifier[site_symbols] ])+ literal[string]
identifier[lines] += literal[string] . identifier[join] ([ literal[string] % identifier[x] keyword[for] identifier[x] keyword[in] identifier[p] . identifier[natoms] ])+ literal[string]
identifier[lines] += literal[string]
keyword[for] identifier[site] keyword[in] identifier[self] . identifier[structure] :
identifier[lines] += literal[string] % identifier[tuple] ( identifier[site] . identifier[frac_coords] )
identifier[lines] += literal[string]
identifier[f] . identifier[write] ( identifier[lines] )
identifier[a] = identifier[self] . identifier[dim]
keyword[def] identifier[write_spin] ( identifier[data_type] ):
identifier[lines] =[]
identifier[count] = literal[int]
identifier[f] . identifier[write] ( literal[string] . identifier[format] ( identifier[a] [ literal[int] ], identifier[a] [ literal[int] ], identifier[a] [ literal[int] ]))
keyword[for] ( identifier[k] , identifier[j] , identifier[i] ) keyword[in] identifier[itertools] . identifier[product] ( identifier[list] ( identifier[range] ( identifier[a] [ literal[int] ])),
identifier[list] ( identifier[range] ( identifier[a] [ literal[int] ])),
identifier[list] ( identifier[range] ( identifier[a] [ literal[int] ]))):
identifier[lines] . identifier[append] ( identifier[_print_fortran_float] ( identifier[self] . identifier[data] [ identifier[data_type] ][ identifier[i] , identifier[j] , identifier[k] ]))
identifier[count] += literal[int]
keyword[if] identifier[count] % literal[int] == literal[int] :
identifier[f] . identifier[write] ( literal[string] + literal[string] . identifier[join] ( identifier[lines] )+ literal[string] )
identifier[lines] =[]
keyword[else] :
identifier[lines] . identifier[append] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] + literal[string] . identifier[join] ( identifier[lines] )+ literal[string] )
identifier[f] . identifier[write] ( literal[string] . identifier[join] ( identifier[self] . identifier[data_aug] . identifier[get] ( identifier[data_type] ,[])))
identifier[write_spin] ( literal[string] )
keyword[if] identifier[self] . identifier[is_spin_polarized] keyword[and] identifier[self] . identifier[is_soc] :
identifier[write_spin] ( literal[string] )
identifier[write_spin] ( literal[string] )
identifier[write_spin] ( literal[string] )
keyword[elif] identifier[self] . identifier[is_spin_polarized] :
identifier[write_spin] ( literal[string] ) | def write_file(self, file_name, vasp4_compatible=False):
"""
Write the VolumetricData object to a vasp compatible file.
Args:
file_name (str): Path to a file
vasp4_compatible (bool): True if the format is vasp4 compatible
"""
def _print_fortran_float(f):
"""
Fortran codes print floats with a leading zero in scientific
notation. When writing CHGCAR files, we adopt this convention
to ensure written CHGCAR files are byte-to-byte identical to
their input files as far as possible.
:param f: float
:return: str
"""
s = '{:.10E}'.format(f)
if f >= 0:
return '0.' + s[0] + s[2:12] + 'E' + '{:+03}'.format(int(s[13:]) + 1) # depends on [control=['if'], data=[]]
else:
return '-.' + s[1] + s[3:13] + 'E' + '{:+03}'.format(int(s[14:]) + 1)
with zopen(file_name, 'wt') as f:
p = Poscar(self.structure)
# use original name if it's been set (e.g. from Chgcar)
comment = getattr(self, 'name', p.comment)
lines = comment + '\n'
lines += ' 1.00000000000000\n'
latt = self.structure.lattice.matrix
lines += ' %12.6f%12.6f%12.6f\n' % tuple(latt[0, :])
lines += ' %12.6f%12.6f%12.6f\n' % tuple(latt[1, :])
lines += ' %12.6f%12.6f%12.6f\n' % tuple(latt[2, :])
if not vasp4_compatible:
lines += ''.join(['%5s' % s for s in p.site_symbols]) + '\n' # depends on [control=['if'], data=[]]
lines += ''.join(['%6d' % x for x in p.natoms]) + '\n'
lines += 'Direct\n'
for site in self.structure:
lines += '%10.6f%10.6f%10.6f\n' % tuple(site.frac_coords) # depends on [control=['for'], data=['site']]
lines += ' \n'
f.write(lines)
a = self.dim
def write_spin(data_type):
lines = []
count = 0
f.write(' {} {} {}\n'.format(a[0], a[1], a[2]))
for (k, j, i) in itertools.product(list(range(a[2])), list(range(a[1])), list(range(a[0]))):
lines.append(_print_fortran_float(self.data[data_type][i, j, k]))
count += 1
if count % 5 == 0:
f.write(' ' + ''.join(lines) + '\n')
lines = [] # depends on [control=['if'], data=[]]
else:
lines.append(' ') # depends on [control=['for'], data=[]]
f.write(' ' + ''.join(lines) + ' \n')
f.write(''.join(self.data_aug.get(data_type, [])))
write_spin('total')
if self.is_spin_polarized and self.is_soc:
write_spin('diff_x')
write_spin('diff_y')
write_spin('diff_z') # depends on [control=['if'], data=[]]
elif self.is_spin_polarized:
write_spin('diff') # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['f']] |
def main(argv: list) -> int:
    """Passphrase CLI entry point.

    Parses *argv*, then generates exactly one of (mutually exclusive,
    checked in this priority order): a UUID v4 string, a coin throw,
    a random password, or a wordlist-based passphrase (the default).
    The result is printed to stdout unless muted, and optionally
    written to an output file.

    :param argv: command-line arguments (excluding the program name),
        as accepted by ``ArgumentParser.parse_args``.
    :return: process exit code: 0 on success, 1 on error (low system
        entropy without ``--insecure``, unreadable input file, or an
        unwritable output file/directory).
    """
    passphrase = Passphrase()
    # Set defaults
    passphrase.entropy_bits_req = ENTROPY_BITS_MIN
    passwordlen_default = passphrase.password_length_needed()
    amount_n_default = 0
    passphrase.amount_n = amount_n_default
    # To avoid loading the wordlist unnecessarily, I'm hardcoding this value
    # It's ok, it's only used to show help information
    amount_w_default = 6
    parser = ArgumentParser(
        formatter_class=RawDescriptionHelpFormatter,
        description='{version_string}\n\n'
        'Generates a cryptographically secure passphrase, based on '
        'a wordlist, or a\npassword, and prints it to standard output.\n'
        'By default, it uses an embedded EFF Large Wordlist for passphrases.\n'
        'Passphrases with less than {wordsamountmin} words are considered '
        'insecure. A safe bet is \nbetween {wordsamountmin} and 7 words, '
        'plus at least a number.\n'
        'For passwords, use at least {passwdmin} characters, but prefer '
        '{passwdpref} or more, using the\ncomplete characters set.\n\n'
        'Instead of words and numbers, a password (random string of '
        'printable\ncharacters from Python String standard) can be generated '
        'by\n-p | --password, specifying the length. It uses uppercase, '
        'lowercase, digits\nand punctuation characters unless otherwise '
        'specified.\n'
        'Also, a UUID v4 string can be generated by --uuid4 or a coin can be '
        'thrown\nwith --coin.\n'
        'A custom wordlist can be specified by -i | --input, the format must '
        'be: \nsingle column, one word per line. If -d | --diceware is used, '
        'the input\nfile is treated as a diceware wordlist (two columns).'
        '\nOptionally, -o | --output can be used to specify an output file '
        '(existing \nfile is overwritten).\n'
        'The number of words is {wordsamountmin} by default, but it '
        'can be changed by -w | --words.\n'
        'The number of numbers is {numsamountmin} by default, but it can be '
        'changed by\n-n | --numbers. The generated numbers are between '
        '{minnum} and {maxnum}.\n'
        'The default separator is a blank space, but any character or '
        'character\nsequence can be specified by -s | --separator.\n'
        '\nExample output:\n'
        '\tDefault parameters:\tchalice sheath postcard modular cider size\n'
        '\tWords=3, Numbers=2:\tdepraved widow office 184022 320264\n'
        '\tPassword, 20 chars:\tsF#s@B+iR#ZIL-yUWKPR'.format(
            version_string=__version_string__,
            minnum=passphrase.randnum_min,
            maxnum=passphrase.randnum_max,
            wordsamountmin=amount_w_default,
            numsamountmin=amount_n_default,
            passwdmin=passwordlen_default,
            passwdpref=passwordlen_default + 4
        )
    )
    parser.add_argument(
        '--version',
        action='store_true',
        help='print program version and licensing information and exit'
    )
    parser.add_argument(
        '--insecure',
        action='store_true',
        default=False,
        help="force password/passphrase generation even if the system's "
             "entropy is too low"
    )
    parser.add_argument(
        '--no-newline',
        action='store_true',
        default=False,
        help="don't print newline at the end of the passphrase/password"
    )
    parser.add_argument(
        '-m',
        '--mute',
        action='store_true',
        default=False,
        help="muted mode: it won't print output, only informational, warning "
             "or error messages (usefull with -o | --output)"
    )
    parser.add_argument(
        '-v',
        '--verbose',
        action='store_true',
        default=False,
        help='print additional information (can coexist with -m | --mute)'
    )
    parser.add_argument(
        '-e',
        '--entropybits',
        type=_bigger_than_zero,
        default=ENTROPY_BITS_MIN,
        help='specify the number of bits to use for entropy calculations '
             '(defaults to {})'.format(ENTROPY_BITS_MIN)
    )
    parser.add_argument(
        '--uuid4',
        action='store_true',
        default=False,
        help='generate an UUID v4 string'
    )
    parser.add_argument(
        '--coin',
        action='store_true',
        default=False,
        help='generate a random coin throw: heads or tails'
    )
    # nargs='?' + const=-1 makes -p a tri-state: absent -> None,
    # bare "-p" -> -1 (meaning "use the minimum secure length"),
    # "-p N" -> N.
    parser.add_argument(
        '-p',
        '--password',
        type=_bigger_than_zero,
        const=-1,
        nargs='?',
        help='generate a password of the specified length from all printable '
             'or selected characters'
    )
    # Same tri-state idea: absent -> None, bare flag -> 0 ("all upper"),
    # a number -> that many uppercase characters in a passphrase.
    parser.add_argument(
        '--use-uppercase',
        type=_bigger_than_zero,
        const=0,
        nargs='?',
        help='use uppercase characters for password generation or give the '
             'amount of uppercase characters in the passphrase: zero or no '
             'input for all uppercase or any number of uppercase '
             'characters wanted (the rest are lowercase)'
    )
    parser.add_argument(
        '--use-lowercase',
        type=_bigger_than_zero,
        const=0,
        nargs='?',
        help='use lowercase characters for password generation or give the '
             'amount of lowercase characters in the passphrase: zero or no '
             'input for all lowercase (default) or any number of lowercase '
             'characters wanted (the rest are uppercase)'
    )
    parser.add_argument(
        '--use-digits',
        action='store_true',
        default=False,
        help='use digits for password generation'
    )
    parser.add_argument(
        '--use-alphanumeric',
        action='store_true',
        default=False,
        help='use lowercase and uppercase characters, and digits for password '
             'generation (equivalent to --use-lowercase --use-uppercase '
             '--use-digits)'
    )
    parser.add_argument(
        '--use-punctuation',
        action='store_true',
        default=False,
        help='use punctuation characters for password generation'
    )
    parser.add_argument(
        '-w',
        '--words',
        type=_bigger_than_zero,
        help='specify the amount of words (0 or more)'
    )
    parser.add_argument(
        '-n',
        '--numbers',
        type=_bigger_than_zero,
        default=amount_n_default,
        help='specify the amount of numbers (0 or more)'
    )
    parser.add_argument(
        '-s',
        '--separator',
        type=str,
        default=' ',
        help='specify a separator character (space by default)'
    )
    parser.add_argument(
        '-o',
        '--output',
        type=str,
        help='specify an output file (existing file is overwritten)'
    )
    parser.add_argument(
        '-i',
        '--input',
        type=str,
        help='specify an input file (it must have the following format: '
             'single column, one word per line)'
    )
    parser.add_argument(
        '-d',
        '--diceware',
        action='store_true',
        default=False,
        help='specify input file as a diceware list (format: two colums)'
    )
    args = parser.parse_args(argv)
    # Unpack parsed options into locals for readability below.
    inputfile = args.input
    outputfile = args.output
    separator = args.separator
    is_diceware = args.diceware
    passwordlen = args.password      # None | -1 (bare -p) | requested length
    amount_w = args.words            # None means "compute a secure amount"
    amount_n = args.numbers
    show_version = args.version
    mute = args.mute
    verbose = args.verbose
    no_newline = args.no_newline
    gen_uuid4 = args.uuid4
    gen_coin = args.coin
    p_uppercase = args.use_uppercase  # None | 0 (bare flag) | count
    p_lowercase = args.use_lowercase  # None | 0 (bare flag) | count
    p_digits = args.use_digits
    p_punctuation = args.use_punctuation
    p_alphanumeric = args.use_alphanumeric
    entropy_bits = args.entropybits
    gen_insecure = args.insecure
    if show_version:
        print(__version_string__)
        return 0
    if verbose:
        Aux.print_stderr(__version_string__)
    # Check system entropy
    system_entropy = Aux.system_entropy()
    if system_entropy < SYSTEM_ENTROPY_BITS_MIN:
        Aux.print_stderr(
            'Warning: the system has too few entropy: {} bits; randomness '
            'quality could be poor'.format(system_entropy)
        )
        # Refuse to generate on a low-entropy system unless the user
        # explicitly opted in with --insecure.
        if not gen_insecure:
            Aux.print_stderr(
                'Error: system entropy too low: {system_entropy} '
                '< {system_entropy_min}'.format(
                    system_entropy=system_entropy,
                    system_entropy_min=SYSTEM_ENTROPY_BITS_MIN
                )
            )
            return 1
    if verbose:
        Aux.print_stderr(
            'Using {} bits of entropy for calculations (if any). The minimum '
            'recommended is {}'.format(entropy_bits, ENTROPY_BITS_MIN)
        )
    # Check selected entropy
    # The -e value only matters when some length/amount must be derived
    # from it: never for uuid4/coin, and not when the user fixed both the
    # word and number amounts without requesting a password.
    check_chosen_entropy = False if gen_uuid4 or gen_coin else not (
        amount_n and amount_w and passwordlen is None
    )
    if check_chosen_entropy and entropy_bits < ENTROPY_BITS_MIN:
        Aux.print_stderr(
            'Warning: insecure number of bits for entropy calculations '
            'chosen! Should be bigger than {}'.format(ENTROPY_BITS_MIN)
        )
    passphrase.entropy_bits_req = entropy_bits
    # Generate whatever is requested
    # Modes are mutually exclusive; precedence: uuid4 > coin > password >
    # passphrase (the default).
    if gen_uuid4:
        # Generate uuid4
        if verbose:
            Aux.print_stderr('Generating UUID v4')
        gen_what = 'UUID v4'
        gen_ent = 120  # entropy of a version-4 UUID (122 random bits,
                       # reported here as 120 -- TODO confirm intended value)
        passphrase.generate_uuid4()
        passphrase.separator = '-'
    elif gen_coin:
        # Generate a coin throw
        if verbose:
            Aux.print_stderr('Throwing a coin')
        gen_what = 'coin'
        gen_ent = 1  # a single fair coin throw carries 1 bit of entropy
        # NOTE(review): rebinds `passphrase` from a Passphrase object to a
        # plain str; only printing/str() uses it afterwards in this branch.
        passphrase = 'Heads' if randbool() else 'Tails'
    elif passwordlen is not None:
        # Generate a password
        gen_what = 'password'
        # For passwords the tri-state flags collapse to booleans
        # (bare flag or explicit count both mean "enabled").
        p_uppercase = True if p_uppercase is not None else False
        p_lowercase = True if p_lowercase is not None else False
        # Only override the character-set defaults when the user picked
        # at least one set explicitly; otherwise keep Passphrase defaults.
        if (
                p_uppercase
                or p_lowercase
                or p_digits
                or p_punctuation
                or p_alphanumeric
        ):
            passphrase.password_use_uppercase = (p_uppercase or p_alphanumeric)
            passphrase.password_use_lowercase = (p_lowercase or p_alphanumeric)
            passphrase.password_use_digits = (p_digits or p_alphanumeric)
            passphrase.password_use_punctuation = p_punctuation
        # Recompute the minimum after the character sets are fixed: a
        # smaller alphabet needs a longer password for the same entropy.
        min_len = passphrase.password_length_needed()
        if passwordlen < 1:
            # Bare -p (const=-1): use the minimum secure length.
            passwordlen = min_len
        elif passwordlen < min_len:
            Aux.print_stderr(
                'Warning: insecure password length chosen! Should be bigger '
                'than or equal to {}'.format(min_len)
            )
        passphrase.passwordlen = passwordlen
        gen_ent = passphrase.generated_password_entropy()
        if verbose:
            # Build a human-readable list of the enabled character sets.
            verbose_string = (
                'Generating password of {} characters long '
                'using '.format(passwordlen)
            )
            verbose_string += (
                'uppercase characters, ' if (
                    passphrase.password_use_uppercase or p_alphanumeric
                ) else ''
            )
            verbose_string += (
                'lowercase characters, ' if (
                    passphrase.password_use_lowercase or p_alphanumeric
                ) else ''
            )
            verbose_string += (
                'digits, ' if (
                    passphrase.password_use_digits or p_alphanumeric
                ) else ''
            )
            verbose_string += (
                'punctuation characters, ' if (
                    passphrase.password_use_punctuation
                ) else ''
            )
            # Strip the trailing ", " left by the last enabled set.
            Aux.print_stderr(
                verbose_string[:-2] if (
                    verbose_string[-2:] == ', '
                ) else verbose_string
            )
        passphrase.generate_password()
        passphrase.separator = ''
    else:
        # Generate a passphrase
        gen_what = 'passphrase'
        # Read wordlist if indicated
        if inputfile is None:
            passphrase.load_internal_wordlist()
        else:
            try:
                passphrase.import_words_from_file(inputfile, is_diceware)
            except IOError:
                Aux.print_stderr(
                    "Error: input file {} is empty or it can't be opened or "
                    "read".format(inputfile)
                )
                return 1
        passphrase.amount_n = amount_n
        # Amount of numbers must be set first: it affects how many words
        # are needed to reach the requested entropy.
        amount_w_good = passphrase.words_amount_needed()
        if amount_w is None:
            amount_w = amount_w_good
        elif amount_w < amount_w_good:
            Aux.print_stderr(
                'Warning: insecure amount of words chosen! Should be '
                'bigger than or equal to {}'.format(amount_w_good)
            )
        passphrase.amount_w = amount_w
        gen_ent = passphrase.generated_passphrase_entropy()
        if verbose:
            Aux.print_stderr(
                'Generating a passphrase of {} words and {} '
                'numbers using {}'.format(
                    amount_w,
                    amount_n,
                    'internal wordlist' if inputfile is None else (
                        'external wordlist: ' + inputfile + (
                            ' (diceware-like)' if is_diceware else ''
                        )
                    )
                )
            )
        # Case encoding for Passphrase.generate(): a negative value means
        # "this many lowercase" and a positive/zero value "this many
        # uppercase" -- semantics defined by Passphrase.generate();
        # TODO confirm. Note p_lowercase == 0 (bare flag) is falsy here,
        # so it falls through to p_uppercase (possibly None).
        case = (-1 * p_lowercase) if p_lowercase else p_uppercase
        passphrase.generate(case)
        passphrase.separator = separator
    if verbose:
        Aux.print_stderr(
            'The entropy of this {what} is {ent:.2f} bits'.format(
                what=gen_what,
                ent=gen_ent
            )
        )
    # A coin throw is exempt: 1 bit is all it can ever have.
    if not gen_coin and gen_ent < ENTROPY_BITS_MIN:
        Aux.print_stderr('Warning: the {} is too short!'.format(gen_what))
    if not mute:
        if no_newline:
            print(passphrase, end='')
        else:
            print(passphrase)
    if outputfile is not None:
        # ensure path to file exists or create
        dir_ = os_path_dirname(outputfile)
        if dir_:
            try:
                os_makedirs(dir_, exist_ok=True)
            except PermissionError:
                Aux.print_stderr(
                    'Error: permission denied to create directory {}'.format(
                        dir_,
                    )
                )
                return 1
        try:
            with open(outputfile, mode='wt', encoding='utf-8') as outfile:
                linefeed = '' if no_newline else '\n'
                outfile.write(str(passphrase) + linefeed)
        except IOError:
            Aux.print_stderr(
                "Error: file {} can't be opened or written".format(
                    outputfile,
                )
            )
            return 1
    return 0
constant[Passphrase CLI interface.]
variable[passphrase] assign[=] call[name[Passphrase], parameter[]]
name[passphrase].entropy_bits_req assign[=] name[ENTROPY_BITS_MIN]
variable[passwordlen_default] assign[=] call[name[passphrase].password_length_needed, parameter[]]
variable[amount_n_default] assign[=] constant[0]
name[passphrase].amount_n assign[=] name[amount_n_default]
variable[amount_w_default] assign[=] constant[6]
variable[parser] assign[=] call[name[ArgumentParser], parameter[]]
call[name[parser].add_argument, parameter[constant[--version]]]
call[name[parser].add_argument, parameter[constant[--insecure]]]
call[name[parser].add_argument, parameter[constant[--no-newline]]]
call[name[parser].add_argument, parameter[constant[-m], constant[--mute]]]
call[name[parser].add_argument, parameter[constant[-v], constant[--verbose]]]
call[name[parser].add_argument, parameter[constant[-e], constant[--entropybits]]]
call[name[parser].add_argument, parameter[constant[--uuid4]]]
call[name[parser].add_argument, parameter[constant[--coin]]]
call[name[parser].add_argument, parameter[constant[-p], constant[--password]]]
call[name[parser].add_argument, parameter[constant[--use-uppercase]]]
call[name[parser].add_argument, parameter[constant[--use-lowercase]]]
call[name[parser].add_argument, parameter[constant[--use-digits]]]
call[name[parser].add_argument, parameter[constant[--use-alphanumeric]]]
call[name[parser].add_argument, parameter[constant[--use-punctuation]]]
call[name[parser].add_argument, parameter[constant[-w], constant[--words]]]
call[name[parser].add_argument, parameter[constant[-n], constant[--numbers]]]
call[name[parser].add_argument, parameter[constant[-s], constant[--separator]]]
call[name[parser].add_argument, parameter[constant[-o], constant[--output]]]
call[name[parser].add_argument, parameter[constant[-i], constant[--input]]]
call[name[parser].add_argument, parameter[constant[-d], constant[--diceware]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[name[argv]]]
variable[inputfile] assign[=] name[args].input
variable[outputfile] assign[=] name[args].output
variable[separator] assign[=] name[args].separator
variable[is_diceware] assign[=] name[args].diceware
variable[passwordlen] assign[=] name[args].password
variable[amount_w] assign[=] name[args].words
variable[amount_n] assign[=] name[args].numbers
variable[show_version] assign[=] name[args].version
variable[mute] assign[=] name[args].mute
variable[verbose] assign[=] name[args].verbose
variable[no_newline] assign[=] name[args].no_newline
variable[gen_uuid4] assign[=] name[args].uuid4
variable[gen_coin] assign[=] name[args].coin
variable[p_uppercase] assign[=] name[args].use_uppercase
variable[p_lowercase] assign[=] name[args].use_lowercase
variable[p_digits] assign[=] name[args].use_digits
variable[p_punctuation] assign[=] name[args].use_punctuation
variable[p_alphanumeric] assign[=] name[args].use_alphanumeric
variable[entropy_bits] assign[=] name[args].entropybits
variable[gen_insecure] assign[=] name[args].insecure
if name[show_version] begin[:]
call[name[print], parameter[name[__version_string__]]]
return[constant[0]]
if name[verbose] begin[:]
call[name[Aux].print_stderr, parameter[name[__version_string__]]]
variable[system_entropy] assign[=] call[name[Aux].system_entropy, parameter[]]
if compare[name[system_entropy] less[<] name[SYSTEM_ENTROPY_BITS_MIN]] begin[:]
call[name[Aux].print_stderr, parameter[call[constant[Warning: the system has too few entropy: {} bits; randomness quality could be poor].format, parameter[name[system_entropy]]]]]
if <ast.UnaryOp object at 0x7da20c991c90> begin[:]
call[name[Aux].print_stderr, parameter[call[constant[Error: system entropy too low: {system_entropy} < {system_entropy_min}].format, parameter[]]]]
return[constant[1]]
if name[verbose] begin[:]
call[name[Aux].print_stderr, parameter[call[constant[Using {} bits of entropy for calculations (if any). The minimum recommended is {}].format, parameter[name[entropy_bits], name[ENTROPY_BITS_MIN]]]]]
variable[check_chosen_entropy] assign[=] <ast.IfExp object at 0x7da20c991a80>
if <ast.BoolOp object at 0x7da20c992860> begin[:]
call[name[Aux].print_stderr, parameter[call[constant[Warning: insecure number of bits for entropy calculations chosen! Should be bigger than {}].format, parameter[name[ENTROPY_BITS_MIN]]]]]
name[passphrase].entropy_bits_req assign[=] name[entropy_bits]
if name[gen_uuid4] begin[:]
if name[verbose] begin[:]
call[name[Aux].print_stderr, parameter[constant[Generating UUID v4]]]
variable[gen_what] assign[=] constant[UUID v4]
variable[gen_ent] assign[=] constant[120]
call[name[passphrase].generate_uuid4, parameter[]]
name[passphrase].separator assign[=] constant[-]
if name[verbose] begin[:]
call[name[Aux].print_stderr, parameter[call[constant[The entropy of this {what} is {ent:.2f} bits].format, parameter[]]]]
if <ast.BoolOp object at 0x7da2041daec0> begin[:]
call[name[Aux].print_stderr, parameter[call[constant[Warning: the {} is too short!].format, parameter[name[gen_what]]]]]
if <ast.UnaryOp object at 0x7da2041dbeb0> begin[:]
if name[no_newline] begin[:]
call[name[print], parameter[name[passphrase]]]
if compare[name[outputfile] is_not constant[None]] begin[:]
variable[dir_] assign[=] call[name[os_path_dirname], parameter[name[outputfile]]]
if name[dir_] begin[:]
<ast.Try object at 0x7da2041d88e0>
<ast.Try object at 0x7da2041d9d50>
return[constant[0]] | keyword[def] identifier[main] ( identifier[argv] : identifier[list] )-> identifier[int] :
literal[string]
identifier[passphrase] = identifier[Passphrase] ()
identifier[passphrase] . identifier[entropy_bits_req] = identifier[ENTROPY_BITS_MIN]
identifier[passwordlen_default] = identifier[passphrase] . identifier[password_length_needed] ()
identifier[amount_n_default] = literal[int]
identifier[passphrase] . identifier[amount_n] = identifier[amount_n_default]
identifier[amount_w_default] = literal[int]
identifier[parser] = identifier[ArgumentParser] (
identifier[formatter_class] = identifier[RawDescriptionHelpFormatter] ,
identifier[description] = literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] . identifier[format] (
identifier[version_string] = identifier[__version_string__] ,
identifier[minnum] = identifier[passphrase] . identifier[randnum_min] ,
identifier[maxnum] = identifier[passphrase] . identifier[randnum_max] ,
identifier[wordsamountmin] = identifier[amount_w_default] ,
identifier[numsamountmin] = identifier[amount_n_default] ,
identifier[passwdmin] = identifier[passwordlen_default] ,
identifier[passwdpref] = identifier[passwordlen_default] + literal[int]
)
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[help] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[default] = keyword[False] ,
identifier[help] = literal[string]
literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[default] = keyword[False] ,
identifier[help] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
literal[string] ,
identifier[action] = literal[string] ,
identifier[default] = keyword[False] ,
identifier[help] = literal[string]
literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
literal[string] ,
identifier[action] = literal[string] ,
identifier[default] = keyword[False] ,
identifier[help] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
literal[string] ,
identifier[type] = identifier[_bigger_than_zero] ,
identifier[default] = identifier[ENTROPY_BITS_MIN] ,
identifier[help] = literal[string]
literal[string] . identifier[format] ( identifier[ENTROPY_BITS_MIN] )
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[default] = keyword[False] ,
identifier[help] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[default] = keyword[False] ,
identifier[help] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
literal[string] ,
identifier[type] = identifier[_bigger_than_zero] ,
identifier[const] =- literal[int] ,
identifier[nargs] = literal[string] ,
identifier[help] = literal[string]
literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[type] = identifier[_bigger_than_zero] ,
identifier[const] = literal[int] ,
identifier[nargs] = literal[string] ,
identifier[help] = literal[string]
literal[string]
literal[string]
literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[type] = identifier[_bigger_than_zero] ,
identifier[const] = literal[int] ,
identifier[nargs] = literal[string] ,
identifier[help] = literal[string]
literal[string]
literal[string]
literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[default] = keyword[False] ,
identifier[help] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[default] = keyword[False] ,
identifier[help] = literal[string]
literal[string]
literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[default] = keyword[False] ,
identifier[help] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
literal[string] ,
identifier[type] = identifier[_bigger_than_zero] ,
identifier[help] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
literal[string] ,
identifier[type] = identifier[_bigger_than_zero] ,
identifier[default] = identifier[amount_n_default] ,
identifier[help] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
literal[string] ,
identifier[type] = identifier[str] ,
identifier[default] = literal[string] ,
identifier[help] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
literal[string] ,
identifier[type] = identifier[str] ,
identifier[help] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
literal[string] ,
identifier[type] = identifier[str] ,
identifier[help] = literal[string]
literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
literal[string] ,
identifier[action] = literal[string] ,
identifier[default] = keyword[False] ,
identifier[help] = literal[string]
)
identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[argv] )
identifier[inputfile] = identifier[args] . identifier[input]
identifier[outputfile] = identifier[args] . identifier[output]
identifier[separator] = identifier[args] . identifier[separator]
identifier[is_diceware] = identifier[args] . identifier[diceware]
identifier[passwordlen] = identifier[args] . identifier[password]
identifier[amount_w] = identifier[args] . identifier[words]
identifier[amount_n] = identifier[args] . identifier[numbers]
identifier[show_version] = identifier[args] . identifier[version]
identifier[mute] = identifier[args] . identifier[mute]
identifier[verbose] = identifier[args] . identifier[verbose]
identifier[no_newline] = identifier[args] . identifier[no_newline]
identifier[gen_uuid4] = identifier[args] . identifier[uuid4]
identifier[gen_coin] = identifier[args] . identifier[coin]
identifier[p_uppercase] = identifier[args] . identifier[use_uppercase]
identifier[p_lowercase] = identifier[args] . identifier[use_lowercase]
identifier[p_digits] = identifier[args] . identifier[use_digits]
identifier[p_punctuation] = identifier[args] . identifier[use_punctuation]
identifier[p_alphanumeric] = identifier[args] . identifier[use_alphanumeric]
identifier[entropy_bits] = identifier[args] . identifier[entropybits]
identifier[gen_insecure] = identifier[args] . identifier[insecure]
keyword[if] identifier[show_version] :
identifier[print] ( identifier[__version_string__] )
keyword[return] literal[int]
keyword[if] identifier[verbose] :
identifier[Aux] . identifier[print_stderr] ( identifier[__version_string__] )
identifier[system_entropy] = identifier[Aux] . identifier[system_entropy] ()
keyword[if] identifier[system_entropy] < identifier[SYSTEM_ENTROPY_BITS_MIN] :
identifier[Aux] . identifier[print_stderr] (
literal[string]
literal[string] . identifier[format] ( identifier[system_entropy] )
)
keyword[if] keyword[not] identifier[gen_insecure] :
identifier[Aux] . identifier[print_stderr] (
literal[string]
literal[string] . identifier[format] (
identifier[system_entropy] = identifier[system_entropy] ,
identifier[system_entropy_min] = identifier[SYSTEM_ENTROPY_BITS_MIN]
)
)
keyword[return] literal[int]
keyword[if] identifier[verbose] :
identifier[Aux] . identifier[print_stderr] (
literal[string]
literal[string] . identifier[format] ( identifier[entropy_bits] , identifier[ENTROPY_BITS_MIN] )
)
identifier[check_chosen_entropy] = keyword[False] keyword[if] identifier[gen_uuid4] keyword[or] identifier[gen_coin] keyword[else] keyword[not] (
identifier[amount_n] keyword[and] identifier[amount_w] keyword[and] identifier[passwordlen] keyword[is] keyword[None]
)
keyword[if] identifier[check_chosen_entropy] keyword[and] identifier[entropy_bits] < identifier[ENTROPY_BITS_MIN] :
identifier[Aux] . identifier[print_stderr] (
literal[string]
literal[string] . identifier[format] ( identifier[ENTROPY_BITS_MIN] )
)
identifier[passphrase] . identifier[entropy_bits_req] = identifier[entropy_bits]
keyword[if] identifier[gen_uuid4] :
keyword[if] identifier[verbose] :
identifier[Aux] . identifier[print_stderr] ( literal[string] )
identifier[gen_what] = literal[string]
identifier[gen_ent] = literal[int]
identifier[passphrase] . identifier[generate_uuid4] ()
identifier[passphrase] . identifier[separator] = literal[string]
keyword[elif] identifier[gen_coin] :
keyword[if] identifier[verbose] :
identifier[Aux] . identifier[print_stderr] ( literal[string] )
identifier[gen_what] = literal[string]
identifier[gen_ent] = literal[int]
identifier[passphrase] = literal[string] keyword[if] identifier[randbool] () keyword[else] literal[string]
keyword[elif] identifier[passwordlen] keyword[is] keyword[not] keyword[None] :
identifier[gen_what] = literal[string]
identifier[p_uppercase] = keyword[True] keyword[if] identifier[p_uppercase] keyword[is] keyword[not] keyword[None] keyword[else] keyword[False]
identifier[p_lowercase] = keyword[True] keyword[if] identifier[p_lowercase] keyword[is] keyword[not] keyword[None] keyword[else] keyword[False]
keyword[if] (
identifier[p_uppercase]
keyword[or] identifier[p_lowercase]
keyword[or] identifier[p_digits]
keyword[or] identifier[p_punctuation]
keyword[or] identifier[p_alphanumeric]
):
identifier[passphrase] . identifier[password_use_uppercase] =( identifier[p_uppercase] keyword[or] identifier[p_alphanumeric] )
identifier[passphrase] . identifier[password_use_lowercase] =( identifier[p_lowercase] keyword[or] identifier[p_alphanumeric] )
identifier[passphrase] . identifier[password_use_digits] =( identifier[p_digits] keyword[or] identifier[p_alphanumeric] )
identifier[passphrase] . identifier[password_use_punctuation] = identifier[p_punctuation]
identifier[min_len] = identifier[passphrase] . identifier[password_length_needed] ()
keyword[if] identifier[passwordlen] < literal[int] :
identifier[passwordlen] = identifier[min_len]
keyword[elif] identifier[passwordlen] < identifier[min_len] :
identifier[Aux] . identifier[print_stderr] (
literal[string]
literal[string] . identifier[format] ( identifier[min_len] )
)
identifier[passphrase] . identifier[passwordlen] = identifier[passwordlen]
identifier[gen_ent] = identifier[passphrase] . identifier[generated_password_entropy] ()
keyword[if] identifier[verbose] :
identifier[verbose_string] =(
literal[string]
literal[string] . identifier[format] ( identifier[passwordlen] )
)
identifier[verbose_string] +=(
literal[string] keyword[if] (
identifier[passphrase] . identifier[password_use_uppercase] keyword[or] identifier[p_alphanumeric]
) keyword[else] literal[string]
)
identifier[verbose_string] +=(
literal[string] keyword[if] (
identifier[passphrase] . identifier[password_use_lowercase] keyword[or] identifier[p_alphanumeric]
) keyword[else] literal[string]
)
identifier[verbose_string] +=(
literal[string] keyword[if] (
identifier[passphrase] . identifier[password_use_digits] keyword[or] identifier[p_alphanumeric]
) keyword[else] literal[string]
)
identifier[verbose_string] +=(
literal[string] keyword[if] (
identifier[passphrase] . identifier[password_use_punctuation]
) keyword[else] literal[string]
)
identifier[Aux] . identifier[print_stderr] (
identifier[verbose_string] [:- literal[int] ] keyword[if] (
identifier[verbose_string] [- literal[int] :]== literal[string]
) keyword[else] identifier[verbose_string]
)
identifier[passphrase] . identifier[generate_password] ()
identifier[passphrase] . identifier[separator] = literal[string]
keyword[else] :
identifier[gen_what] = literal[string]
keyword[if] identifier[inputfile] keyword[is] keyword[None] :
identifier[passphrase] . identifier[load_internal_wordlist] ()
keyword[else] :
keyword[try] :
identifier[passphrase] . identifier[import_words_from_file] ( identifier[inputfile] , identifier[is_diceware] )
keyword[except] identifier[IOError] :
identifier[Aux] . identifier[print_stderr] (
literal[string]
literal[string] . identifier[format] ( identifier[inputfile] )
)
keyword[return] literal[int]
identifier[passphrase] . identifier[amount_n] = identifier[amount_n]
identifier[amount_w_good] = identifier[passphrase] . identifier[words_amount_needed] ()
keyword[if] identifier[amount_w] keyword[is] keyword[None] :
identifier[amount_w] = identifier[amount_w_good]
keyword[elif] identifier[amount_w] < identifier[amount_w_good] :
identifier[Aux] . identifier[print_stderr] (
literal[string]
literal[string] . identifier[format] ( identifier[amount_w_good] )
)
identifier[passphrase] . identifier[amount_w] = identifier[amount_w]
identifier[gen_ent] = identifier[passphrase] . identifier[generated_passphrase_entropy] ()
keyword[if] identifier[verbose] :
identifier[Aux] . identifier[print_stderr] (
literal[string]
literal[string] . identifier[format] (
identifier[amount_w] ,
identifier[amount_n] ,
literal[string] keyword[if] identifier[inputfile] keyword[is] keyword[None] keyword[else] (
literal[string] + identifier[inputfile] +(
literal[string] keyword[if] identifier[is_diceware] keyword[else] literal[string]
)
)
)
)
identifier[case] =(- literal[int] * identifier[p_lowercase] ) keyword[if] identifier[p_lowercase] keyword[else] identifier[p_uppercase]
identifier[passphrase] . identifier[generate] ( identifier[case] )
identifier[passphrase] . identifier[separator] = identifier[separator]
keyword[if] identifier[verbose] :
identifier[Aux] . identifier[print_stderr] (
literal[string] . identifier[format] (
identifier[what] = identifier[gen_what] ,
identifier[ent] = identifier[gen_ent]
)
)
keyword[if] keyword[not] identifier[gen_coin] keyword[and] identifier[gen_ent] < identifier[ENTROPY_BITS_MIN] :
identifier[Aux] . identifier[print_stderr] ( literal[string] . identifier[format] ( identifier[gen_what] ))
keyword[if] keyword[not] identifier[mute] :
keyword[if] identifier[no_newline] :
identifier[print] ( identifier[passphrase] , identifier[end] = literal[string] )
keyword[else] :
identifier[print] ( identifier[passphrase] )
keyword[if] identifier[outputfile] keyword[is] keyword[not] keyword[None] :
identifier[dir_] = identifier[os_path_dirname] ( identifier[outputfile] )
keyword[if] identifier[dir_] :
keyword[try] :
identifier[os_makedirs] ( identifier[dir_] , identifier[exist_ok] = keyword[True] )
keyword[except] identifier[PermissionError] :
identifier[Aux] . identifier[print_stderr] (
literal[string] . identifier[format] (
identifier[dir_] ,
)
)
keyword[return] literal[int]
keyword[try] :
keyword[with] identifier[open] ( identifier[outputfile] , identifier[mode] = literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[outfile] :
identifier[linefeed] = literal[string] keyword[if] identifier[no_newline] keyword[else] literal[string]
identifier[outfile] . identifier[write] ( identifier[str] ( identifier[passphrase] )+ identifier[linefeed] )
keyword[except] identifier[IOError] :
identifier[Aux] . identifier[print_stderr] (
literal[string] . identifier[format] (
identifier[outputfile] ,
)
)
keyword[return] literal[int]
keyword[return] literal[int] | def main(argv: list) -> int:
"""Passphrase CLI interface."""
passphrase = Passphrase()
# Set defaults
passphrase.entropy_bits_req = ENTROPY_BITS_MIN
passwordlen_default = passphrase.password_length_needed()
amount_n_default = 0
passphrase.amount_n = amount_n_default
# To avoid loading the wordlist unnecessarily, I'm hardcoding this value
# It's ok, it's only used to show help information
amount_w_default = 6
parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter, description='{version_string}\n\nGenerates a cryptographically secure passphrase, based on a wordlist, or a\npassword, and prints it to standard output.\nBy default, it uses an embedded EFF Large Wordlist for passphrases.\nPassphrases with less than {wordsamountmin} words are considered insecure. A safe bet is \nbetween {wordsamountmin} and 7 words, plus at least a number.\nFor passwords, use at least {passwdmin} characters, but prefer {passwdpref} or more, using the\ncomplete characters set.\n\nInstead of words and numbers, a password (random string of printable\ncharacters from Python String standard) can be generated by\n-p | --password, specifying the length. It uses uppercase, lowercase, digits\nand punctuation characters unless otherwise specified.\nAlso, a UUID v4 string can be generated by --uuid4 or a coin can be thrown\nwith --coin.\nA custom wordlist can be specified by -i | --input, the format must be: \nsingle column, one word per line. If -d | --diceware is used, the input\nfile is treated as a diceware wordlist (two columns).\nOptionally, -o | --output can be used to specify an output file (existing \nfile is overwritten).\nThe number of words is {wordsamountmin} by default, but it can be changed by -w | --words.\nThe number of numbers is {numsamountmin} by default, but it can be changed by\n-n | --numbers. 
The generated numbers are between {minnum} and {maxnum}.\nThe default separator is a blank space, but any character or character\nsequence can be specified by -s | --separator.\n\nExample output:\n\tDefault parameters:\tchalice sheath postcard modular cider size\n\tWords=3, Numbers=2:\tdepraved widow office 184022 320264\n\tPassword, 20 chars:\tsF#s@B+iR#ZIL-yUWKPR'.format(version_string=__version_string__, minnum=passphrase.randnum_min, maxnum=passphrase.randnum_max, wordsamountmin=amount_w_default, numsamountmin=amount_n_default, passwdmin=passwordlen_default, passwdpref=passwordlen_default + 4))
parser.add_argument('--version', action='store_true', help='print program version and licensing information and exit')
parser.add_argument('--insecure', action='store_true', default=False, help="force password/passphrase generation even if the system's entropy is too low")
parser.add_argument('--no-newline', action='store_true', default=False, help="don't print newline at the end of the passphrase/password")
parser.add_argument('-m', '--mute', action='store_true', default=False, help="muted mode: it won't print output, only informational, warning or error messages (usefull with -o | --output)")
parser.add_argument('-v', '--verbose', action='store_true', default=False, help='print additional information (can coexist with -m | --mute)')
parser.add_argument('-e', '--entropybits', type=_bigger_than_zero, default=ENTROPY_BITS_MIN, help='specify the number of bits to use for entropy calculations (defaults to {})'.format(ENTROPY_BITS_MIN))
parser.add_argument('--uuid4', action='store_true', default=False, help='generate an UUID v4 string')
parser.add_argument('--coin', action='store_true', default=False, help='generate a random coin throw: heads or tails')
parser.add_argument('-p', '--password', type=_bigger_than_zero, const=-1, nargs='?', help='generate a password of the specified length from all printable or selected characters')
parser.add_argument('--use-uppercase', type=_bigger_than_zero, const=0, nargs='?', help='use uppercase characters for password generation or give the amount of uppercase characters in the passphrase: zero or no input for all uppercase or any number of uppercase characters wanted (the rest are lowercase)')
parser.add_argument('--use-lowercase', type=_bigger_than_zero, const=0, nargs='?', help='use lowercase characters for password generation or give the amount of lowercase characters in the passphrase: zero or no input for all lowercase (default) or any number of lowercase characters wanted (the rest are uppercase)')
parser.add_argument('--use-digits', action='store_true', default=False, help='use digits for password generation')
parser.add_argument('--use-alphanumeric', action='store_true', default=False, help='use lowercase and uppercase characters, and digits for password generation (equivalent to --use-lowercase --use-uppercase --use-digits)')
parser.add_argument('--use-punctuation', action='store_true', default=False, help='use punctuation characters for password generation')
parser.add_argument('-w', '--words', type=_bigger_than_zero, help='specify the amount of words (0 or more)')
parser.add_argument('-n', '--numbers', type=_bigger_than_zero, default=amount_n_default, help='specify the amount of numbers (0 or more)')
parser.add_argument('-s', '--separator', type=str, default=' ', help='specify a separator character (space by default)')
parser.add_argument('-o', '--output', type=str, help='specify an output file (existing file is overwritten)')
parser.add_argument('-i', '--input', type=str, help='specify an input file (it must have the following format: single column, one word per line)')
parser.add_argument('-d', '--diceware', action='store_true', default=False, help='specify input file as a diceware list (format: two colums)')
args = parser.parse_args(argv)
inputfile = args.input
outputfile = args.output
separator = args.separator
is_diceware = args.diceware
passwordlen = args.password
amount_w = args.words
amount_n = args.numbers
show_version = args.version
mute = args.mute
verbose = args.verbose
no_newline = args.no_newline
gen_uuid4 = args.uuid4
gen_coin = args.coin
p_uppercase = args.use_uppercase
p_lowercase = args.use_lowercase
p_digits = args.use_digits
p_punctuation = args.use_punctuation
p_alphanumeric = args.use_alphanumeric
entropy_bits = args.entropybits
gen_insecure = args.insecure
if show_version:
print(__version_string__)
return 0 # depends on [control=['if'], data=[]]
if verbose:
Aux.print_stderr(__version_string__) # depends on [control=['if'], data=[]]
# Check system entropy
system_entropy = Aux.system_entropy()
if system_entropy < SYSTEM_ENTROPY_BITS_MIN:
Aux.print_stderr('Warning: the system has too few entropy: {} bits; randomness quality could be poor'.format(system_entropy))
if not gen_insecure:
Aux.print_stderr('Error: system entropy too low: {system_entropy} < {system_entropy_min}'.format(system_entropy=system_entropy, system_entropy_min=SYSTEM_ENTROPY_BITS_MIN))
return 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['system_entropy', 'SYSTEM_ENTROPY_BITS_MIN']]
if verbose:
Aux.print_stderr('Using {} bits of entropy for calculations (if any). The minimum recommended is {}'.format(entropy_bits, ENTROPY_BITS_MIN)) # depends on [control=['if'], data=[]]
# Check selected entropy
check_chosen_entropy = False if gen_uuid4 or gen_coin else not (amount_n and amount_w and (passwordlen is None))
if check_chosen_entropy and entropy_bits < ENTROPY_BITS_MIN:
Aux.print_stderr('Warning: insecure number of bits for entropy calculations chosen! Should be bigger than {}'.format(ENTROPY_BITS_MIN)) # depends on [control=['if'], data=[]]
passphrase.entropy_bits_req = entropy_bits
# Generate whatever is requested
if gen_uuid4:
# Generate uuid4
if verbose:
Aux.print_stderr('Generating UUID v4') # depends on [control=['if'], data=[]]
gen_what = 'UUID v4'
gen_ent = 120
passphrase.generate_uuid4()
passphrase.separator = '-' # depends on [control=['if'], data=[]]
elif gen_coin:
# Generate a coin throw
if verbose:
Aux.print_stderr('Throwing a coin') # depends on [control=['if'], data=[]]
gen_what = 'coin'
gen_ent = 1
passphrase = 'Heads' if randbool() else 'Tails' # depends on [control=['if'], data=[]]
elif passwordlen is not None:
# Generate a password
gen_what = 'password'
p_uppercase = True if p_uppercase is not None else False
p_lowercase = True if p_lowercase is not None else False
if p_uppercase or p_lowercase or p_digits or p_punctuation or p_alphanumeric:
passphrase.password_use_uppercase = p_uppercase or p_alphanumeric
passphrase.password_use_lowercase = p_lowercase or p_alphanumeric
passphrase.password_use_digits = p_digits or p_alphanumeric
passphrase.password_use_punctuation = p_punctuation # depends on [control=['if'], data=[]]
min_len = passphrase.password_length_needed()
if passwordlen < 1:
passwordlen = min_len # depends on [control=['if'], data=['passwordlen']]
elif passwordlen < min_len:
Aux.print_stderr('Warning: insecure password length chosen! Should be bigger than or equal to {}'.format(min_len)) # depends on [control=['if'], data=['min_len']]
passphrase.passwordlen = passwordlen
gen_ent = passphrase.generated_password_entropy()
if verbose:
verbose_string = 'Generating password of {} characters long using '.format(passwordlen)
verbose_string += 'uppercase characters, ' if passphrase.password_use_uppercase or p_alphanumeric else ''
verbose_string += 'lowercase characters, ' if passphrase.password_use_lowercase or p_alphanumeric else ''
verbose_string += 'digits, ' if passphrase.password_use_digits or p_alphanumeric else ''
verbose_string += 'punctuation characters, ' if passphrase.password_use_punctuation else ''
Aux.print_stderr(verbose_string[:-2] if verbose_string[-2:] == ', ' else verbose_string) # depends on [control=['if'], data=[]]
passphrase.generate_password()
passphrase.separator = '' # depends on [control=['if'], data=['passwordlen']]
else:
# Generate a passphrase
gen_what = 'passphrase'
# Read wordlist if indicated
if inputfile is None:
passphrase.load_internal_wordlist() # depends on [control=['if'], data=[]]
else:
try:
passphrase.import_words_from_file(inputfile, is_diceware) # depends on [control=['try'], data=[]]
except IOError:
Aux.print_stderr("Error: input file {} is empty or it can't be opened or read".format(inputfile))
return 1 # depends on [control=['except'], data=[]]
passphrase.amount_n = amount_n
amount_w_good = passphrase.words_amount_needed()
if amount_w is None:
amount_w = amount_w_good # depends on [control=['if'], data=['amount_w']]
elif amount_w < amount_w_good:
Aux.print_stderr('Warning: insecure amount of words chosen! Should be bigger than or equal to {}'.format(amount_w_good)) # depends on [control=['if'], data=['amount_w_good']]
passphrase.amount_w = amount_w
gen_ent = passphrase.generated_passphrase_entropy()
if verbose:
Aux.print_stderr('Generating a passphrase of {} words and {} numbers using {}'.format(amount_w, amount_n, 'internal wordlist' if inputfile is None else 'external wordlist: ' + inputfile + (' (diceware-like)' if is_diceware else ''))) # depends on [control=['if'], data=[]]
case = -1 * p_lowercase if p_lowercase else p_uppercase
passphrase.generate(case)
passphrase.separator = separator
if verbose:
Aux.print_stderr('The entropy of this {what} is {ent:.2f} bits'.format(what=gen_what, ent=gen_ent)) # depends on [control=['if'], data=[]]
if not gen_coin and gen_ent < ENTROPY_BITS_MIN:
Aux.print_stderr('Warning: the {} is too short!'.format(gen_what)) # depends on [control=['if'], data=[]]
if not mute:
if no_newline:
print(passphrase, end='') # depends on [control=['if'], data=[]]
else:
print(passphrase) # depends on [control=['if'], data=[]]
if outputfile is not None:
# ensure path to file exists or create
dir_ = os_path_dirname(outputfile)
if dir_:
try:
os_makedirs(dir_, exist_ok=True) # depends on [control=['try'], data=[]]
except PermissionError:
Aux.print_stderr('Error: permission denied to create directory {}'.format(dir_))
return 1 # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
try:
with open(outputfile, mode='wt', encoding='utf-8') as outfile:
linefeed = '' if no_newline else '\n'
outfile.write(str(passphrase) + linefeed) # depends on [control=['with'], data=['outfile']] # depends on [control=['try'], data=[]]
except IOError:
Aux.print_stderr("Error: file {} can't be opened or written".format(outputfile))
return 1 # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['outputfile']]
return 0 |
def project_remove_folder(object_id, input_params={}, always_retry=False, **kwargs):
"""
Invokes the /project-xxxx/removeFolder API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FremoveFolder
"""
return DXHTTPRequest('/%s/removeFolder' % object_id, input_params, always_retry=always_retry, **kwargs) | def function[project_remove_folder, parameter[object_id, input_params, always_retry]]:
constant[
Invokes the /project-xxxx/removeFolder API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FremoveFolder
]
return[call[name[DXHTTPRequest], parameter[binary_operation[constant[/%s/removeFolder] <ast.Mod object at 0x7da2590d6920> name[object_id]], name[input_params]]]] | keyword[def] identifier[project_remove_folder] ( identifier[object_id] , identifier[input_params] ={}, identifier[always_retry] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[DXHTTPRequest] ( literal[string] % identifier[object_id] , identifier[input_params] , identifier[always_retry] = identifier[always_retry] ,** identifier[kwargs] ) | def project_remove_folder(object_id, input_params={}, always_retry=False, **kwargs):
"""
Invokes the /project-xxxx/removeFolder API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FremoveFolder
"""
return DXHTTPRequest('/%s/removeFolder' % object_id, input_params, always_retry=always_retry, **kwargs) |
def _decreaseIndent(self, indent):
"""Remove 1 indentation level
"""
if indent.endswith(self._qpartIndent()):
return indent[:-len(self._qpartIndent())]
else: # oops, strange indentation, just return previous indent
return indent | def function[_decreaseIndent, parameter[self, indent]]:
constant[Remove 1 indentation level
]
if call[name[indent].endswith, parameter[call[name[self]._qpartIndent, parameter[]]]] begin[:]
return[call[name[indent]][<ast.Slice object at 0x7da204564dc0>]] | keyword[def] identifier[_decreaseIndent] ( identifier[self] , identifier[indent] ):
literal[string]
keyword[if] identifier[indent] . identifier[endswith] ( identifier[self] . identifier[_qpartIndent] ()):
keyword[return] identifier[indent] [:- identifier[len] ( identifier[self] . identifier[_qpartIndent] ())]
keyword[else] :
keyword[return] identifier[indent] | def _decreaseIndent(self, indent):
"""Remove 1 indentation level
"""
if indent.endswith(self._qpartIndent()):
return indent[:-len(self._qpartIndent())] # depends on [control=['if'], data=[]]
else: # oops, strange indentation, just return previous indent
return indent |
def gatk_cmd(name, jvm_opts, params, config=None):
"""Retrieve PATH to gatk using locally installed java.
"""
if name == "gatk":
if isinstance(config, dict) and "config" not in config:
data = {"config": config}
else:
data = config
if not data or "gatk4" not in dd.get_tools_off(data):
return _gatk4_cmd(jvm_opts, params, data)
else:
name = "gatk3"
gatk_cmd = utils.which(os.path.join(os.path.dirname(os.path.realpath(sys.executable)), name))
# if we can't find via the local executable, fallback to being in the path
if not gatk_cmd:
gatk_cmd = utils.which(name)
if gatk_cmd:
return "%s && export PATH=%s:\"$PATH\" && %s %s %s" % \
(utils.clear_java_home(), utils.get_java_binpath(gatk_cmd), gatk_cmd,
" ".join(jvm_opts), " ".join([str(x) for x in params])) | def function[gatk_cmd, parameter[name, jvm_opts, params, config]]:
constant[Retrieve PATH to gatk using locally installed java.
]
if compare[name[name] equal[==] constant[gatk]] begin[:]
if <ast.BoolOp object at 0x7da1b1985ed0> begin[:]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b1985ab0>], [<ast.Name object at 0x7da1b1984a90>]]
if <ast.BoolOp object at 0x7da1b19845e0> begin[:]
return[call[name[_gatk4_cmd], parameter[name[jvm_opts], name[params], name[data]]]]
variable[gatk_cmd] assign[=] call[name[utils].which, parameter[call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[call[name[os].path.realpath, parameter[name[sys].executable]]]], name[name]]]]]
if <ast.UnaryOp object at 0x7da1b1984130> begin[:]
variable[gatk_cmd] assign[=] call[name[utils].which, parameter[name[name]]]
if name[gatk_cmd] begin[:]
return[binary_operation[constant[%s && export PATH=%s:"$PATH" && %s %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b1985f60>, <ast.Call object at 0x7da1b1986a70>, <ast.Name object at 0x7da1b1987880>, <ast.Call object at 0x7da1b1984430>, <ast.Call object at 0x7da1b1984c10>]]]] | keyword[def] identifier[gatk_cmd] ( identifier[name] , identifier[jvm_opts] , identifier[params] , identifier[config] = keyword[None] ):
literal[string]
keyword[if] identifier[name] == literal[string] :
keyword[if] identifier[isinstance] ( identifier[config] , identifier[dict] ) keyword[and] literal[string] keyword[not] keyword[in] identifier[config] :
identifier[data] ={ literal[string] : identifier[config] }
keyword[else] :
identifier[data] = identifier[config]
keyword[if] keyword[not] identifier[data] keyword[or] literal[string] keyword[not] keyword[in] identifier[dd] . identifier[get_tools_off] ( identifier[data] ):
keyword[return] identifier[_gatk4_cmd] ( identifier[jvm_opts] , identifier[params] , identifier[data] )
keyword[else] :
identifier[name] = literal[string]
identifier[gatk_cmd] = identifier[utils] . identifier[which] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[realpath] ( identifier[sys] . identifier[executable] )), identifier[name] ))
keyword[if] keyword[not] identifier[gatk_cmd] :
identifier[gatk_cmd] = identifier[utils] . identifier[which] ( identifier[name] )
keyword[if] identifier[gatk_cmd] :
keyword[return] literal[string] %( identifier[utils] . identifier[clear_java_home] (), identifier[utils] . identifier[get_java_binpath] ( identifier[gatk_cmd] ), identifier[gatk_cmd] ,
literal[string] . identifier[join] ( identifier[jvm_opts] ), literal[string] . identifier[join] ([ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[params] ])) | def gatk_cmd(name, jvm_opts, params, config=None):
"""Retrieve PATH to gatk using locally installed java.
"""
if name == 'gatk':
if isinstance(config, dict) and 'config' not in config:
data = {'config': config} # depends on [control=['if'], data=[]]
else:
data = config
if not data or 'gatk4' not in dd.get_tools_off(data):
return _gatk4_cmd(jvm_opts, params, data) # depends on [control=['if'], data=[]]
else:
name = 'gatk3' # depends on [control=['if'], data=['name']]
gatk_cmd = utils.which(os.path.join(os.path.dirname(os.path.realpath(sys.executable)), name))
# if we can't find via the local executable, fallback to being in the path
if not gatk_cmd:
gatk_cmd = utils.which(name) # depends on [control=['if'], data=[]]
if gatk_cmd:
return '%s && export PATH=%s:"$PATH" && %s %s %s' % (utils.clear_java_home(), utils.get_java_binpath(gatk_cmd), gatk_cmd, ' '.join(jvm_opts), ' '.join([str(x) for x in params])) # depends on [control=['if'], data=[]] |
def import_lsdinst(self, struct_data):
"""import from an lsdinst struct"""
self.name = struct_data['name']
self.automate = struct_data['data']['automate']
self.pan = struct_data['data']['pan']
if self.table is not None:
self.table.import_lsdinst(struct_data) | def function[import_lsdinst, parameter[self, struct_data]]:
constant[import from an lsdinst struct]
name[self].name assign[=] call[name[struct_data]][constant[name]]
name[self].automate assign[=] call[call[name[struct_data]][constant[data]]][constant[automate]]
name[self].pan assign[=] call[call[name[struct_data]][constant[data]]][constant[pan]]
if compare[name[self].table is_not constant[None]] begin[:]
call[name[self].table.import_lsdinst, parameter[name[struct_data]]] | keyword[def] identifier[import_lsdinst] ( identifier[self] , identifier[struct_data] ):
literal[string]
identifier[self] . identifier[name] = identifier[struct_data] [ literal[string] ]
identifier[self] . identifier[automate] = identifier[struct_data] [ literal[string] ][ literal[string] ]
identifier[self] . identifier[pan] = identifier[struct_data] [ literal[string] ][ literal[string] ]
keyword[if] identifier[self] . identifier[table] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[table] . identifier[import_lsdinst] ( identifier[struct_data] ) | def import_lsdinst(self, struct_data):
"""import from an lsdinst struct"""
self.name = struct_data['name']
self.automate = struct_data['data']['automate']
self.pan = struct_data['data']['pan']
if self.table is not None:
self.table.import_lsdinst(struct_data) # depends on [control=['if'], data=[]] |
def get_kwdefaults(func, parse_source=False):
r"""
Args:
func (func):
Returns:
dict:
CommandLine:
python -m utool.util_inspect get_kwdefaults
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_inspect import * # NOQA
>>> import utool as ut
>>> func = dummy_func
>>> parse_source = True
>>> kwdefaults = get_kwdefaults(func, parse_source)
>>> print('kwdefaults = %s' % (ut.repr4(kwdefaults),))
"""
#import utool as ut
#with ut.embed_on_exception_context:
argspec = inspect.getargspec(func)
kwdefaults = {}
if argspec.args is None or argspec.defaults is None:
pass
else:
args = argspec.args
defaults = argspec.defaults
#kwdefaults = OrderedDict(zip(argspec.args[::-1], argspec.defaults[::-1]))
kwpos = len(args) - len(defaults)
kwdefaults = OrderedDict(zip(args[kwpos:], defaults))
if parse_source and argspec.keywords:
# TODO parse for kwargs.get/pop
keyword_defaults = parse_func_kwarg_keys(func, with_vals=True)
for key, val in keyword_defaults:
assert key not in kwdefaults, 'parsing error'
kwdefaults[key] = val
return kwdefaults | def function[get_kwdefaults, parameter[func, parse_source]]:
constant[
Args:
func (func):
Returns:
dict:
CommandLine:
python -m utool.util_inspect get_kwdefaults
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_inspect import * # NOQA
>>> import utool as ut
>>> func = dummy_func
>>> parse_source = True
>>> kwdefaults = get_kwdefaults(func, parse_source)
>>> print('kwdefaults = %s' % (ut.repr4(kwdefaults),))
]
variable[argspec] assign[=] call[name[inspect].getargspec, parameter[name[func]]]
variable[kwdefaults] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da1b24eae90> begin[:]
pass
if <ast.BoolOp object at 0x7da1b24ebe80> begin[:]
variable[keyword_defaults] assign[=] call[name[parse_func_kwarg_keys], parameter[name[func]]]
for taget[tuple[[<ast.Name object at 0x7da1b24e9900>, <ast.Name object at 0x7da1b24ebfd0>]]] in starred[name[keyword_defaults]] begin[:]
assert[compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[kwdefaults]]]
call[name[kwdefaults]][name[key]] assign[=] name[val]
return[name[kwdefaults]] | keyword[def] identifier[get_kwdefaults] ( identifier[func] , identifier[parse_source] = keyword[False] ):
literal[string]
identifier[argspec] = identifier[inspect] . identifier[getargspec] ( identifier[func] )
identifier[kwdefaults] ={}
keyword[if] identifier[argspec] . identifier[args] keyword[is] keyword[None] keyword[or] identifier[argspec] . identifier[defaults] keyword[is] keyword[None] :
keyword[pass]
keyword[else] :
identifier[args] = identifier[argspec] . identifier[args]
identifier[defaults] = identifier[argspec] . identifier[defaults]
identifier[kwpos] = identifier[len] ( identifier[args] )- identifier[len] ( identifier[defaults] )
identifier[kwdefaults] = identifier[OrderedDict] ( identifier[zip] ( identifier[args] [ identifier[kwpos] :], identifier[defaults] ))
keyword[if] identifier[parse_source] keyword[and] identifier[argspec] . identifier[keywords] :
identifier[keyword_defaults] = identifier[parse_func_kwarg_keys] ( identifier[func] , identifier[with_vals] = keyword[True] )
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[keyword_defaults] :
keyword[assert] identifier[key] keyword[not] keyword[in] identifier[kwdefaults] , literal[string]
identifier[kwdefaults] [ identifier[key] ]= identifier[val]
keyword[return] identifier[kwdefaults] | def get_kwdefaults(func, parse_source=False):
"""
Args:
func (func):
Returns:
dict:
CommandLine:
python -m utool.util_inspect get_kwdefaults
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_inspect import * # NOQA
>>> import utool as ut
>>> func = dummy_func
>>> parse_source = True
>>> kwdefaults = get_kwdefaults(func, parse_source)
>>> print('kwdefaults = %s' % (ut.repr4(kwdefaults),))
"""
#import utool as ut
#with ut.embed_on_exception_context:
argspec = inspect.getargspec(func)
kwdefaults = {}
if argspec.args is None or argspec.defaults is None:
pass # depends on [control=['if'], data=[]]
else:
args = argspec.args
defaults = argspec.defaults
#kwdefaults = OrderedDict(zip(argspec.args[::-1], argspec.defaults[::-1]))
kwpos = len(args) - len(defaults)
kwdefaults = OrderedDict(zip(args[kwpos:], defaults))
if parse_source and argspec.keywords:
# TODO parse for kwargs.get/pop
keyword_defaults = parse_func_kwarg_keys(func, with_vals=True)
for (key, val) in keyword_defaults:
assert key not in kwdefaults, 'parsing error'
kwdefaults[key] = val # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
return kwdefaults |
def table(self, header, body):
"""Rendering table element. Wrap header and body in it.
:param header: header part of the table.
:param body: body part of the table.
"""
table = '\n.. list-table::\n'
if header and not header.isspace():
table = (table + self.indent + ':header-rows: 1\n\n' +
self._indent_block(header) + '\n')
else:
table = table + '\n'
table = table + self._indent_block(body) + '\n\n'
return table | def function[table, parameter[self, header, body]]:
constant[Rendering table element. Wrap header and body in it.
:param header: header part of the table.
:param body: body part of the table.
]
variable[table] assign[=] constant[
.. list-table::
]
if <ast.BoolOp object at 0x7da2041db160> begin[:]
variable[table] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[table] + name[self].indent] + constant[:header-rows: 1
]] + call[name[self]._indent_block, parameter[name[header]]]] + constant[
]]
variable[table] assign[=] binary_operation[binary_operation[name[table] + call[name[self]._indent_block, parameter[name[body]]]] + constant[
]]
return[name[table]] | keyword[def] identifier[table] ( identifier[self] , identifier[header] , identifier[body] ):
literal[string]
identifier[table] = literal[string]
keyword[if] identifier[header] keyword[and] keyword[not] identifier[header] . identifier[isspace] ():
identifier[table] =( identifier[table] + identifier[self] . identifier[indent] + literal[string] +
identifier[self] . identifier[_indent_block] ( identifier[header] )+ literal[string] )
keyword[else] :
identifier[table] = identifier[table] + literal[string]
identifier[table] = identifier[table] + identifier[self] . identifier[_indent_block] ( identifier[body] )+ literal[string]
keyword[return] identifier[table] | def table(self, header, body):
"""Rendering table element. Wrap header and body in it.
:param header: header part of the table.
:param body: body part of the table.
"""
table = '\n.. list-table::\n'
if header and (not header.isspace()):
table = table + self.indent + ':header-rows: 1\n\n' + self._indent_block(header) + '\n' # depends on [control=['if'], data=[]]
else:
table = table + '\n'
table = table + self._indent_block(body) + '\n\n'
return table |
def get_vertex(self, key):
"""Returns or Creates a Vertex mapped by key.
Args:
key: A string reference for a vertex. May refer to a new Vertex in which
case it will be created.
Returns:
A the Vertex mapped to by key.
"""
if key in self.vertex_map:
return self.vertex_map[key]
vertex = self.new_vertex()
self.vertex_map[key] = vertex
return vertex | def function[get_vertex, parameter[self, key]]:
constant[Returns or Creates a Vertex mapped by key.
Args:
key: A string reference for a vertex. May refer to a new Vertex in which
case it will be created.
Returns:
A the Vertex mapped to by key.
]
if compare[name[key] in name[self].vertex_map] begin[:]
return[call[name[self].vertex_map][name[key]]]
variable[vertex] assign[=] call[name[self].new_vertex, parameter[]]
call[name[self].vertex_map][name[key]] assign[=] name[vertex]
return[name[vertex]] | keyword[def] identifier[get_vertex] ( identifier[self] , identifier[key] ):
literal[string]
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[vertex_map] :
keyword[return] identifier[self] . identifier[vertex_map] [ identifier[key] ]
identifier[vertex] = identifier[self] . identifier[new_vertex] ()
identifier[self] . identifier[vertex_map] [ identifier[key] ]= identifier[vertex]
keyword[return] identifier[vertex] | def get_vertex(self, key):
"""Returns or Creates a Vertex mapped by key.
Args:
key: A string reference for a vertex. May refer to a new Vertex in which
case it will be created.
Returns:
A the Vertex mapped to by key.
"""
if key in self.vertex_map:
return self.vertex_map[key] # depends on [control=['if'], data=['key']]
vertex = self.new_vertex()
self.vertex_map[key] = vertex
return vertex |
def is_same_filename (filename1, filename2):
"""Check if filename1 and filename2 are the same filename."""
return os.path.realpath(filename1) == os.path.realpath(filename2) | def function[is_same_filename, parameter[filename1, filename2]]:
constant[Check if filename1 and filename2 are the same filename.]
return[compare[call[name[os].path.realpath, parameter[name[filename1]]] equal[==] call[name[os].path.realpath, parameter[name[filename2]]]]] | keyword[def] identifier[is_same_filename] ( identifier[filename1] , identifier[filename2] ):
literal[string]
keyword[return] identifier[os] . identifier[path] . identifier[realpath] ( identifier[filename1] )== identifier[os] . identifier[path] . identifier[realpath] ( identifier[filename2] ) | def is_same_filename(filename1, filename2):
"""Check if filename1 and filename2 are the same filename."""
return os.path.realpath(filename1) == os.path.realpath(filename2) |
def start(self):
"""Play the music"""
if self.initialized:
mixer.music.unpause()
else:
mixer.music.play()
# FIXME: Calling play twice to ensure the music is actually playing
mixer.music.play()
self.initialized = True
self.paused = False | def function[start, parameter[self]]:
constant[Play the music]
if name[self].initialized begin[:]
call[name[mixer].music.unpause, parameter[]]
name[self].paused assign[=] constant[False] | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[initialized] :
identifier[mixer] . identifier[music] . identifier[unpause] ()
keyword[else] :
identifier[mixer] . identifier[music] . identifier[play] ()
identifier[mixer] . identifier[music] . identifier[play] ()
identifier[self] . identifier[initialized] = keyword[True]
identifier[self] . identifier[paused] = keyword[False] | def start(self):
"""Play the music"""
if self.initialized:
mixer.music.unpause() # depends on [control=['if'], data=[]]
else:
mixer.music.play()
# FIXME: Calling play twice to ensure the music is actually playing
mixer.music.play()
self.initialized = True
self.paused = False |
def t_NAME(self,t):
    '[A-Za-z]\w*|\"char\"'
    # NOTE: the docstring above is the token regex consumed by ply.lex;
    # it must not be edited or reformatted.
    # warning: this allows stuff like SeLeCt with mixed case. who cares.
    lowered = t.value.lower()
    if lowered in KEYWORDS:
        t.type = KEYWORDS[lowered]
    elif lowered in ('is', 'not'):
        t.type = 'BOOL'
    else:
        t.type = 'NAME'
    return t
constant[[A-Za-z]\w*|"char"]
name[t].type assign[=] <ast.IfExp object at 0x7da1b15f1cc0>
return[name[t]] | keyword[def] identifier[t_NAME] ( identifier[self] , identifier[t] ):
literal[string]
identifier[t] . identifier[type] = identifier[KEYWORDS] [ identifier[t] . identifier[value] . identifier[lower] ()] keyword[if] identifier[t] . identifier[value] . identifier[lower] () keyword[in] identifier[KEYWORDS] keyword[else] literal[string] keyword[if] identifier[t] . identifier[value] . identifier[lower] () keyword[in] ( literal[string] , literal[string] ) keyword[else] literal[string]
keyword[return] identifier[t] | def t_NAME(self, t):
'''[A-Za-z]\\w*|"char"'''
# warning: this allows stuff like SeLeCt with mixed case. who cares.
t.type = KEYWORDS[t.value.lower()] if t.value.lower() in KEYWORDS else 'BOOL' if t.value.lower() in ('is', 'not') else 'NAME'
return t |
def advise(self, name, f, *a, **kw):
    """
    Register an advice to be handled later by the handle method.

    Arguments:

    name
        The name of the advice group; if ``None`` the call is a no-op.

    f
        A callable method or function.

    Any remaining positional and keyword arguments are stored with ``f``
    and passed to it when the advice is eventually invoked.
    """
    if name is None:
        return
    advice = (f, a, kw)
    debug = self.get(DEBUG)
    frame = currentframe()
    if frame is None:
        logger.debug('currentframe() failed to return frame')
    else:
        if name in self._called:
            # Guard against advices registered from within a handled group.
            self.__advice_stack_frame_protection(frame)
        if debug:
            caller = frame.f_back
            logger.debug(
                "advise '%s' invoked by %s:%d",
                name,
                caller.f_code.co_filename, caller.f_lineno,
            )
            if debug > 1:
                # id() of the advice tuple is stable for its lifetime,
                # so it can safely key the captured stack trace.
                self._frames[id(advice)] = ''.join(format_stack(caller))
    self._advices.setdefault(name, []).append(advice)
constant[
Add an advice that will be handled later by the handle method.
Arguments:
name
The name of the advice group
f
A callable method or function.
The rest of the arguments will be passed as arguments and
keyword arguments to f when it's invoked.
]
if compare[name[name] is constant[None]] begin[:]
return[None]
variable[advice] assign[=] tuple[[<ast.Name object at 0x7da1b1857670>, <ast.Name object at 0x7da1b1857580>, <ast.Name object at 0x7da1b1855450>]]
variable[debug] assign[=] call[name[self].get, parameter[name[DEBUG]]]
variable[frame] assign[=] call[name[currentframe], parameter[]]
if compare[name[frame] is constant[None]] begin[:]
call[name[logger].debug, parameter[constant[currentframe() failed to return frame]]]
call[name[self]._advices][name[name]] assign[=] call[name[self]._advices.get, parameter[name[name], list[[]]]]
call[call[name[self]._advices][name[name]].append, parameter[name[advice]]] | keyword[def] identifier[advise] ( identifier[self] , identifier[name] , identifier[f] ,* identifier[a] ,** identifier[kw] ):
literal[string]
keyword[if] identifier[name] keyword[is] keyword[None] :
keyword[return]
identifier[advice] =( identifier[f] , identifier[a] , identifier[kw] )
identifier[debug] = identifier[self] . identifier[get] ( identifier[DEBUG] )
identifier[frame] = identifier[currentframe] ()
keyword[if] identifier[frame] keyword[is] keyword[None] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[else] :
keyword[if] identifier[name] keyword[in] identifier[self] . identifier[_called] :
identifier[self] . identifier[__advice_stack_frame_protection] ( identifier[frame] )
keyword[if] identifier[debug] :
identifier[logger] . identifier[debug] (
literal[string] ,
identifier[name] ,
identifier[frame] . identifier[f_back] . identifier[f_code] . identifier[co_filename] , identifier[frame] . identifier[f_back] . identifier[f_lineno] ,
)
keyword[if] identifier[debug] > literal[int] :
identifier[self] . identifier[_frames] [ identifier[id] ( identifier[advice] )]= literal[string] . identifier[join] (
identifier[format_stack] ( identifier[frame] . identifier[f_back] ))
identifier[self] . identifier[_advices] [ identifier[name] ]= identifier[self] . identifier[_advices] . identifier[get] ( identifier[name] ,[])
identifier[self] . identifier[_advices] [ identifier[name] ]. identifier[append] ( identifier[advice] ) | def advise(self, name, f, *a, **kw):
"""
Add an advice that will be handled later by the handle method.
Arguments:
name
The name of the advice group
f
A callable method or function.
The rest of the arguments will be passed as arguments and
keyword arguments to f when it's invoked.
"""
if name is None:
return # depends on [control=['if'], data=[]]
advice = (f, a, kw)
debug = self.get(DEBUG)
frame = currentframe()
if frame is None:
logger.debug('currentframe() failed to return frame') # depends on [control=['if'], data=[]]
else:
if name in self._called:
self.__advice_stack_frame_protection(frame) # depends on [control=['if'], data=[]]
if debug:
logger.debug("advise '%s' invoked by %s:%d", name, frame.f_back.f_code.co_filename, frame.f_back.f_lineno)
if debug > 1:
# use the memory address of the tuple which should
# be stable
self._frames[id(advice)] = ''.join(format_stack(frame.f_back)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
self._advices[name] = self._advices.get(name, [])
self._advices[name].append(advice) |
def delete(group_id):
    """Delete group."""
    group = Group.query.get_or_404(group_id)
    # Guard clause: users without edit rights may not delete the group.
    if not group.can_edit(current_user):
        flash(
            _(
                'You cannot delete the group %(group_name)s',
                group_name=group.name
            ),
            'error'
        )
        return redirect(url_for(".index"))
    try:
        group.delete()
    except Exception as e:
        # Surface the failure to the user and bail out.
        flash(str(e), "error")
        return redirect(url_for(".index"))
    flash(_('Successfully removed group "%(group_name)s"',
            group_name=group.name), 'success')
    return redirect(url_for(".index"))
constant[Delete group.]
variable[group] assign[=] call[name[Group].query.get_or_404, parameter[name[group_id]]]
if call[name[group].can_edit, parameter[name[current_user]]] begin[:]
<ast.Try object at 0x7da207f99ed0>
call[name[flash], parameter[call[name[_], parameter[constant[Successfully removed group "%(group_name)s"]]], constant[success]]]
return[call[name[redirect], parameter[call[name[url_for], parameter[constant[.index]]]]]]
call[name[flash], parameter[call[name[_], parameter[constant[You cannot delete the group %(group_name)s]]], constant[error]]]
return[call[name[redirect], parameter[call[name[url_for], parameter[constant[.index]]]]]] | keyword[def] identifier[delete] ( identifier[group_id] ):
literal[string]
identifier[group] = identifier[Group] . identifier[query] . identifier[get_or_404] ( identifier[group_id] )
keyword[if] identifier[group] . identifier[can_edit] ( identifier[current_user] ):
keyword[try] :
identifier[group] . identifier[delete] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[flash] ( identifier[str] ( identifier[e] ), literal[string] )
keyword[return] identifier[redirect] ( identifier[url_for] ( literal[string] ))
identifier[flash] ( identifier[_] ( literal[string] ,
identifier[group_name] = identifier[group] . identifier[name] ), literal[string] )
keyword[return] identifier[redirect] ( identifier[url_for] ( literal[string] ))
identifier[flash] (
identifier[_] (
literal[string] ,
identifier[group_name] = identifier[group] . identifier[name]
),
literal[string]
)
keyword[return] identifier[redirect] ( identifier[url_for] ( literal[string] )) | def delete(group_id):
"""Delete group."""
group = Group.query.get_or_404(group_id)
if group.can_edit(current_user):
try:
group.delete() # depends on [control=['try'], data=[]]
except Exception as e:
flash(str(e), 'error')
return redirect(url_for('.index')) # depends on [control=['except'], data=['e']]
flash(_('Successfully removed group "%(group_name)s"', group_name=group.name), 'success')
return redirect(url_for('.index')) # depends on [control=['if'], data=[]]
flash(_('You cannot delete the group %(group_name)s', group_name=group.name), 'error')
return redirect(url_for('.index')) |
def filter_user_attempts(request: AxesHttpRequest, credentials: dict = None) -> QuerySet:
    """
    Return a queryset of AccessAttempts that match the given request and credentials.
    """
    lookup = get_client_parameters(
        get_client_username(request, credentials),
        request.axes_ip_address,
        request.axes_user_agent,
    )
    return AccessAttempt.objects.filter(**lookup)
constant[
Return a queryset of AccessAttempts that match the given request and credentials.
]
variable[username] assign[=] call[name[get_client_username], parameter[name[request], name[credentials]]]
variable[filter_kwargs] assign[=] call[name[get_client_parameters], parameter[name[username], name[request].axes_ip_address, name[request].axes_user_agent]]
return[call[name[AccessAttempt].objects.filter, parameter[]]] | keyword[def] identifier[filter_user_attempts] ( identifier[request] : identifier[AxesHttpRequest] , identifier[credentials] : identifier[dict] = keyword[None] )-> identifier[QuerySet] :
literal[string]
identifier[username] = identifier[get_client_username] ( identifier[request] , identifier[credentials] )
identifier[filter_kwargs] = identifier[get_client_parameters] ( identifier[username] , identifier[request] . identifier[axes_ip_address] , identifier[request] . identifier[axes_user_agent] )
keyword[return] identifier[AccessAttempt] . identifier[objects] . identifier[filter] (** identifier[filter_kwargs] ) | def filter_user_attempts(request: AxesHttpRequest, credentials: dict=None) -> QuerySet:
"""
Return a queryset of AccessAttempts that match the given request and credentials.
"""
username = get_client_username(request, credentials)
filter_kwargs = get_client_parameters(username, request.axes_ip_address, request.axes_user_agent)
return AccessAttempt.objects.filter(**filter_kwargs) |
def setup_top(self):
    """Create top-level elements of the hybrid schema."""
    grammar = SchemaNode("grammar")
    grammar.attr = {
        "xmlns": "http://relaxng.org/ns/structure/1.0",
        "datatypeLibrary": "http://www.w3.org/2001/XMLSchema-datatypes",
    }
    self.top_grammar = grammar
    self.tree = SchemaNode("start")
constant[Create top-level elements of the hybrid schema.]
name[self].top_grammar assign[=] call[name[SchemaNode], parameter[constant[grammar]]]
name[self].top_grammar.attr assign[=] dictionary[[<ast.Constant object at 0x7da18eb55c90>, <ast.Constant object at 0x7da18eb55750>], [<ast.Constant object at 0x7da18eb56650>, <ast.Constant object at 0x7da18eb556c0>]]
name[self].tree assign[=] call[name[SchemaNode], parameter[constant[start]]] | keyword[def] identifier[setup_top] ( identifier[self] ):
literal[string]
identifier[self] . identifier[top_grammar] = identifier[SchemaNode] ( literal[string] )
identifier[self] . identifier[top_grammar] . identifier[attr] ={
literal[string] : literal[string] ,
literal[string] : literal[string] }
identifier[self] . identifier[tree] = identifier[SchemaNode] ( literal[string] ) | def setup_top(self):
"""Create top-level elements of the hybrid schema."""
self.top_grammar = SchemaNode('grammar')
self.top_grammar.attr = {'xmlns': 'http://relaxng.org/ns/structure/1.0', 'datatypeLibrary': 'http://www.w3.org/2001/XMLSchema-datatypes'}
self.tree = SchemaNode('start') |
def round_datetime(when=None, precision=60, rounding=ROUND_NEAREST):
    """
    Round a datetime object to a time that matches the given precision.

    when (datetime), default now
        The datetime object to be rounded.

    precision (int, timedelta, str), default 60
        The number of seconds, weekday (MON, TUE, WED, etc.) or timedelta
        object to which the datetime object should be rounded.

    rounding (str), default ROUND_NEAREST
        The rounding method to use (ROUND_DOWN, ROUND_NEAREST, ROUND_UP).
    """
    if not when:
        when = djtz.now()
    weekday = WEEKDAYS.get(precision, WEEKDAYS['MON'])
    # Normalise precision to a number of seconds.
    if precision in WEEKDAYS:
        # A weekday name means "round to whole weeks".
        precision = int(timedelta(days=7).total_seconds())
    elif isinstance(precision, timedelta):
        precision = int(precision.total_seconds())
    # Epoch for the delta is the beginning of time, shifted so that it
    # falls on the requested weekday when precision names a weekday.
    epoch = when.min + timedelta(days=weekday)
    if djtz.is_aware(when):
        # `datetime.min` cannot be localised without raising
        # `OverflowError`, so build an aware epoch manually from its date.
        epoch = datetime(tzinfo=when.tzinfo, *epoch.timetuple()[:3])
    remainder = int((when - epoch).total_seconds()) % precision
    # Round down and strip sub-second noise first ...
    when -= timedelta(seconds=remainder, microseconds=when.microsecond)
    # ... then add one full precision step when rounding up is required.
    round_up = rounding == ROUND_UP or (
        rounding == ROUND_NEAREST and remainder >= precision / 2)
    if round_up:
        when += timedelta(seconds=precision)
    return when
constant[
Round a datetime object to a time that matches the given precision.
when (datetime), default now
The datetime object to be rounded.
precision (int, timedelta, str), default 60
The number of seconds, weekday (MON, TUE, WED, etc.) or timedelta
object to which the datetime object should be rounded.
rounding (str), default ROUND_NEAREST
The rounding method to use (ROUND_DOWN, ROUND_NEAREST, ROUND_UP).
]
variable[when] assign[=] <ast.BoolOp object at 0x7da18dc06c20>
variable[weekday] assign[=] call[name[WEEKDAYS].get, parameter[name[precision], call[name[WEEKDAYS]][constant[MON]]]]
if compare[name[precision] in name[WEEKDAYS]] begin[:]
variable[precision] assign[=] call[name[int], parameter[call[call[name[timedelta], parameter[]].total_seconds, parameter[]]]]
variable[when_min] assign[=] binary_operation[name[when].min + call[name[timedelta], parameter[]]]
if call[name[djtz].is_aware, parameter[name[when]]] begin[:]
variable[when_min] assign[=] call[name[datetime], parameter[<ast.Starred object at 0x7da18dc06b60>]]
variable[delta] assign[=] binary_operation[name[when] - name[when_min]]
variable[remainder] assign[=] binary_operation[call[name[int], parameter[call[name[delta].total_seconds, parameter[]]]] <ast.Mod object at 0x7da2590d6920> name[precision]]
<ast.AugAssign object at 0x7da18dc06110>
if <ast.BoolOp object at 0x7da18dc05e10> begin[:]
<ast.AugAssign object at 0x7da18dc05810>
return[name[when]] | keyword[def] identifier[round_datetime] ( identifier[when] = keyword[None] , identifier[precision] = literal[int] , identifier[rounding] = identifier[ROUND_NEAREST] ):
literal[string]
identifier[when] = identifier[when] keyword[or] identifier[djtz] . identifier[now] ()
identifier[weekday] = identifier[WEEKDAYS] . identifier[get] ( identifier[precision] , identifier[WEEKDAYS] [ literal[string] ])
keyword[if] identifier[precision] keyword[in] identifier[WEEKDAYS] :
identifier[precision] = identifier[int] ( identifier[timedelta] ( identifier[days] = literal[int] ). identifier[total_seconds] ())
keyword[elif] identifier[isinstance] ( identifier[precision] , identifier[timedelta] ):
identifier[precision] = identifier[int] ( identifier[precision] . identifier[total_seconds] ())
identifier[when_min] = identifier[when] . identifier[min] + identifier[timedelta] ( identifier[days] = identifier[weekday] )
keyword[if] identifier[djtz] . identifier[is_aware] ( identifier[when] ):
identifier[when_min] = identifier[datetime] ( identifier[tzinfo] = identifier[when] . identifier[tzinfo] ,* identifier[when_min] . identifier[timetuple] ()[: literal[int] ])
identifier[delta] = identifier[when] - identifier[when_min]
identifier[remainder] = identifier[int] ( identifier[delta] . identifier[total_seconds] ())% identifier[precision]
identifier[when] -= identifier[timedelta] ( identifier[seconds] = identifier[remainder] , identifier[microseconds] = identifier[when] . identifier[microsecond] )
keyword[if] identifier[rounding] == identifier[ROUND_UP] keyword[or] (
identifier[rounding] == identifier[ROUND_NEAREST] keyword[and] identifier[remainder] >= identifier[precision] / literal[int] ):
identifier[when] += identifier[timedelta] ( identifier[seconds] = identifier[precision] )
keyword[return] identifier[when] | def round_datetime(when=None, precision=60, rounding=ROUND_NEAREST):
"""
Round a datetime object to a time that matches the given precision.
when (datetime), default now
The datetime object to be rounded.
precision (int, timedelta, str), default 60
The number of seconds, weekday (MON, TUE, WED, etc.) or timedelta
object to which the datetime object should be rounded.
rounding (str), default ROUND_NEAREST
The rounding method to use (ROUND_DOWN, ROUND_NEAREST, ROUND_UP).
"""
when = when or djtz.now()
weekday = WEEKDAYS.get(precision, WEEKDAYS['MON'])
if precision in WEEKDAYS:
precision = int(timedelta(days=7).total_seconds()) # depends on [control=['if'], data=['precision']]
elif isinstance(precision, timedelta):
precision = int(precision.total_seconds()) # depends on [control=['if'], data=[]]
# Get delta between the beginning of time and the given datetime object.
# If precision is a weekday, the beginning of time must be that same day.
when_min = when.min + timedelta(days=weekday)
if djtz.is_aware(when):
# It doesn't seem to be possible to localise `datetime.min` without
# raising `OverflowError`, so create a timezone aware object manually.
when_min = datetime(*when_min.timetuple()[:3], tzinfo=when.tzinfo) # depends on [control=['if'], data=[]]
delta = when - when_min
remainder = int(delta.total_seconds()) % precision
# First round down and strip microseconds.
when -= timedelta(seconds=remainder, microseconds=when.microsecond)
# Then add precision to round up.
if rounding == ROUND_UP or (rounding == ROUND_NEAREST and remainder >= precision / 2):
when += timedelta(seconds=precision) # depends on [control=['if'], data=[]]
return when |
def dead(self):
    """Whether the callback no longer exists.

    If the callback is maintained via a weak reference, and that
    weak reference has been collected, this will be true
    instead of false.
    """
    # Strong references can never be dead.
    if not self._weak:
        return False
    # A collected weak reference dereferences to None.
    return self._callback() is None
constant[Whether the callback no longer exists.
If the callback is maintained via a weak reference, and that
weak reference has been collected, this will be true
instead of false.
]
if <ast.UnaryOp object at 0x7da2045661d0> begin[:]
return[constant[False]]
variable[cb] assign[=] call[name[self]._callback, parameter[]]
if compare[name[cb] is constant[None]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[dead] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_weak] :
keyword[return] keyword[False]
identifier[cb] = identifier[self] . identifier[_callback] ()
keyword[if] identifier[cb] keyword[is] keyword[None] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def dead(self):
"""Whether the callback no longer exists.
If the callback is maintained via a weak reference, and that
weak reference has been collected, this will be true
instead of false.
"""
if not self._weak:
return False # depends on [control=['if'], data=[]]
cb = self._callback()
if cb is None:
return True # depends on [control=['if'], data=[]]
return False |
def filter_objects(self, objects, perm=None):
    """ Return only objects with specified permission in objects list. If perm not specified, 'view' perm will be used. """
    permission = perm
    if permission is None:
        permission = build_permission_name(self.model_class, 'view')

    def _allowed(obj):
        # Keep only objects the current user holds the permission on.
        return self.user.has_perm(permission, obj=obj)

    return filter(_allowed, objects)
constant[ Return only objects with specified permission in objects list. If perm not specified, 'view' perm will be used. ]
if compare[name[perm] is constant[None]] begin[:]
variable[perm] assign[=] call[name[build_permission_name], parameter[name[self].model_class, constant[view]]]
return[call[name[filter], parameter[<ast.Lambda object at 0x7da1b0a23d00>, name[objects]]]] | keyword[def] identifier[filter_objects] ( identifier[self] , identifier[objects] , identifier[perm] = keyword[None] ):
literal[string]
keyword[if] identifier[perm] keyword[is] keyword[None] :
identifier[perm] = identifier[build_permission_name] ( identifier[self] . identifier[model_class] , literal[string] )
keyword[return] identifier[filter] ( keyword[lambda] identifier[o] : identifier[self] . identifier[user] . identifier[has_perm] ( identifier[perm] , identifier[obj] = identifier[o] ), identifier[objects] ) | def filter_objects(self, objects, perm=None):
""" Return only objects with specified permission in objects list. If perm not specified, 'view' perm will be used. """
if perm is None:
perm = build_permission_name(self.model_class, 'view') # depends on [control=['if'], data=['perm']]
return filter(lambda o: self.user.has_perm(perm, obj=o), objects) |
def create_xml_file(self, source_file, destination=None):
    """
    This method will generate a xml file using an external tool.
    The method will return the file path of the generated xml file.
    :param source_file: path to the source file that should be parsed.
    :type source_file: str
    :param destination: if given, will be used as target file path for
        the xml generator.
    :type destination: str
    :rtype: path to xml file.
    """
    xml_file = destination
    # If file specified, remove it to start else create new file name
    if xml_file:
        utils.remove_file_no_raise(xml_file, self.__config)
    else:
        xml_file = utils.create_temp_file_name(suffix='.xml')
    # Resolve a relative source path against the configured include dirs.
    ffname = source_file
    if not os.path.isabs(ffname):
        ffname = self.__file_full_name(source_file)
    command_line = self.__create_command_line(ffname, xml_file)
    # NOTE(review): shell=True runs the generator through the shell;
    # assumes the command line built above is properly quoted — verify
    # __create_command_line if paths may contain spaces or metacharacters.
    process = subprocess.Popen(
        args=command_line,
        shell=True,
        stdout=subprocess.PIPE)
    try:
        results = []
        # Drain stdout while the generator runs so the pipe buffer cannot
        # fill up and deadlock the child process.
        while process.poll() is None:
            line = process.stdout.readline()
            if line.strip():
                results.append(line.rstrip())
        # Collect whatever output remains after the process exited.
        for line in process.stdout.readlines():
            if line.strip():
                results.append(line.rstrip())
        exit_status = process.returncode
        msg = os.linesep.join([str(s) for s in results])
        if self.__config.ignore_gccxml_output:
            # Lenient mode: ignore generator chatter, only require that
            # the xml file was actually produced.
            if not os.path.isfile(xml_file):
                raise RuntimeError(
                    "Error occurred while running " +
                    self.__config.xml_generator.upper() +
                    ": %s status:%s" %
                    (msg, exit_status))
        else:
            # Strict mode: any output, nonzero status or missing file
            # is treated as a fatal generator error.
            if msg or exit_status or not \
                    os.path.isfile(xml_file):
                if not os.path.isfile(xml_file):
                    raise RuntimeError(
                        "Error occurred while running " +
                        self.__config.xml_generator.upper() +
                        " xml file does not exist")
                else:
                    raise RuntimeError(
                        "Error occurred while running " +
                        self.__config.xml_generator.upper() +
                        ": %s status:%s" % (msg, exit_status))
    except Exception:
        # On any failure remove the (possibly partial) xml file before
        # re-raising, so callers never see a half-written result.
        utils.remove_file_no_raise(xml_file, self.__config)
        raise
    finally:
        # Always reap the child and close the pipe, success or failure.
        process.wait()
        process.stdout.close()
    return xml_file
constant[
This method will generate a xml file using an external tool.
The method will return the file path of the generated xml file.
:param source_file: path to the source file that should be parsed.
:type source_file: str
:param destination: if given, will be used as target file path for
the xml generator.
:type destination: str
:rtype: path to xml file.
]
variable[xml_file] assign[=] name[destination]
if name[xml_file] begin[:]
call[name[utils].remove_file_no_raise, parameter[name[xml_file], name[self].__config]]
variable[ffname] assign[=] name[source_file]
if <ast.UnaryOp object at 0x7da18dc98520> begin[:]
variable[ffname] assign[=] call[name[self].__file_full_name, parameter[name[source_file]]]
variable[command_line] assign[=] call[name[self].__create_command_line, parameter[name[ffname], name[xml_file]]]
variable[process] assign[=] call[name[subprocess].Popen, parameter[]]
<ast.Try object at 0x7da18dc9bd30>
return[name[xml_file]] | keyword[def] identifier[create_xml_file] ( identifier[self] , identifier[source_file] , identifier[destination] = keyword[None] ):
literal[string]
identifier[xml_file] = identifier[destination]
keyword[if] identifier[xml_file] :
identifier[utils] . identifier[remove_file_no_raise] ( identifier[xml_file] , identifier[self] . identifier[__config] )
keyword[else] :
identifier[xml_file] = identifier[utils] . identifier[create_temp_file_name] ( identifier[suffix] = literal[string] )
identifier[ffname] = identifier[source_file]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isabs] ( identifier[ffname] ):
identifier[ffname] = identifier[self] . identifier[__file_full_name] ( identifier[source_file] )
identifier[command_line] = identifier[self] . identifier[__create_command_line] ( identifier[ffname] , identifier[xml_file] )
identifier[process] = identifier[subprocess] . identifier[Popen] (
identifier[args] = identifier[command_line] ,
identifier[shell] = keyword[True] ,
identifier[stdout] = identifier[subprocess] . identifier[PIPE] )
keyword[try] :
identifier[results] =[]
keyword[while] identifier[process] . identifier[poll] () keyword[is] keyword[None] :
identifier[line] = identifier[process] . identifier[stdout] . identifier[readline] ()
keyword[if] identifier[line] . identifier[strip] ():
identifier[results] . identifier[append] ( identifier[line] . identifier[rstrip] ())
keyword[for] identifier[line] keyword[in] identifier[process] . identifier[stdout] . identifier[readlines] ():
keyword[if] identifier[line] . identifier[strip] ():
identifier[results] . identifier[append] ( identifier[line] . identifier[rstrip] ())
identifier[exit_status] = identifier[process] . identifier[returncode]
identifier[msg] = identifier[os] . identifier[linesep] . identifier[join] ([ identifier[str] ( identifier[s] ) keyword[for] identifier[s] keyword[in] identifier[results] ])
keyword[if] identifier[self] . identifier[__config] . identifier[ignore_gccxml_output] :
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[xml_file] ):
keyword[raise] identifier[RuntimeError] (
literal[string] +
identifier[self] . identifier[__config] . identifier[xml_generator] . identifier[upper] ()+
literal[string] %
( identifier[msg] , identifier[exit_status] ))
keyword[else] :
keyword[if] identifier[msg] keyword[or] identifier[exit_status] keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[xml_file] ):
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[xml_file] ):
keyword[raise] identifier[RuntimeError] (
literal[string] +
identifier[self] . identifier[__config] . identifier[xml_generator] . identifier[upper] ()+
literal[string] )
keyword[else] :
keyword[raise] identifier[RuntimeError] (
literal[string] +
identifier[self] . identifier[__config] . identifier[xml_generator] . identifier[upper] ()+
literal[string] %( identifier[msg] , identifier[exit_status] ))
keyword[except] identifier[Exception] :
identifier[utils] . identifier[remove_file_no_raise] ( identifier[xml_file] , identifier[self] . identifier[__config] )
keyword[raise]
keyword[finally] :
identifier[process] . identifier[wait] ()
identifier[process] . identifier[stdout] . identifier[close] ()
keyword[return] identifier[xml_file] | def create_xml_file(self, source_file, destination=None):
"""
This method will generate a xml file using an external tool.
The method will return the file path of the generated xml file.
:param source_file: path to the source file that should be parsed.
:type source_file: str
:param destination: if given, will be used as target file path for
the xml generator.
:type destination: str
:rtype: path to xml file.
"""
xml_file = destination
# If file specified, remove it to start else create new file name
if xml_file:
utils.remove_file_no_raise(xml_file, self.__config) # depends on [control=['if'], data=[]]
else:
xml_file = utils.create_temp_file_name(suffix='.xml')
ffname = source_file
if not os.path.isabs(ffname):
ffname = self.__file_full_name(source_file) # depends on [control=['if'], data=[]]
command_line = self.__create_command_line(ffname, xml_file)
process = subprocess.Popen(args=command_line, shell=True, stdout=subprocess.PIPE)
try:
results = []
while process.poll() is None:
line = process.stdout.readline()
if line.strip():
results.append(line.rstrip()) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
for line in process.stdout.readlines():
if line.strip():
results.append(line.rstrip()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
exit_status = process.returncode
msg = os.linesep.join([str(s) for s in results])
if self.__config.ignore_gccxml_output:
if not os.path.isfile(xml_file):
raise RuntimeError('Error occurred while running ' + self.__config.xml_generator.upper() + ': %s status:%s' % (msg, exit_status)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif msg or exit_status or (not os.path.isfile(xml_file)):
if not os.path.isfile(xml_file):
raise RuntimeError('Error occurred while running ' + self.__config.xml_generator.upper() + ' xml file does not exist') # depends on [control=['if'], data=[]]
else:
raise RuntimeError('Error occurred while running ' + self.__config.xml_generator.upper() + ': %s status:%s' % (msg, exit_status)) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception:
utils.remove_file_no_raise(xml_file, self.__config)
raise # depends on [control=['except'], data=[]]
finally:
process.wait()
process.stdout.close()
return xml_file |
def OnOpen(self, event):
        """File open event handler.

        Prompts the user to save unsaved changes, asks for a file to open,
        posts a GridActionOpenMsg to load it, updates the window title and
        resets the undo stack to a fresh savepoint.

        :param event: Menu/toolbar event that triggered the open action.
        """
        # If changes have taken place, offer to save the old grid first
        if undo.stack().haschanged():
            save_choice = self.interfaces.get_save_request_from_user()
            if save_choice is None:
                # Cancelled close operation
                return
            elif save_choice:
                # User wants to save content
                post_command_event(self.main_window, self.main_window.SaveMsg)
        # Get filepath from user
        f2w = get_filetypes2wildcards(
            ["pys", "pysu", "xls", "xlsx", "ods", "all"])
        # Materialize as lists: dict views on Python 3 support neither
        # .index() nor integer indexing, both of which are needed below.
        filetypes = list(f2w.keys())
        wildcards = list(f2w.values())
        wildcard = "|".join(wildcards)
        message = _("Choose file to open.")
        style = wx.OPEN
        default_filetype = config["default_open_filetype"]
        try:
            default_filterindex = filetypes.index(default_filetype)
        except ValueError:
            # Be graceful if the user has entered an unknown filetype
            default_filterindex = 0
        get_fp_fidx = self.interfaces.get_filepath_findex_from_user
        filepath, filterindex = get_fp_fidx(wildcard, message, style,
                                            filterindex=default_filterindex)
        if filepath is None:
            # Dialog was cancelled
            return
        filetype = filetypes[filterindex]
        # Change the main window filepath state
        self.main_window.filepath = filepath
        # Load file into grid
        post_command_event(self.main_window,
                           self.main_window.GridActionOpenMsg,
                           attr={"filepath": filepath, "filetype": filetype})
        # Set Window title to new filepath
        title_text = filepath.split("/")[-1] + " - pyspread"
        post_command_event(self.main_window,
                           self.main_window.TitleMsg, text=title_text)
        self.main_window.grid.ForceRefresh()
        if is_gtk():
            try:
                wx.Yield()
            except Exception:
                # Yielding to the event loop is best-effort only
                pass
        # Update savepoint and clear the undo stack
        undo.stack().clear()
        undo.stack().savepoint()
        # Update content changed state
        try:
            post_command_event(self.main_window, self.ContentChangedMsg)
        except TypeError:
            # The main window does not exist any more
            pass
constant[File open event handler]
if call[call[name[undo].stack, parameter[]].haschanged, parameter[]] begin[:]
variable[save_choice] assign[=] call[name[self].interfaces.get_save_request_from_user, parameter[]]
if compare[name[save_choice] is constant[None]] begin[:]
return[None]
variable[f2w] assign[=] call[name[get_filetypes2wildcards], parameter[list[[<ast.Constant object at 0x7da1b16008e0>, <ast.Constant object at 0x7da1b1600430>, <ast.Constant object at 0x7da1b1602980>, <ast.Constant object at 0x7da1b1600ac0>, <ast.Constant object at 0x7da1b1600130>, <ast.Constant object at 0x7da1b1600880>]]]]
variable[filetypes] assign[=] call[name[f2w].keys, parameter[]]
variable[wildcards] assign[=] call[name[f2w].values, parameter[]]
variable[wildcard] assign[=] call[constant[|].join, parameter[name[wildcards]]]
variable[message] assign[=] call[name[_], parameter[constant[Choose file to open.]]]
variable[style] assign[=] name[wx].OPEN
variable[default_filetype] assign[=] call[name[config]][constant[default_open_filetype]]
<ast.Try object at 0x7da1b16001c0>
variable[get_fp_fidx] assign[=] name[self].interfaces.get_filepath_findex_from_user
<ast.Tuple object at 0x7da1b16025c0> assign[=] call[name[get_fp_fidx], parameter[name[wildcard], name[message], name[style]]]
if compare[name[filepath] is constant[None]] begin[:]
return[None]
variable[filetype] assign[=] call[name[filetypes]][name[filterindex]]
name[self].main_window.filepath assign[=] name[filepath]
call[name[post_command_event], parameter[name[self].main_window, name[self].main_window.GridActionOpenMsg]]
variable[title_text] assign[=] binary_operation[call[call[name[filepath].split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da1b2344880>] + constant[ - pyspread]]
call[name[post_command_event], parameter[name[self].main_window, name[self].main_window.TitleMsg]]
call[name[self].main_window.grid.ForceRefresh, parameter[]]
if call[name[is_gtk], parameter[]] begin[:]
<ast.Try object at 0x7da1b17f9d80>
call[call[name[undo].stack, parameter[]].clear, parameter[]]
call[call[name[undo].stack, parameter[]].savepoint, parameter[]]
<ast.Try object at 0x7da1b16be770> | keyword[def] identifier[OnOpen] ( identifier[self] , identifier[event] ):
literal[string]
keyword[if] identifier[undo] . identifier[stack] (). identifier[haschanged] ():
identifier[save_choice] = identifier[self] . identifier[interfaces] . identifier[get_save_request_from_user] ()
keyword[if] identifier[save_choice] keyword[is] keyword[None] :
keyword[return]
keyword[elif] identifier[save_choice] :
identifier[post_command_event] ( identifier[self] . identifier[main_window] , identifier[self] . identifier[main_window] . identifier[SaveMsg] )
identifier[f2w] = identifier[get_filetypes2wildcards] (
[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ])
identifier[filetypes] = identifier[f2w] . identifier[keys] ()
identifier[wildcards] = identifier[f2w] . identifier[values] ()
identifier[wildcard] = literal[string] . identifier[join] ( identifier[wildcards] )
identifier[message] = identifier[_] ( literal[string] )
identifier[style] = identifier[wx] . identifier[OPEN]
identifier[default_filetype] = identifier[config] [ literal[string] ]
keyword[try] :
identifier[default_filterindex] = identifier[filetypes] . identifier[index] ( identifier[default_filetype] )
keyword[except] identifier[ValueError] :
identifier[default_filterindex] = literal[int]
identifier[get_fp_fidx] = identifier[self] . identifier[interfaces] . identifier[get_filepath_findex_from_user]
identifier[filepath] , identifier[filterindex] = identifier[get_fp_fidx] ( identifier[wildcard] , identifier[message] , identifier[style] ,
identifier[filterindex] = identifier[default_filterindex] )
keyword[if] identifier[filepath] keyword[is] keyword[None] :
keyword[return]
identifier[filetype] = identifier[filetypes] [ identifier[filterindex] ]
identifier[self] . identifier[main_window] . identifier[filepath] = identifier[filepath]
identifier[post_command_event] ( identifier[self] . identifier[main_window] ,
identifier[self] . identifier[main_window] . identifier[GridActionOpenMsg] ,
identifier[attr] ={ literal[string] : identifier[filepath] , literal[string] : identifier[filetype] })
identifier[title_text] = identifier[filepath] . identifier[split] ( literal[string] )[- literal[int] ]+ literal[string]
identifier[post_command_event] ( identifier[self] . identifier[main_window] ,
identifier[self] . identifier[main_window] . identifier[TitleMsg] , identifier[text] = identifier[title_text] )
identifier[self] . identifier[main_window] . identifier[grid] . identifier[ForceRefresh] ()
keyword[if] identifier[is_gtk] ():
keyword[try] :
identifier[wx] . identifier[Yield] ()
keyword[except] :
keyword[pass]
identifier[undo] . identifier[stack] (). identifier[clear] ()
identifier[undo] . identifier[stack] (). identifier[savepoint] ()
keyword[try] :
identifier[post_command_event] ( identifier[self] . identifier[main_window] , identifier[self] . identifier[ContentChangedMsg] )
keyword[except] identifier[TypeError] :
keyword[pass] | def OnOpen(self, event):
"""File open event handler"""
# If changes have taken place save of old grid
if undo.stack().haschanged():
save_choice = self.interfaces.get_save_request_from_user()
if save_choice is None:
# Cancelled close operation
return # depends on [control=['if'], data=[]]
elif save_choice:
# User wants to save content
post_command_event(self.main_window, self.main_window.SaveMsg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Get filepath from user
f2w = get_filetypes2wildcards(['pys', 'pysu', 'xls', 'xlsx', 'ods', 'all'])
filetypes = f2w.keys()
wildcards = f2w.values()
wildcard = '|'.join(wildcards)
message = _('Choose file to open.')
style = wx.OPEN
default_filetype = config['default_open_filetype']
try:
default_filterindex = filetypes.index(default_filetype) # depends on [control=['try'], data=[]]
except ValueError:
# Be graceful if the user has entered an unkown filetype
default_filterindex = 0 # depends on [control=['except'], data=[]]
get_fp_fidx = self.interfaces.get_filepath_findex_from_user
(filepath, filterindex) = get_fp_fidx(wildcard, message, style, filterindex=default_filterindex)
if filepath is None:
return # depends on [control=['if'], data=[]]
filetype = filetypes[filterindex]
# Change the main window filepath state
self.main_window.filepath = filepath
# Load file into grid
post_command_event(self.main_window, self.main_window.GridActionOpenMsg, attr={'filepath': filepath, 'filetype': filetype})
# Set Window title to new filepath
title_text = filepath.split('/')[-1] + ' - pyspread'
post_command_event(self.main_window, self.main_window.TitleMsg, text=title_text)
self.main_window.grid.ForceRefresh()
if is_gtk():
try:
wx.Yield() # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# Update savepoint and clear the undo stack
undo.stack().clear()
undo.stack().savepoint()
# Update content changed state
try:
post_command_event(self.main_window, self.ContentChangedMsg) # depends on [control=['try'], data=[]]
except TypeError:
# The main window does not exist any more
pass # depends on [control=['except'], data=[]] |
def operator_from_str(op):
    """
    Return the operator associated to the given string `op`.

    Parameters
    ----------
    op : str
        Operator symbol, one of: ``== != > >= < <= + - * / // ** % ^``.

    Raises
    ------
    KeyError
        If `op` is not a supported operator symbol.

    >>> assert operator_from_str("==")(1, 1) and operator_from_str("+")(1,1) == 2
    """
    # operator.truediv exists on every supported interpreter (Python >= 2.2),
    # so the former try/except AttributeError guard was dead code.
    d = {"==": operator.eq,
         "!=": operator.ne,
         ">": operator.gt,
         ">=": operator.ge,
         "<": operator.lt,
         "<=": operator.le,
         '+': operator.add,
         '-': operator.sub,
         '*': operator.mul,
         '/': operator.truediv,
         '//': operator.floordiv,  # generalization: floor division
         '**': operator.pow,       # generalization: exponentiation
         '%': operator.mod,
         '^': operator.xor,
         }
    return d[op]
constant[
Return the operator associated to the given string `op`.
raises:
`KeyError` if invalid string.
>>> assert operator_from_str("==")(1, 1) and operator_from_str("+")(1,1) == 2
]
variable[d] assign[=] dictionary[[<ast.Constant object at 0x7da1b1344d60>, <ast.Constant object at 0x7da1b13477f0>, <ast.Constant object at 0x7da1b1347fa0>, <ast.Constant object at 0x7da1b1344700>, <ast.Constant object at 0x7da1b1346aa0>, <ast.Constant object at 0x7da1b1347130>, <ast.Constant object at 0x7da1b1344550>, <ast.Constant object at 0x7da1b1347280>, <ast.Constant object at 0x7da1b13472b0>, <ast.Constant object at 0x7da1b1345480>, <ast.Constant object at 0x7da1b1346bc0>], [<ast.Attribute object at 0x7da1b1344f10>, <ast.Attribute object at 0x7da1b13452a0>, <ast.Attribute object at 0x7da1b1347400>, <ast.Attribute object at 0x7da1b1344ca0>, <ast.Attribute object at 0x7da1b1346fe0>, <ast.Attribute object at 0x7da1b13445e0>, <ast.Attribute object at 0x7da1b13469e0>, <ast.Attribute object at 0x7da1b13472e0>, <ast.Attribute object at 0x7da1b1345ea0>, <ast.Attribute object at 0x7da1b1346110>, <ast.Attribute object at 0x7da1b1345fc0>]]
<ast.Try object at 0x7da1b1346d40>
return[call[name[d]][name[op]]] | keyword[def] identifier[operator_from_str] ( identifier[op] ):
literal[string]
identifier[d] ={ literal[string] : identifier[operator] . identifier[eq] ,
literal[string] : identifier[operator] . identifier[ne] ,
literal[string] : identifier[operator] . identifier[gt] ,
literal[string] : identifier[operator] . identifier[ge] ,
literal[string] : identifier[operator] . identifier[lt] ,
literal[string] : identifier[operator] . identifier[le] ,
literal[string] : identifier[operator] . identifier[add] ,
literal[string] : identifier[operator] . identifier[sub] ,
literal[string] : identifier[operator] . identifier[mul] ,
literal[string] : identifier[operator] . identifier[mod] ,
literal[string] : identifier[operator] . identifier[xor] ,
}
keyword[try] :
identifier[d] [ literal[string] ]= identifier[operator] . identifier[truediv]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[return] identifier[d] [ identifier[op] ] | def operator_from_str(op):
"""
Return the operator associated to the given string `op`.
raises:
`KeyError` if invalid string.
>>> assert operator_from_str("==")(1, 1) and operator_from_str("+")(1,1) == 2
"""
d = {'==': operator.eq, '!=': operator.ne, '>': operator.gt, '>=': operator.ge, '<': operator.lt, '<=': operator.le, '+': operator.add, '-': operator.sub, '*': operator.mul, '%': operator.mod, '^': operator.xor}
try:
d['/'] = operator.truediv # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
return d[op] |
def get_option(name):
    """
    Get package option

    Parameters
    ----------
    name : str
        Name of the option

    Raises
    ------
    PlotnineError
        If `name` is not a known option (or names an accessor function).
    """
    module_ns = globals()
    # The accessor functions themselves live in the module namespace but
    # are never valid option names.
    known = name not in ('get_option', 'set_option') and name in module_ns
    if known:
        return module_ns[name]
    from ..exceptions import PlotnineError
    raise PlotnineError("Unknown option {}".format(name))
constant[
Get package option
Parameters
----------
name : str
Name of the option
]
variable[d] assign[=] call[name[globals], parameter[]]
if <ast.BoolOp object at 0x7da207f02230> begin[:]
from relative_module[exceptions] import module[PlotnineError]
<ast.Raise object at 0x7da207f01570>
return[call[name[d]][name[name]]] | keyword[def] identifier[get_option] ( identifier[name] ):
literal[string]
identifier[d] = identifier[globals] ()
keyword[if] identifier[name] keyword[in] { literal[string] , literal[string] } keyword[or] identifier[name] keyword[not] keyword[in] identifier[d] :
keyword[from] .. identifier[exceptions] keyword[import] identifier[PlotnineError]
keyword[raise] identifier[PlotnineError] ( literal[string] . identifier[format] ( identifier[name] ))
keyword[return] identifier[d] [ identifier[name] ] | def get_option(name):
"""
Get package option
Parameters
----------
name : str
Name of the option
"""
d = globals()
if name in {'get_option', 'set_option'} or name not in d:
from ..exceptions import PlotnineError
raise PlotnineError('Unknown option {}'.format(name)) # depends on [control=['if'], data=[]]
return d[name] |
def index_list(self):
        '''
        Lists indices

        Queries the cluster state endpoint (``/_cluster/state/``) of an
        Elasticsearch node at ``self.host:self.port``.

        :returns: the index names (keys of ``metadata.indices``) on an
            HTTP 200 response; otherwise the raw response object.
        '''
        # self.session is the HTTP client used for cluster requests.
        request = self.session
        url = 'http://%s:%s/_cluster/state/' % (self.host, self.port)
        # NOTE(review): `response` is treated as a dict below, so the
        # session's .get() presumably returns parsed JSON -- confirm
        # against the session wrapper's implementation.
        response = request.get(url)
        # NOTE(review): the status code is read from the session object,
        # not from `response`; this looks like it should be
        # `response.status_code` unless the wrapper stores the last
        # status on itself -- verify.
        if request.status_code==200:
            return response.get('metadata',{}).get('indices',{}).keys()
        else:
            return response
constant[
Lists indices
]
variable[request] assign[=] name[self].session
variable[url] assign[=] binary_operation[constant[http://%s:%s/_cluster/state/] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da207f9b0d0>, <ast.Attribute object at 0x7da207f9a950>]]]
variable[response] assign[=] call[name[request].get, parameter[name[url]]]
if compare[name[request].status_code equal[==] constant[200]] begin[:]
return[call[call[call[name[response].get, parameter[constant[metadata], dictionary[[], []]]].get, parameter[constant[indices], dictionary[[], []]]].keys, parameter[]]] | keyword[def] identifier[index_list] ( identifier[self] ):
literal[string]
identifier[request] = identifier[self] . identifier[session]
identifier[url] = literal[string] %( identifier[self] . identifier[host] , identifier[self] . identifier[port] )
identifier[response] = identifier[request] . identifier[get] ( identifier[url] )
keyword[if] identifier[request] . identifier[status_code] == literal[int] :
keyword[return] identifier[response] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,{}). identifier[keys] ()
keyword[else] :
keyword[return] identifier[response] | def index_list(self):
"""
Lists indices
"""
request = self.session
url = 'http://%s:%s/_cluster/state/' % (self.host, self.port)
response = request.get(url)
if request.status_code == 200:
return response.get('metadata', {}).get('indices', {}).keys() # depends on [control=['if'], data=[]]
else:
return response |
def ibatch(iterable, size):
    """Yield a series of batches from iterable, each size elements long.

    Each batch is itself a lazy iterator over at most `size` elements; the
    final batch may be shorter.  All batches share the underlying iterator,
    so each batch should be consumed before advancing to the next.

    :param iterable: source of elements
    :param size: maximum number of elements per batch
    """
    source = iter(iterable)
    while True:
        batch = itertools.islice(source, size)
        try:
            # Pull the first element eagerly so we can stop cleanly when
            # the source is exhausted.  Letting StopIteration escape a
            # generator raises RuntimeError under PEP 479 (Python 3.7+).
            first = next(batch)
        except StopIteration:
            return
        yield itertools.chain([first], batch)
constant[Yield a series of batches from iterable, each size elements long.]
variable[source] assign[=] call[name[iter], parameter[name[iterable]]]
while constant[True] begin[:]
variable[batch] assign[=] call[name[itertools].islice, parameter[name[source], name[size]]]
<ast.Yield object at 0x7da1b0a2d030> | keyword[def] identifier[ibatch] ( identifier[iterable] , identifier[size] ):
literal[string]
identifier[source] = identifier[iter] ( identifier[iterable] )
keyword[while] keyword[True] :
identifier[batch] = identifier[itertools] . identifier[islice] ( identifier[source] , identifier[size] )
keyword[yield] identifier[itertools] . identifier[chain] ([ identifier[next] ( identifier[batch] )], identifier[batch] ) | def ibatch(iterable, size):
"""Yield a series of batches from iterable, each size elements long."""
source = iter(iterable)
while True:
batch = itertools.islice(source, size)
yield itertools.chain([next(batch)], batch) # depends on [control=['while'], data=[]] |
def _visible_width(s):
    """Visible width of a printed string. ANSI color codes are removed.
    >>> _visible_width('\x1b[31mhello\x1b[0m'), _visible_width("world")
    (5, 5)
    """
    # Use wide-character-aware measurement when the optional wcwidth
    # module is present and wide-char mode is enabled; plain len otherwise.
    if wcwidth is not None and WIDE_CHARS_MODE:
        measure = wcwidth.wcswidth
    else:
        measure = len
    if isinstance(s, (_text_type, _binary_type)):
        # Strip ANSI escape sequences before measuring.
        return measure(_strip_invisible(s))
    # Non-string values are measured via their text representation.
    return measure(_text_type(s))
constant[Visible width of a printed string. ANSI color codes are removed.
>>> _visible_width('[31mhello[0m'), _visible_width("world")
(5, 5)
]
if <ast.BoolOp object at 0x7da18eb55540> begin[:]
variable[len_fn] assign[=] name[wcwidth].wcswidth
if <ast.BoolOp object at 0x7da18eb55cc0> begin[:]
return[call[name[len_fn], parameter[call[name[_strip_invisible], parameter[name[s]]]]]] | keyword[def] identifier[_visible_width] ( identifier[s] ):
literal[string]
keyword[if] identifier[wcwidth] keyword[is] keyword[not] keyword[None] keyword[and] identifier[WIDE_CHARS_MODE] :
identifier[len_fn] = identifier[wcwidth] . identifier[wcswidth]
keyword[else] :
identifier[len_fn] = identifier[len]
keyword[if] identifier[isinstance] ( identifier[s] , identifier[_text_type] ) keyword[or] identifier[isinstance] ( identifier[s] , identifier[_binary_type] ):
keyword[return] identifier[len_fn] ( identifier[_strip_invisible] ( identifier[s] ))
keyword[else] :
keyword[return] identifier[len_fn] ( identifier[_text_type] ( identifier[s] )) | def _visible_width(s):
"""Visible width of a printed string. ANSI color codes are removed.
>>> _visible_width('\x1b[31mhello\x1b[0m'), _visible_width("world")
(5, 5)
"""
# optional wide-character support
if wcwidth is not None and WIDE_CHARS_MODE:
len_fn = wcwidth.wcswidth # depends on [control=['if'], data=[]]
else:
len_fn = len
if isinstance(s, _text_type) or isinstance(s, _binary_type):
return len_fn(_strip_invisible(s)) # depends on [control=['if'], data=[]]
else:
return len_fn(_text_type(s)) |
def get(self, request):
        """
        Forwards to CAS login URL or verifies CAS ticket.

        Flow:
          * already authenticated -> redirect to the next page;
          * ``ticket`` in the query string -> validate it against the CAS
            server, log the user in, record session/proxy tickets, redirect;
          * otherwise -> redirect the browser to the CAS login URL.

        :param request: incoming Django HTTP request
        :return: an HttpResponse redirect; raises PermissionDenied when
            ticket validation fails and retry is disabled
        """
        next_page = request.GET.get('next')
        required = request.GET.get('required', False)
        service_url = get_service_url(request, next_page)
        client = get_cas_client(service_url=service_url, request=request)
        # Recover a previously stored "next" target from the session when
        # none was supplied in the query string.
        if not next_page and settings.CAS_STORE_NEXT and 'CASNEXT' in request.session:
            next_page = request.session['CASNEXT']
            del request.session['CASNEXT']
        if not next_page:
            next_page = get_redirect_url(request)
        if request.user.is_authenticated:
            # Already logged in: optionally show a message, then proceed.
            if settings.CAS_LOGGED_MSG is not None:
                message = settings.CAS_LOGGED_MSG % request.user.get_username()
                messages.success(request, message)
            return self.successful_login(request=request, next_page=next_page)
        ticket = request.GET.get('ticket')
        if ticket:
            # The CAS server redirected back with a service ticket;
            # validate it through the authentication backend.
            user = authenticate(ticket=ticket,
                                service=service_url,
                                request=request)
            # pgtiou is set by the proxy callback view, if one is configured.
            pgtiou = request.session.get("pgtiou")
            if user is not None:
                auth_login(request, user)
                # A session key must exist to associate the ticket with.
                if not request.session.exists(request.session.session_key):
                    request.session.create()
                SessionTicket.objects.create(
                    session_key=request.session.session_key,
                    ticket=ticket
                )
                if pgtiou and settings.CAS_PROXY_CALLBACK:
                    # Delete old PGT
                    ProxyGrantingTicket.objects.filter(
                        user=user,
                        session_key=request.session.session_key
                    ).delete()
                    # Set new PGT ticket
                    try:
                        pgt = ProxyGrantingTicket.objects.get(pgtiou=pgtiou)
                        pgt.user = user
                        pgt.session_key = request.session.session_key
                        pgt.save()
                    except ProxyGrantingTicket.DoesNotExist:
                        # The callback never stored this pgtiou; nothing
                        # to attach to the user.
                        pass
                if settings.CAS_LOGIN_MSG is not None:
                    name = user.get_username()
                    message = settings.CAS_LOGIN_MSG % name
                    messages.success(request, message)
                return self.successful_login(request=request, next_page=next_page)
            elif settings.CAS_RETRY_LOGIN or required:
                # Ticket validation failed: send the user back to CAS.
                return HttpResponseRedirect(client.get_login_url())
            else:
                raise PermissionDenied(_('Login failed.'))
        else:
            # No ticket yet: remember where to go afterwards and redirect
            # to the CAS login page.
            if settings.CAS_STORE_NEXT:
                request.session['CASNEXT'] = next_page
            return HttpResponseRedirect(client.get_login_url())
constant[
Forwards to CAS login URL or verifies CAS ticket
:param request:
:return:
]
variable[next_page] assign[=] call[name[request].GET.get, parameter[constant[next]]]
variable[required] assign[=] call[name[request].GET.get, parameter[constant[required], constant[False]]]
variable[service_url] assign[=] call[name[get_service_url], parameter[name[request], name[next_page]]]
variable[client] assign[=] call[name[get_cas_client], parameter[]]
if <ast.BoolOp object at 0x7da1b1edae30> begin[:]
variable[next_page] assign[=] call[name[request].session][constant[CASNEXT]]
<ast.Delete object at 0x7da1b1edbfa0>
if <ast.UnaryOp object at 0x7da1b1edb130> begin[:]
variable[next_page] assign[=] call[name[get_redirect_url], parameter[name[request]]]
if name[request].user.is_authenticated begin[:]
if compare[name[settings].CAS_LOGGED_MSG is_not constant[None]] begin[:]
variable[message] assign[=] binary_operation[name[settings].CAS_LOGGED_MSG <ast.Mod object at 0x7da2590d6920> call[name[request].user.get_username, parameter[]]]
call[name[messages].success, parameter[name[request], name[message]]]
return[call[name[self].successful_login, parameter[]]]
variable[ticket] assign[=] call[name[request].GET.get, parameter[constant[ticket]]]
if name[ticket] begin[:]
variable[user] assign[=] call[name[authenticate], parameter[]]
variable[pgtiou] assign[=] call[name[request].session.get, parameter[constant[pgtiou]]]
if compare[name[user] is_not constant[None]] begin[:]
call[name[auth_login], parameter[name[request], name[user]]]
if <ast.UnaryOp object at 0x7da1b1d39de0> begin[:]
call[name[request].session.create, parameter[]]
call[name[SessionTicket].objects.create, parameter[]]
if <ast.BoolOp object at 0x7da1b1e79600> begin[:]
call[call[name[ProxyGrantingTicket].objects.filter, parameter[]].delete, parameter[]]
<ast.Try object at 0x7da1b1e7a260>
if compare[name[settings].CAS_LOGIN_MSG is_not constant[None]] begin[:]
variable[name] assign[=] call[name[user].get_username, parameter[]]
variable[message] assign[=] binary_operation[name[settings].CAS_LOGIN_MSG <ast.Mod object at 0x7da2590d6920> name[name]]
call[name[messages].success, parameter[name[request], name[message]]]
return[call[name[self].successful_login, parameter[]]] | keyword[def] identifier[get] ( identifier[self] , identifier[request] ):
literal[string]
identifier[next_page] = identifier[request] . identifier[GET] . identifier[get] ( literal[string] )
identifier[required] = identifier[request] . identifier[GET] . identifier[get] ( literal[string] , keyword[False] )
identifier[service_url] = identifier[get_service_url] ( identifier[request] , identifier[next_page] )
identifier[client] = identifier[get_cas_client] ( identifier[service_url] = identifier[service_url] , identifier[request] = identifier[request] )
keyword[if] keyword[not] identifier[next_page] keyword[and] identifier[settings] . identifier[CAS_STORE_NEXT] keyword[and] literal[string] keyword[in] identifier[request] . identifier[session] :
identifier[next_page] = identifier[request] . identifier[session] [ literal[string] ]
keyword[del] identifier[request] . identifier[session] [ literal[string] ]
keyword[if] keyword[not] identifier[next_page] :
identifier[next_page] = identifier[get_redirect_url] ( identifier[request] )
keyword[if] identifier[request] . identifier[user] . identifier[is_authenticated] :
keyword[if] identifier[settings] . identifier[CAS_LOGGED_MSG] keyword[is] keyword[not] keyword[None] :
identifier[message] = identifier[settings] . identifier[CAS_LOGGED_MSG] % identifier[request] . identifier[user] . identifier[get_username] ()
identifier[messages] . identifier[success] ( identifier[request] , identifier[message] )
keyword[return] identifier[self] . identifier[successful_login] ( identifier[request] = identifier[request] , identifier[next_page] = identifier[next_page] )
identifier[ticket] = identifier[request] . identifier[GET] . identifier[get] ( literal[string] )
keyword[if] identifier[ticket] :
identifier[user] = identifier[authenticate] ( identifier[ticket] = identifier[ticket] ,
identifier[service] = identifier[service_url] ,
identifier[request] = identifier[request] )
identifier[pgtiou] = identifier[request] . identifier[session] . identifier[get] ( literal[string] )
keyword[if] identifier[user] keyword[is] keyword[not] keyword[None] :
identifier[auth_login] ( identifier[request] , identifier[user] )
keyword[if] keyword[not] identifier[request] . identifier[session] . identifier[exists] ( identifier[request] . identifier[session] . identifier[session_key] ):
identifier[request] . identifier[session] . identifier[create] ()
identifier[SessionTicket] . identifier[objects] . identifier[create] (
identifier[session_key] = identifier[request] . identifier[session] . identifier[session_key] ,
identifier[ticket] = identifier[ticket]
)
keyword[if] identifier[pgtiou] keyword[and] identifier[settings] . identifier[CAS_PROXY_CALLBACK] :
identifier[ProxyGrantingTicket] . identifier[objects] . identifier[filter] (
identifier[user] = identifier[user] ,
identifier[session_key] = identifier[request] . identifier[session] . identifier[session_key]
). identifier[delete] ()
keyword[try] :
identifier[pgt] = identifier[ProxyGrantingTicket] . identifier[objects] . identifier[get] ( identifier[pgtiou] = identifier[pgtiou] )
identifier[pgt] . identifier[user] = identifier[user]
identifier[pgt] . identifier[session_key] = identifier[request] . identifier[session] . identifier[session_key]
identifier[pgt] . identifier[save] ()
keyword[except] identifier[ProxyGrantingTicket] . identifier[DoesNotExist] :
keyword[pass]
keyword[if] identifier[settings] . identifier[CAS_LOGIN_MSG] keyword[is] keyword[not] keyword[None] :
identifier[name] = identifier[user] . identifier[get_username] ()
identifier[message] = identifier[settings] . identifier[CAS_LOGIN_MSG] % identifier[name]
identifier[messages] . identifier[success] ( identifier[request] , identifier[message] )
keyword[return] identifier[self] . identifier[successful_login] ( identifier[request] = identifier[request] , identifier[next_page] = identifier[next_page] )
keyword[elif] identifier[settings] . identifier[CAS_RETRY_LOGIN] keyword[or] identifier[required] :
keyword[return] identifier[HttpResponseRedirect] ( identifier[client] . identifier[get_login_url] ())
keyword[else] :
keyword[raise] identifier[PermissionDenied] ( identifier[_] ( literal[string] ))
keyword[else] :
keyword[if] identifier[settings] . identifier[CAS_STORE_NEXT] :
identifier[request] . identifier[session] [ literal[string] ]= identifier[next_page]
keyword[return] identifier[HttpResponseRedirect] ( identifier[client] . identifier[get_login_url] ()) | def get(self, request):
"""
Forwards to CAS login URL or verifies CAS ticket
:param request:
:return:
"""
next_page = request.GET.get('next')
required = request.GET.get('required', False)
service_url = get_service_url(request, next_page)
client = get_cas_client(service_url=service_url, request=request)
if not next_page and settings.CAS_STORE_NEXT and ('CASNEXT' in request.session):
next_page = request.session['CASNEXT']
del request.session['CASNEXT'] # depends on [control=['if'], data=[]]
if not next_page:
next_page = get_redirect_url(request) # depends on [control=['if'], data=[]]
if request.user.is_authenticated:
if settings.CAS_LOGGED_MSG is not None:
message = settings.CAS_LOGGED_MSG % request.user.get_username()
messages.success(request, message) # depends on [control=['if'], data=[]]
return self.successful_login(request=request, next_page=next_page) # depends on [control=['if'], data=[]]
ticket = request.GET.get('ticket')
if ticket:
user = authenticate(ticket=ticket, service=service_url, request=request)
pgtiou = request.session.get('pgtiou')
if user is not None:
auth_login(request, user)
if not request.session.exists(request.session.session_key):
request.session.create() # depends on [control=['if'], data=[]]
SessionTicket.objects.create(session_key=request.session.session_key, ticket=ticket)
if pgtiou and settings.CAS_PROXY_CALLBACK:
# Delete old PGT
ProxyGrantingTicket.objects.filter(user=user, session_key=request.session.session_key).delete()
# Set new PGT ticket
try:
pgt = ProxyGrantingTicket.objects.get(pgtiou=pgtiou)
pgt.user = user
pgt.session_key = request.session.session_key
pgt.save() # depends on [control=['try'], data=[]]
except ProxyGrantingTicket.DoesNotExist:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if settings.CAS_LOGIN_MSG is not None:
name = user.get_username()
message = settings.CAS_LOGIN_MSG % name
messages.success(request, message) # depends on [control=['if'], data=[]]
return self.successful_login(request=request, next_page=next_page) # depends on [control=['if'], data=['user']]
elif settings.CAS_RETRY_LOGIN or required:
return HttpResponseRedirect(client.get_login_url()) # depends on [control=['if'], data=[]]
else:
raise PermissionDenied(_('Login failed.')) # depends on [control=['if'], data=[]]
else:
if settings.CAS_STORE_NEXT:
request.session['CASNEXT'] = next_page # depends on [control=['if'], data=[]]
return HttpResponseRedirect(client.get_login_url()) |
def load_output_meta(self):
        """
        Load descriptive output meta data from a JSON file in the input directory.

        Reads ``output.meta.json`` from ``self.options.inputdir`` and
        returns the parsed JSON content.
        """
        meta_path = os.path.join(self.options.inputdir, 'output.meta.json')
        with open(meta_path) as meta_file:
            return json.load(meta_file)
constant[
Load descriptive output meta data from a JSON file in the input directory.
]
variable[options] assign[=] name[self].options
variable[file_path] assign[=] call[name[os].path.join, parameter[name[options].inputdir, constant[output.meta.json]]]
with call[name[open], parameter[name[file_path]]] begin[:]
return[call[name[json].load, parameter[name[infile]]]] | keyword[def] identifier[load_output_meta] ( identifier[self] ):
literal[string]
identifier[options] = identifier[self] . identifier[options]
identifier[file_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[options] . identifier[inputdir] , literal[string] )
keyword[with] identifier[open] ( identifier[file_path] ) keyword[as] identifier[infile] :
keyword[return] identifier[json] . identifier[load] ( identifier[infile] ) | def load_output_meta(self):
"""
Load descriptive output meta data from a JSON file in the input directory.
"""
options = self.options
file_path = os.path.join(options.inputdir, 'output.meta.json')
with open(file_path) as infile:
return json.load(infile) # depends on [control=['with'], data=['infile']] |
def fullscreen(self):
    ''' Context manager that switches the terminal to the alternate
        (full) screen and restores the normal screen when the block
        exits, even on error.
        ::
            with screen.fullscreen():
                print('Hello, world!')
    '''
    out = self._stream
    # Enter the alternate screen and remember the current icon/title.
    out.write(self.alt_screen_enable)
    out.write(str(self.save_title(0)))  # 0 = both icon, title
    out.flush()
    try:
        yield self
    finally:
        # Always leave the alternate screen and restore the title.
        out.write(self.alt_screen_disable)
        out.write(str(self.restore_title(0)))  # 0 = icon & title
        out.flush()
constant[ Context Manager that enters full-screen mode and restores normal
mode on exit.
::
with screen.fullscreen():
print('Hello, world!')
]
variable[stream] assign[=] name[self]._stream
call[name[stream].write, parameter[name[self].alt_screen_enable]]
call[name[stream].write, parameter[call[name[str], parameter[call[name[self].save_title, parameter[constant[0]]]]]]]
call[name[stream].flush, parameter[]]
<ast.Try object at 0x7da1b26a3fa0> | keyword[def] identifier[fullscreen] ( identifier[self] ):
literal[string]
identifier[stream] = identifier[self] . identifier[_stream]
identifier[stream] . identifier[write] ( identifier[self] . identifier[alt_screen_enable] )
identifier[stream] . identifier[write] ( identifier[str] ( identifier[self] . identifier[save_title] ( literal[int] )))
identifier[stream] . identifier[flush] ()
keyword[try] :
keyword[yield] identifier[self]
keyword[finally] :
identifier[stream] . identifier[write] ( identifier[self] . identifier[alt_screen_disable] )
identifier[stream] . identifier[write] ( identifier[str] ( identifier[self] . identifier[restore_title] ( literal[int] )))
identifier[stream] . identifier[flush] () | def fullscreen(self):
""" Context Manager that enters full-screen mode and restores normal
mode on exit.
::
with screen.fullscreen():
print('Hello, world!')
"""
stream = self._stream
stream.write(self.alt_screen_enable)
stream.write(str(self.save_title(0))) # 0 = both icon, title
stream.flush()
try:
yield self # depends on [control=['try'], data=[]]
finally:
stream.write(self.alt_screen_disable)
stream.write(str(self.restore_title(0))) # 0 = icon & title
stream.flush() |
def build_from_developer_settings(api_name: str, api_version: str):
    """
    Build an ApiClient for the given API from locally stored developer
    settings (refresh token and client credentials).
    :param api_name: Example hello
    :param api_version: Example v1, v2alpha
    :return: ApiClient
    """
    settings = read_developer_settings()
    host = 'http://{}.apis.devision.io'.format(api_name)
    return ApiClient(
        host=host,
        api_version=api_version,
        access_token=None,
        refresh_token=settings['refreshToken'],
        client_id=settings['clientId'],
        client_secret=settings['clientSecret'],
    )
constant[
:param api_name: Example hello
:param api_version: Example v1, v2alpha
:return: ApiClient
]
variable[developer_settings] assign[=] call[name[read_developer_settings], parameter[]]
variable[api_host] assign[=] binary_operation[binary_operation[constant[http://] + name[api_name]] + constant[.apis.devision.io]]
return[call[name[ApiClient], parameter[]]] | keyword[def] identifier[build_from_developer_settings] ( identifier[api_name] : identifier[str] , identifier[api_version] : identifier[str] ):
literal[string]
identifier[developer_settings] = identifier[read_developer_settings] ()
identifier[api_host] = literal[string] + identifier[api_name] + literal[string]
keyword[return] identifier[ApiClient] (
identifier[host] = identifier[api_host] ,
identifier[api_version] = identifier[api_version] ,
identifier[access_token] = keyword[None] ,
identifier[refresh_token] = identifier[developer_settings] [ literal[string] ],
identifier[client_id] = identifier[developer_settings] [ literal[string] ],
identifier[client_secret] = identifier[developer_settings] [ literal[string] ],
) | def build_from_developer_settings(api_name: str, api_version: str):
"""
:param api_name: Example hello
:param api_version: Example v1, v2alpha
:return: ApiClient
"""
developer_settings = read_developer_settings()
api_host = 'http://' + api_name + '.apis.devision.io'
return ApiClient(host=api_host, api_version=api_version, access_token=None, refresh_token=developer_settings['refreshToken'], client_id=developer_settings['clientId'], client_secret=developer_settings['clientSecret']) |
def _parse_contract_wages(self, table):
    """
    Parse the wages on the contract.
    The wages are listed as the data points in the contract table. Any
    values that don't have a value which starts with a '$' sign are likely
    not valid and are dropped.
    Parameters
    ----------
    table : PyQuery object
        A PyQuery object containing the contract table.
    Returns
    -------
    list
        Returns a list of all wages where each element is a string denoting
        the dollar amount, such as '$40,000,000'.
    """
    # Filter invalid cells up front. The previous implementation mapped
    # each invalid cell to '' and then called wages.remove(''), which
    # removed only the FIRST placeholder (leaving '' entries when several
    # cells were invalid) and raised ValueError when every cell was valid.
    return [cell.text() for cell in table('td').items()
            if cell.text().startswith('$')]
constant[
Parse the wages on the contract.
The wages are listed as the data points in the contract table. Any
values that don't have a value which starts with a '$' sign are likely
not valid and should be dropped.
Parameters
----------
table : PyQuery object
A PyQuery object containing the contract table.
Returns
-------
list
Returns a list of all wages where each element is a string denoting
the dollar amount, such as '$40,000,000'.
]
variable[wages] assign[=] <ast.ListComp object at 0x7da1b0bca080>
call[name[wages].remove, parameter[constant[]]]
return[name[wages]] | keyword[def] identifier[_parse_contract_wages] ( identifier[self] , identifier[table] ):
literal[string]
identifier[wages] =[ identifier[i] . identifier[text] () keyword[if] identifier[i] . identifier[text] (). identifier[startswith] ( literal[string] ) keyword[else] literal[string]
keyword[for] identifier[i] keyword[in] identifier[table] ( literal[string] ). identifier[items] ()]
identifier[wages] . identifier[remove] ( literal[string] )
keyword[return] identifier[wages] | def _parse_contract_wages(self, table):
"""
Parse the wages on the contract.
The wages are listed as the data points in the contract table. Any
values that don't have a value which starts with a '$' sign are likely
not valid and should be dropped.
Parameters
----------
table : PyQuery object
A PyQuery object containing the contract table.
Returns
-------
list
Returns a list of all wages where each element is a string denoting
the dollar amount, such as '$40,000,000'.
"""
wages = [i.text() if i.text().startswith('$') else '' for i in table('td').items()]
wages.remove('')
return wages |
def proximal(self):
    """Return the `proximal factory` of the functional.
    See Also
    --------
    odl.solvers.nonsmooth.proximal_operators.proximal_convex_conj_l1 :
        `proximal factory` for convex conjuagte of L1-norm.
    odl.solvers.nonsmooth.proximal_operators.proximal_convex_conj_l2 :
        `proximal factory` for convex conjuagte of L2-norm.
    odl.solvers.nonsmooth.proximal_operators.proximal_convex_conj_linfty :
        `proximal factory` for convex conjuagte of Linfty-norm.
    """
    exponent = self.exponent
    # Guard-clause dispatch on the supported exponents.
    if exponent == 2:
        return proximal_convex_conj_l2(space=self.domain)
    if exponent == 1:
        return proximal_convex_conj_linfty(space=self.domain)
    if exponent == np.inf:
        return proximal_convex_conj_l1(space=self.domain)
    raise NotImplementedError('`proximal` only implemented for p=1, '
                              'p=2 or p=inf')
constant[Return the `proximal factory` of the functional.
See Also
--------
odl.solvers.nonsmooth.proximal_operators.proximal_convex_conj_l1 :
`proximal factory` for convex conjuagte of L1-norm.
odl.solvers.nonsmooth.proximal_operators.proximal_convex_conj_l2 :
`proximal factory` for convex conjuagte of L2-norm.
odl.solvers.nonsmooth.proximal_operators.proximal_convex_conj_linfty :
`proximal factory` for convex conjuagte of Linfty-norm.
]
if compare[name[self].exponent equal[==] name[np].inf] begin[:]
return[call[name[proximal_convex_conj_l1], parameter[]]] | keyword[def] identifier[proximal] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[exponent] == identifier[np] . identifier[inf] :
keyword[return] identifier[proximal_convex_conj_l1] ( identifier[space] = identifier[self] . identifier[domain] )
keyword[elif] identifier[self] . identifier[exponent] == literal[int] :
keyword[return] identifier[proximal_convex_conj_l2] ( identifier[space] = identifier[self] . identifier[domain] )
keyword[elif] identifier[self] . identifier[exponent] == literal[int] :
keyword[return] identifier[proximal_convex_conj_linfty] ( identifier[space] = identifier[self] . identifier[domain] )
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string]
literal[string] ) | def proximal(self):
"""Return the `proximal factory` of the functional.
See Also
--------
odl.solvers.nonsmooth.proximal_operators.proximal_convex_conj_l1 :
`proximal factory` for convex conjuagte of L1-norm.
odl.solvers.nonsmooth.proximal_operators.proximal_convex_conj_l2 :
`proximal factory` for convex conjuagte of L2-norm.
odl.solvers.nonsmooth.proximal_operators.proximal_convex_conj_linfty :
`proximal factory` for convex conjuagte of Linfty-norm.
"""
if self.exponent == np.inf:
return proximal_convex_conj_l1(space=self.domain) # depends on [control=['if'], data=[]]
elif self.exponent == 2:
return proximal_convex_conj_l2(space=self.domain) # depends on [control=['if'], data=[]]
elif self.exponent == 1:
return proximal_convex_conj_linfty(space=self.domain) # depends on [control=['if'], data=[]]
else:
raise NotImplementedError('`proximal` only implemented for p=1, p=2 or p=inf') |
def dataframe(self, only_successful=True):
    """Return the results as a pandas DataFrame. Note that there is a danger
    of duplicate labels here, for example if the results contain a value
    with the same name as one of the parameters. To resolve this, parameter
    names take precedence over metadata values, and result names take
    precedence over parameter names.
    If the only_successful flag is set (the default), then the DataFrame
    will only include results that completed without an exception; if it is
    set to False, the DataFrame will include all results and also the
    exception details.
    :param only_successful: include only successful experiments (defaults to True)
    :returns: the parameters, results, and metadata in a DataFrame"""
    rows = []
    for result in self.results():
        metadata = result[Experiment.METADATA]
        if metadata[Experiment.STATUS]:
            # Successful run: merge metadata, then parameters, then
            # results, so later updates win on duplicate keys.
            row = metadata.copy()
            row.update(result[Experiment.PARAMETERS])
            row.update(result[Experiment.RESULTS])
        elif not only_successful:
            # Failed run, but the caller asked for everything; there are
            # no results to merge in.
            row = metadata.copy()
            row.update(result[Experiment.PARAMETERS])
        else:
            # Failed run excluded from the frame.
            continue
        rows.append(row)
    return DataFrame.from_records(rows)
constant[Return the results as a pandas DataFrame. Note that there is a danger
of duplicate labels here, for example if the results contain a value
with the same name as one of the parameters. To resolve this, parameter names
take precedence over metadata values, and result names take precedence over
parameter names.
If the only_successful flag is set (the default), then the DataFrame will
only include results that completed without an exception; if it is set to
False, the DataFrame will include all results and also the exception details.
:param only_successful: include only successful experiments (defaults to True)
:returns: the parameters, results, and metadata in a DataFrame]
def function[extract, parameter[r]]:
if call[call[name[r]][name[Experiment].METADATA]][name[Experiment].STATUS] begin[:]
variable[rd] assign[=] call[call[name[r]][name[Experiment].METADATA].copy, parameter[]]
call[name[rd].update, parameter[call[name[r]][name[Experiment].PARAMETERS]]]
call[name[rd].update, parameter[call[name[r]][name[Experiment].RESULTS]]]
return[name[rd]]
variable[records] assign[=] <ast.ListComp object at 0x7da1b0a79510>
return[call[name[DataFrame].from_records, parameter[name[records]]]] | keyword[def] identifier[dataframe] ( identifier[self] , identifier[only_successful] = keyword[True] ):
literal[string]
keyword[def] identifier[extract] ( identifier[r] ):
keyword[if] identifier[r] [ identifier[Experiment] . identifier[METADATA] ][ identifier[Experiment] . identifier[STATUS] ]:
identifier[rd] = identifier[r] [ identifier[Experiment] . identifier[METADATA] ]. identifier[copy] ()
identifier[rd] . identifier[update] ( identifier[r] [ identifier[Experiment] . identifier[PARAMETERS] ])
identifier[rd] . identifier[update] ( identifier[r] [ identifier[Experiment] . identifier[RESULTS] ])
keyword[else] :
keyword[if] keyword[not] identifier[only_successful] :
identifier[rd] = identifier[r] [ identifier[Experiment] . identifier[METADATA] ]. identifier[copy] ()
identifier[rd] . identifier[update] ( identifier[r] [ identifier[Experiment] . identifier[PARAMETERS] ])
keyword[else] :
identifier[rd] = keyword[None]
keyword[return] identifier[rd]
identifier[records] =[ identifier[r] keyword[for] identifier[r] keyword[in] identifier[map] ( identifier[extract] , identifier[self] . identifier[results] ()) keyword[if] identifier[r] keyword[is] keyword[not] keyword[None] ]
keyword[return] identifier[DataFrame] . identifier[from_records] ( identifier[records] ) | def dataframe(self, only_successful=True):
"""Return the results as a pandas DataFrame. Note that there is a danger
of duplicate labels here, for example if the results contain a value
with the same name as one of the parameters. To resolve this, parameter names
take precedence over metadata values, and result names take precedence over
parameter names.
If the only_successful flag is set (the default), then the DataFrame will
only include results that completed without an exception; if it is set to
False, the DataFrame will include all results and also the exception details.
:param only_successful: include only successful experiments (defaults to True)
:returns: the parameters, results, and metadata in a DataFrame"""
def extract(r):
if r[Experiment.METADATA][Experiment.STATUS]:
# experiment was a success, include it
rd = r[Experiment.METADATA].copy()
rd.update(r[Experiment.PARAMETERS])
rd.update(r[Experiment.RESULTS]) # depends on [control=['if'], data=[]]
# experiment returned an exception
elif not only_successful:
# ...but we want it anyway
rd = r[Experiment.METADATA].copy()
rd.update(r[Experiment.PARAMETERS]) # depends on [control=['if'], data=[]]
else:
# ...and there are no results to add
rd = None
return rd
records = [r for r in map(extract, self.results()) if r is not None]
return DataFrame.from_records(records) |
def load_renderers_into_events(events, mediums, context_renderers, default_rendering_style):
    """
    Given the events and the context renderers, load the renderers into the
    event objects so that they may be able to call the 'render' method
    later on.
    """
    # Index the renderers two ways: by (source group, style) and by
    # (source, style). Renderers missing the relevant id are skipped
    # from that index.
    by_group = {}
    by_source = {}
    for renderer in context_renderers:
        if renderer.source_group_id:
            by_group[(renderer.source_group_id, renderer.rendering_style_id)] = renderer
        if renderer.source_id:
            by_source[(renderer.source_id, renderer.rendering_style_id)] = renderer
    for event in events:
        group_id = event.source.group_id
        for medium in mediums:
            # Resolution order:
            #   1. source group + medium rendering style
            #   2. source + medium rendering style
            #   3. source group + default rendering style
            #   4. source + default rendering style
            # If nothing matches, the event cannot be rendered for this
            # medium and no renderer is recorded.
            renderer = (by_group.get((group_id, medium.rendering_style_id))
                        or by_source.get((event.source_id, medium.rendering_style_id)))
            if not renderer and default_rendering_style:
                renderer = (by_group.get((group_id, default_rendering_style.id))
                            or by_source.get((event.source_id, default_rendering_style.id)))
            if renderer:
                event._context_renderers[medium] = renderer
constant[
Given the events and the context renderers, load the renderers into the event objects
so that they may be able to call the 'render' method later on.
]
variable[source_group_style_to_renderer] assign[=] <ast.DictComp object at 0x7da1b0b1b610>
variable[source_style_to_renderer] assign[=] <ast.DictComp object at 0x7da1b0b1b4f0>
for taget[name[e]] in starred[name[events]] begin[:]
for taget[name[m]] in starred[name[mediums]] begin[:]
variable[cr] assign[=] call[name[source_group_style_to_renderer].get, parameter[tuple[[<ast.Attribute object at 0x7da1b0b1a4d0>, <ast.Attribute object at 0x7da1b0b1a500>]]]]
if <ast.UnaryOp object at 0x7da1b0b1b8e0> begin[:]
variable[cr] assign[=] call[name[source_style_to_renderer].get, parameter[tuple[[<ast.Attribute object at 0x7da1b0b1bcd0>, <ast.Attribute object at 0x7da1b0b1bd00>]]]]
if <ast.BoolOp object at 0x7da1b0b1b790> begin[:]
variable[cr] assign[=] call[name[source_group_style_to_renderer].get, parameter[tuple[[<ast.Attribute object at 0x7da1b0b1b7c0>, <ast.Attribute object at 0x7da1b0b1beb0>]]]]
if <ast.BoolOp object at 0x7da1b0b1ae00> begin[:]
variable[cr] assign[=] call[name[source_style_to_renderer].get, parameter[tuple[[<ast.Attribute object at 0x7da1b2344a30>, <ast.Attribute object at 0x7da1b2347220>]]]]
if name[cr] begin[:]
call[name[e]._context_renderers][name[m]] assign[=] name[cr] | keyword[def] identifier[load_renderers_into_events] ( identifier[events] , identifier[mediums] , identifier[context_renderers] , identifier[default_rendering_style] ):
literal[string]
identifier[source_group_style_to_renderer] ={
( identifier[cr] . identifier[source_group_id] , identifier[cr] . identifier[rendering_style_id] ): identifier[cr]
keyword[for] identifier[cr] keyword[in] identifier[context_renderers] keyword[if] identifier[cr] . identifier[source_group_id]
}
identifier[source_style_to_renderer] ={
( identifier[cr] . identifier[source_id] , identifier[cr] . identifier[rendering_style_id] ): identifier[cr]
keyword[for] identifier[cr] keyword[in] identifier[context_renderers] keyword[if] identifier[cr] . identifier[source_id]
}
keyword[for] identifier[e] keyword[in] identifier[events] :
keyword[for] identifier[m] keyword[in] identifier[mediums] :
identifier[cr] = identifier[source_group_style_to_renderer] . identifier[get] (( identifier[e] . identifier[source] . identifier[group_id] , identifier[m] . identifier[rendering_style_id] ))
keyword[if] keyword[not] identifier[cr] :
identifier[cr] = identifier[source_style_to_renderer] . identifier[get] (( identifier[e] . identifier[source_id] , identifier[m] . identifier[rendering_style_id] ))
keyword[if] keyword[not] identifier[cr] keyword[and] identifier[default_rendering_style] :
identifier[cr] = identifier[source_group_style_to_renderer] . identifier[get] (( identifier[e] . identifier[source] . identifier[group_id] , identifier[default_rendering_style] . identifier[id] ))
keyword[if] keyword[not] identifier[cr] keyword[and] identifier[default_rendering_style] :
identifier[cr] = identifier[source_style_to_renderer] . identifier[get] (( identifier[e] . identifier[source_id] , identifier[default_rendering_style] . identifier[id] ))
keyword[if] identifier[cr] :
identifier[e] . identifier[_context_renderers] [ identifier[m] ]= identifier[cr] | def load_renderers_into_events(events, mediums, context_renderers, default_rendering_style):
"""
Given the events and the context renderers, load the renderers into the event objects
so that they may be able to call the 'render' method later on.
"""
# Make a mapping of source groups and rendering styles to context renderers. Do
# the same for sources and rendering styles to context renderers
source_group_style_to_renderer = {(cr.source_group_id, cr.rendering_style_id): cr for cr in context_renderers if cr.source_group_id}
source_style_to_renderer = {(cr.source_id, cr.rendering_style_id): cr for cr in context_renderers if cr.source_id}
for e in events:
for m in mediums:
# Try the following when loading a context renderer for a medium in an event.
# 1. Try to look up the renderer based on the source group and medium rendering style
# 2. If step 1 doesn't work, look up based on the source and medium rendering style
# 3. If step 2 doesn't work, look up based on the source group and default rendering style
# 4. if step 3 doesn't work, look up based on the source and default rendering style
# If none of those steps work, this event will not be able to be rendered for the mediun
cr = source_group_style_to_renderer.get((e.source.group_id, m.rendering_style_id))
if not cr:
cr = source_style_to_renderer.get((e.source_id, m.rendering_style_id)) # depends on [control=['if'], data=[]]
if not cr and default_rendering_style:
cr = source_group_style_to_renderer.get((e.source.group_id, default_rendering_style.id)) # depends on [control=['if'], data=[]]
if not cr and default_rendering_style:
cr = source_style_to_renderer.get((e.source_id, default_rendering_style.id)) # depends on [control=['if'], data=[]]
if cr:
e._context_renderers[m] = cr # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']] # depends on [control=['for'], data=['e']] |
def nextobject(self):
    """Yields a list of objects.
    Returns keywords, literals, strings, numbers, arrays and dictionaries.
    Arrays and dictionaries are represented as Python lists and dictionaries.

    Raises PSSyntaxError when a dictionary literal has an odd number of
    elements (a key without a value). Mismatched array/dict/proc
    delimiters raise PSTypeError only when STRICT is set; otherwise they
    are silently ignored.
    """
    # Consume tokens until push()/flush() has placed at least one
    # complete object into self.results.
    while not self.results:
        (pos, token) = self.nexttoken()
        #print (pos,token), (self.curtype, self.curstack)
        if isinstance(token, (int, long, float, bool, str, PSLiteral)):
            # normal token; NOTE(review): 'long' implies this targets
            # Python 2 -- confirm before running under Python 3.
            self.push((pos, token))
        elif token == KEYWORD_ARRAY_BEGIN:
            # begin array: open an 'a' context on the parse stack
            self.start_type(pos, 'a')
        elif token == KEYWORD_ARRAY_END:
            # end array: close the 'a' context and push the list
            try:
                self.push(self.end_type('a'))
            except PSTypeError:
                if STRICT:
                    raise
        elif token == KEYWORD_DICT_BEGIN:
            # begin dictionary: open a 'd' context
            self.start_type(pos, 'd')
        elif token == KEYWORD_DICT_END:
            # end dictionary: collected objects pair up as key/value
            try:
                (pos, objs) = self.end_type('d')
                if len(objs) % 2 != 0:
                    raise PSSyntaxError('Invalid dictionary construct: %r' % (objs,))
                # construct a Python dictionary; entries whose value is
                # None are silently dropped.
                d = dict((literal_name(k), v) for (k, v) in choplist(2, objs) if v is not None)
                self.push((pos, d))
            except PSTypeError:
                if STRICT:
                    raise
        elif token == KEYWORD_PROC_BEGIN:
            # begin proc
            self.start_type(pos, 'p')
        elif token == KEYWORD_PROC_END:
            # end proc
            try:
                self.push(self.end_type('p'))
            except PSTypeError:
                if STRICT:
                    raise
        else:
            if self.debug:
                logging.debug('do_keyword: pos=%r, token=%r, stack=%r' % \
                              (pos, token, self.curstack))
            # any other token is a keyword; subclasses decide its meaning
            self.do_keyword(pos, token)
        if self.context:
            # still inside an open array/dict/proc: keep reading tokens
            continue
        else:
            self.flush()
    # Hand back the oldest completed object (FIFO).
    obj = self.results.pop(0)
    if self.debug:
        logging.debug('nextobject: %r' % (obj,))
    return obj
constant[Yields a list of objects.
Returns keywords, literals, strings, numbers, arrays and dictionaries.
Arrays and dictionaries are represented as Python lists and dictionaries.
]
while <ast.UnaryOp object at 0x7da1b170cf10> begin[:]
<ast.Tuple object at 0x7da1b170cb20> assign[=] call[name[self].nexttoken, parameter[]]
if call[name[isinstance], parameter[name[token], tuple[[<ast.Name object at 0x7da1b170d120>, <ast.Name object at 0x7da1b170cc70>, <ast.Name object at 0x7da1b170f7c0>, <ast.Name object at 0x7da1b170d5a0>, <ast.Name object at 0x7da1b170cd90>, <ast.Name object at 0x7da1b170d5d0>]]]] begin[:]
call[name[self].push, parameter[tuple[[<ast.Name object at 0x7da1b170cf40>, <ast.Name object at 0x7da1b170ccd0>]]]]
if name[self].context begin[:]
continue
variable[obj] assign[=] call[name[self].results.pop, parameter[constant[0]]]
if name[self].debug begin[:]
call[name[logging].debug, parameter[binary_operation[constant[nextobject: %r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f00e710>]]]]]
return[name[obj]] | keyword[def] identifier[nextobject] ( identifier[self] ):
literal[string]
keyword[while] keyword[not] identifier[self] . identifier[results] :
( identifier[pos] , identifier[token] )= identifier[self] . identifier[nexttoken] ()
keyword[if] identifier[isinstance] ( identifier[token] ,( identifier[int] , identifier[long] , identifier[float] , identifier[bool] , identifier[str] , identifier[PSLiteral] )):
identifier[self] . identifier[push] (( identifier[pos] , identifier[token] ))
keyword[elif] identifier[token] == identifier[KEYWORD_ARRAY_BEGIN] :
identifier[self] . identifier[start_type] ( identifier[pos] , literal[string] )
keyword[elif] identifier[token] == identifier[KEYWORD_ARRAY_END] :
keyword[try] :
identifier[self] . identifier[push] ( identifier[self] . identifier[end_type] ( literal[string] ))
keyword[except] identifier[PSTypeError] :
keyword[if] identifier[STRICT] :
keyword[raise]
keyword[elif] identifier[token] == identifier[KEYWORD_DICT_BEGIN] :
identifier[self] . identifier[start_type] ( identifier[pos] , literal[string] )
keyword[elif] identifier[token] == identifier[KEYWORD_DICT_END] :
keyword[try] :
( identifier[pos] , identifier[objs] )= identifier[self] . identifier[end_type] ( literal[string] )
keyword[if] identifier[len] ( identifier[objs] )% literal[int] != literal[int] :
keyword[raise] identifier[PSSyntaxError] ( literal[string] %( identifier[objs] ,))
identifier[d] = identifier[dict] (( identifier[literal_name] ( identifier[k] ), identifier[v] ) keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[choplist] ( literal[int] , identifier[objs] ) keyword[if] identifier[v] keyword[is] keyword[not] keyword[None] )
identifier[self] . identifier[push] (( identifier[pos] , identifier[d] ))
keyword[except] identifier[PSTypeError] :
keyword[if] identifier[STRICT] :
keyword[raise]
keyword[elif] identifier[token] == identifier[KEYWORD_PROC_BEGIN] :
identifier[self] . identifier[start_type] ( identifier[pos] , literal[string] )
keyword[elif] identifier[token] == identifier[KEYWORD_PROC_END] :
keyword[try] :
identifier[self] . identifier[push] ( identifier[self] . identifier[end_type] ( literal[string] ))
keyword[except] identifier[PSTypeError] :
keyword[if] identifier[STRICT] :
keyword[raise]
keyword[else] :
keyword[if] identifier[self] . identifier[debug] :
identifier[logging] . identifier[debug] ( literal[string] %( identifier[pos] , identifier[token] , identifier[self] . identifier[curstack] ))
identifier[self] . identifier[do_keyword] ( identifier[pos] , identifier[token] )
keyword[if] identifier[self] . identifier[context] :
keyword[continue]
keyword[else] :
identifier[self] . identifier[flush] ()
identifier[obj] = identifier[self] . identifier[results] . identifier[pop] ( literal[int] )
keyword[if] identifier[self] . identifier[debug] :
identifier[logging] . identifier[debug] ( literal[string] %( identifier[obj] ,))
keyword[return] identifier[obj] | def nextobject(self):
"""Yields a list of objects.
Returns keywords, literals, strings, numbers, arrays and dictionaries.
Arrays and dictionaries are represented as Python lists and dictionaries.
"""
while not self.results:
(pos, token) = self.nexttoken()
#print (pos,token), (self.curtype, self.curstack)
if isinstance(token, (int, long, float, bool, str, PSLiteral)):
# normal token
self.push((pos, token)) # depends on [control=['if'], data=[]]
elif token == KEYWORD_ARRAY_BEGIN:
# begin array
self.start_type(pos, 'a') # depends on [control=['if'], data=[]]
elif token == KEYWORD_ARRAY_END:
# end array
try:
self.push(self.end_type('a')) # depends on [control=['try'], data=[]]
except PSTypeError:
if STRICT:
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif token == KEYWORD_DICT_BEGIN:
# begin dictionary
self.start_type(pos, 'd') # depends on [control=['if'], data=[]]
elif token == KEYWORD_DICT_END:
# end dictionary
try:
(pos, objs) = self.end_type('d')
if len(objs) % 2 != 0:
raise PSSyntaxError('Invalid dictionary construct: %r' % (objs,)) # depends on [control=['if'], data=[]]
# construct a Python dictionary.
d = dict(((literal_name(k), v) for (k, v) in choplist(2, objs) if v is not None))
self.push((pos, d)) # depends on [control=['try'], data=[]]
except PSTypeError:
if STRICT:
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif token == KEYWORD_PROC_BEGIN:
# begin proc
self.start_type(pos, 'p') # depends on [control=['if'], data=[]]
elif token == KEYWORD_PROC_END:
# end proc
try:
self.push(self.end_type('p')) # depends on [control=['try'], data=[]]
except PSTypeError:
if STRICT:
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
if self.debug:
logging.debug('do_keyword: pos=%r, token=%r, stack=%r' % (pos, token, self.curstack)) # depends on [control=['if'], data=[]]
self.do_keyword(pos, token)
if self.context:
continue # depends on [control=['if'], data=[]]
else:
self.flush() # depends on [control=['while'], data=[]]
obj = self.results.pop(0)
if self.debug:
logging.debug('nextobject: %r' % (obj,)) # depends on [control=['if'], data=[]]
return obj |
def get_iterator(data_shape, use_caffe_data):
    """Generate the iterator of mnist dataset

    Returns a factory ``(args, kv) -> (train_iter, val_iter)`` backed
    either by the built-in MNISTIter (raw ubyte files) or by a Caffe
    LMDB reader, depending on ``use_caffe_data``.
    """
    def get_iterator_impl_mnist(args, kv):
        """return train and val iterators for mnist"""
        # download data
        get_mnist_ubyte()
        # flatten records only when the requested input shape is 1-D
        flat = False if len(data_shape) != 1 else True
        train = mx.io.MNISTIter(
            image="data/train-images-idx3-ubyte",
            label="data/train-labels-idx1-ubyte",
            input_shape=data_shape,
            batch_size=args.batch_size,
            shuffle=True,
            flat=flat,
            num_parts=kv.num_workers,
            part_index=kv.rank)
        val = mx.io.MNISTIter(
            image="data/t10k-images-idx3-ubyte",
            label="data/t10k-labels-idx1-ubyte",
            input_shape=data_shape,
            batch_size=args.batch_size,
            flat=flat,
            num_parts=kv.num_workers,
            part_index=kv.rank)
        return (train, val)
    def get_iterator_impl_caffe(args, kv):
        """return train and val iterators reading Caffe LMDB databases"""
        # NOTE(review): kv is unused here -- the Caffe iterators are not
        # sharded across workers, unlike the MNISTIter path above.
        flat = False if len(data_shape) != 1 else True
        train = mx.io.CaffeDataIter(
            prototxt=
            'layer { \
                name: "mnist" \
                type: "Data" \
                top: "data" \
                top: "label" \
                include { \
                    phase: TRAIN \
                } \
                transform_param { \
                    scale: 0.00390625 \
                } \
                data_param { \
                    source: "mnist_train_lmdb" \
                    batch_size: 64 \
                    backend: LMDB \
                } \
            }',
            flat=flat,
            num_examples=60000
            # float32 is the default, so left out here in order to illustrate
        )
        val = mx.io.CaffeDataIter(
            prototxt=
            'layer { \
                name: "mnist" \
                type: "Data" \
                top: "data" \
                top: "label" \
                include { \
                    phase: TEST \
                } \
                transform_param { \
                    scale: 0.00390625 \
                } \
                data_param { \
                    source: "mnist_test_lmdb" \
                    batch_size: 100 \
                    backend: LMDB \
                } \
            }',
            flat=flat,
            num_examples=10000,
            dtype="float32" # float32 is the default
        )
        return train, val
    if use_caffe_data:
        return get_iterator_impl_caffe
    else:
        return get_iterator_impl_mnist
constant[Generate the iterator of mnist dataset]
def function[get_iterator_impl_mnist, parameter[args, kv]]:
constant[return train and val iterators for mnist]
call[name[get_mnist_ubyte], parameter[]]
variable[flat] assign[=] <ast.IfExp object at 0x7da18fe90f10>
variable[train] assign[=] call[name[mx].io.MNISTIter, parameter[]]
variable[val] assign[=] call[name[mx].io.MNISTIter, parameter[]]
return[tuple[[<ast.Name object at 0x7da18fe929e0>, <ast.Name object at 0x7da18fe92770>]]]
def function[get_iterator_impl_caffe, parameter[args, kv]]:
variable[flat] assign[=] <ast.IfExp object at 0x7da18fe92c80>
variable[train] assign[=] call[name[mx].io.CaffeDataIter, parameter[]]
variable[val] assign[=] call[name[mx].io.CaffeDataIter, parameter[]]
return[tuple[[<ast.Name object at 0x7da20e955f30>, <ast.Name object at 0x7da20e957370>]]]
if name[use_caffe_data] begin[:]
return[name[get_iterator_impl_caffe]] | keyword[def] identifier[get_iterator] ( identifier[data_shape] , identifier[use_caffe_data] ):
literal[string]
keyword[def] identifier[get_iterator_impl_mnist] ( identifier[args] , identifier[kv] ):
literal[string]
identifier[get_mnist_ubyte] ()
identifier[flat] = keyword[False] keyword[if] identifier[len] ( identifier[data_shape] )!= literal[int] keyword[else] keyword[True]
identifier[train] = identifier[mx] . identifier[io] . identifier[MNISTIter] (
identifier[image] = literal[string] ,
identifier[label] = literal[string] ,
identifier[input_shape] = identifier[data_shape] ,
identifier[batch_size] = identifier[args] . identifier[batch_size] ,
identifier[shuffle] = keyword[True] ,
identifier[flat] = identifier[flat] ,
identifier[num_parts] = identifier[kv] . identifier[num_workers] ,
identifier[part_index] = identifier[kv] . identifier[rank] )
identifier[val] = identifier[mx] . identifier[io] . identifier[MNISTIter] (
identifier[image] = literal[string] ,
identifier[label] = literal[string] ,
identifier[input_shape] = identifier[data_shape] ,
identifier[batch_size] = identifier[args] . identifier[batch_size] ,
identifier[flat] = identifier[flat] ,
identifier[num_parts] = identifier[kv] . identifier[num_workers] ,
identifier[part_index] = identifier[kv] . identifier[rank] )
keyword[return] ( identifier[train] , identifier[val] )
keyword[def] identifier[get_iterator_impl_caffe] ( identifier[args] , identifier[kv] ):
identifier[flat] = keyword[False] keyword[if] identifier[len] ( identifier[data_shape] )!= literal[int] keyword[else] keyword[True]
identifier[train] = identifier[mx] . identifier[io] . identifier[CaffeDataIter] (
identifier[prototxt] =
literal[string] ,
identifier[flat] = identifier[flat] ,
identifier[num_examples] = literal[int]
)
identifier[val] = identifier[mx] . identifier[io] . identifier[CaffeDataIter] (
identifier[prototxt] =
literal[string] ,
identifier[flat] = identifier[flat] ,
identifier[num_examples] = literal[int] ,
identifier[dtype] = literal[string]
)
keyword[return] identifier[train] , identifier[val]
keyword[if] identifier[use_caffe_data] :
keyword[return] identifier[get_iterator_impl_caffe]
keyword[else] :
keyword[return] identifier[get_iterator_impl_mnist] | def get_iterator(data_shape, use_caffe_data):
"""Generate the iterator of mnist dataset"""
def get_iterator_impl_mnist(args, kv):
"""return train and val iterators for mnist"""
# download data
get_mnist_ubyte()
flat = False if len(data_shape) != 1 else True
train = mx.io.MNISTIter(image='data/train-images-idx3-ubyte', label='data/train-labels-idx1-ubyte', input_shape=data_shape, batch_size=args.batch_size, shuffle=True, flat=flat, num_parts=kv.num_workers, part_index=kv.rank)
val = mx.io.MNISTIter(image='data/t10k-images-idx3-ubyte', label='data/t10k-labels-idx1-ubyte', input_shape=data_shape, batch_size=args.batch_size, flat=flat, num_parts=kv.num_workers, part_index=kv.rank)
return (train, val)
def get_iterator_impl_caffe(args, kv):
flat = False if len(data_shape) != 1 else True
# float32 is the default, so left out here in order to illustrate
train = mx.io.CaffeDataIter(prototxt='layer { name: "mnist" type: "Data" top: "data" top: "label" include { phase: TRAIN } transform_param { scale: 0.00390625 } data_param { source: "mnist_train_lmdb" batch_size: 64 backend: LMDB } }', flat=flat, num_examples=60000) # float32 is the default
val = mx.io.CaffeDataIter(prototxt='layer { name: "mnist" type: "Data" top: "data" top: "label" include { phase: TEST } transform_param { scale: 0.00390625 } data_param { source: "mnist_test_lmdb" batch_size: 100 backend: LMDB } }', flat=flat, num_examples=10000, dtype='float32')
return (train, val)
if use_caffe_data:
return get_iterator_impl_caffe # depends on [control=['if'], data=[]]
else:
return get_iterator_impl_mnist |
def get_credentials(options, environment):
    """Fill in any missing credentials in ``options``, prompting the user.

    Prompting only happens when ``--username`` or ``--auth`` was given;
    whichever of the username/password is absent is asked for interactively.
    """
    wants_auth = options['--username'] or options['--auth']
    if wants_auth:
        if not options['--username']:
            options['<username>'] = lib.prompt(
                "Please enter the username for %s..." % environment)
        if not options['--password']:
            options['<password>'] = lib.prompt(
                "Please enter the password for %s..." % environment,
                secret=True)
    return options
constant[ Get credentials or prompt for them from options ]
if <ast.BoolOp object at 0x7da18ede5030> begin[:]
if <ast.UnaryOp object at 0x7da18ede5510> begin[:]
call[name[options]][constant[<username>]] assign[=] call[name[lib].prompt, parameter[binary_operation[constant[Please enter the username for %s...] <ast.Mod object at 0x7da2590d6920> name[environment]]]]
if <ast.UnaryOp object at 0x7da18ede4820> begin[:]
call[name[options]][constant[<password>]] assign[=] call[name[lib].prompt, parameter[binary_operation[constant[Please enter the password for %s...] <ast.Mod object at 0x7da2590d6920> name[environment]]]]
return[name[options]] | keyword[def] identifier[get_credentials] ( identifier[options] , identifier[environment] ):
literal[string]
keyword[if] identifier[options] [ literal[string] ] keyword[or] identifier[options] [ literal[string] ]:
keyword[if] keyword[not] identifier[options] [ literal[string] ]:
identifier[options] [ literal[string] ]= identifier[lib] . identifier[prompt] ( literal[string] % identifier[environment] )
keyword[if] keyword[not] identifier[options] [ literal[string] ]:
identifier[options] [ literal[string] ]= identifier[lib] . identifier[prompt] ( literal[string] % identifier[environment] , identifier[secret] = keyword[True] )
keyword[return] identifier[options] | def get_credentials(options, environment):
""" Get credentials or prompt for them from options """
if options['--username'] or options['--auth']:
if not options['--username']:
options['<username>'] = lib.prompt('Please enter the username for %s...' % environment) # depends on [control=['if'], data=[]]
if not options['--password']:
options['<password>'] = lib.prompt('Please enter the password for %s...' % environment, secret=True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return options |
def creators_grouped_by_role(self):
    """
    :return: A generator yielding 2-tuples of (role, [creators]) where
        adjacent creators who share the same role are grouped together.
    """
    current_role = -1
    batch = []
    for entry in self:
        if entry.role == current_role:
            batch.append(entry.creator)
            continue
        # Role changed: flush the previous run (if any) and start a new one.
        if batch:
            yield (current_role, batch)
        current_role = entry.role
        batch = [entry.creator]
    if batch:
        yield (current_role, batch)
constant[
:return: A generator yielding 2-tuples of (role, [creators]) where
adjacent creators who share the same role are grouped together.
]
variable[role] assign[=] <ast.UnaryOp object at 0x7da20c6a8fa0>
variable[creators] assign[=] list[[]]
for taget[name[wc]] in starred[name[self]] begin[:]
if compare[name[wc].role not_equal[!=] name[role]] begin[:]
if name[creators] begin[:]
<ast.Yield object at 0x7da20c6a95d0>
variable[role] assign[=] name[wc].role
variable[creators] assign[=] list[[]]
call[name[creators].append, parameter[name[wc].creator]]
if name[creators] begin[:]
<ast.Yield object at 0x7da20c6a9660> | keyword[def] identifier[creators_grouped_by_role] ( identifier[self] ):
literal[string]
identifier[role] =- literal[int]
identifier[creators] =[]
keyword[for] identifier[wc] keyword[in] identifier[self] :
keyword[if] identifier[wc] . identifier[role] != identifier[role] :
keyword[if] identifier[creators] :
keyword[yield] ( identifier[role] , identifier[creators] )
identifier[role] = identifier[wc] . identifier[role]
identifier[creators] =[]
identifier[creators] . identifier[append] ( identifier[wc] . identifier[creator] )
keyword[if] identifier[creators] :
keyword[yield] ( identifier[role] , identifier[creators] ) | def creators_grouped_by_role(self):
"""
:return: A generator yielding 2-tuples of (role, [creators]) where
adjacent creators who share the same role are grouped together.
"""
role = -1
creators = []
for wc in self:
if wc.role != role:
if creators:
yield (role, creators) # depends on [control=['if'], data=[]]
role = wc.role
creators = [] # depends on [control=['if'], data=['role']]
creators.append(wc.creator) # depends on [control=['for'], data=['wc']]
if creators:
yield (role, creators) # depends on [control=['if'], data=[]] |
def parse_at_element(
        self,
        element,  # type: ET.Element
        state  # type: _ProcessorState
):
    # type: (...) -> Any
    """Parse ``element`` as an aggregate value.

    The element is first parsed into a dictionary, which is then handed
    to the converter to build the aggregate object.
    """
    return self._converter.from_dict(
        self._dictionary.parse_at_element(element, state))
constant[Parse the provided element as an aggregate.]
variable[parsed_dict] assign[=] call[name[self]._dictionary.parse_at_element, parameter[name[element], name[state]]]
return[call[name[self]._converter.from_dict, parameter[name[parsed_dict]]]] | keyword[def] identifier[parse_at_element] (
identifier[self] ,
identifier[element] ,
identifier[state]
):
literal[string]
identifier[parsed_dict] = identifier[self] . identifier[_dictionary] . identifier[parse_at_element] ( identifier[element] , identifier[state] )
keyword[return] identifier[self] . identifier[_converter] . identifier[from_dict] ( identifier[parsed_dict] ) | def parse_at_element(self, element, state): # type: ET.Element
# type: _ProcessorState
# type: (...) -> Any
'Parse the provided element as an aggregate.'
parsed_dict = self._dictionary.parse_at_element(element, state)
return self._converter.from_dict(parsed_dict) |
def render_to_response(self, obj, **response_kwargs):
    """
    Returns an ``HttpResponse`` object instance with Content-Type:
    application/json.
    The response body will be the return value of ``self.serialize(obj)``
    """
    body = self.serialize(obj)
    return HttpResponse(body,
                        content_type='application/json',
                        **response_kwargs)
constant[
Returns an ``HttpResponse`` object instance with Content-Type:
application/json.
The response body will be the return value of ``self.serialize(obj)``
]
return[call[name[HttpResponse], parameter[call[name[self].serialize, parameter[name[obj]]]]]] | keyword[def] identifier[render_to_response] ( identifier[self] , identifier[obj] ,** identifier[response_kwargs] ):
literal[string]
keyword[return] identifier[HttpResponse] ( identifier[self] . identifier[serialize] ( identifier[obj] ), identifier[content_type] = literal[string] ,** identifier[response_kwargs] ) | def render_to_response(self, obj, **response_kwargs):
"""
Returns an ``HttpResponse`` object instance with Content-Type:
application/json.
The response body will be the return value of ``self.serialize(obj)``
"""
return HttpResponse(self.serialize(obj), content_type='application/json', **response_kwargs) |
def apply_new_scoped_variable_default_value(self, path, new_default_value_str):
    """Applies the new default value of the scoped variable defined by path
    :param str path: The path identifying the edited variable
    :param str new_default_value_str: New default value as string
    """
    port_id = self.get_list_store_row_from_cursor_selection()[self.ID_STORAGE_ID]
    try:
        scoped_variable = self.model.state.scoped_variables[port_id]
        # Only write when the value actually changed, to avoid spurious updates.
        if str(scoped_variable.default_value) != new_default_value_str:
            scoped_variable.default_value = new_default_value_str
    except (TypeError, AttributeError) as e:
        logger.error("Error while changing default value: {0}".format(e))
constant[Applies the new default value of the scoped variable defined by path
:param str path: The path identifying the edited variable
:param str new_default_value_str: New default value as string
]
variable[data_port_id] assign[=] call[call[name[self].get_list_store_row_from_cursor_selection, parameter[]]][name[self].ID_STORAGE_ID]
<ast.Try object at 0x7da1b1bbb9a0> | keyword[def] identifier[apply_new_scoped_variable_default_value] ( identifier[self] , identifier[path] , identifier[new_default_value_str] ):
literal[string]
identifier[data_port_id] = identifier[self] . identifier[get_list_store_row_from_cursor_selection] ()[ identifier[self] . identifier[ID_STORAGE_ID] ]
keyword[try] :
keyword[if] identifier[str] ( identifier[self] . identifier[model] . identifier[state] . identifier[scoped_variables] [ identifier[data_port_id] ]. identifier[default_value] )!= identifier[new_default_value_str] :
identifier[self] . identifier[model] . identifier[state] . identifier[scoped_variables] [ identifier[data_port_id] ]. identifier[default_value] = identifier[new_default_value_str]
keyword[except] ( identifier[TypeError] , identifier[AttributeError] ) keyword[as] identifier[e] :
identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[e] )) | def apply_new_scoped_variable_default_value(self, path, new_default_value_str):
"""Applies the new default value of the scoped variable defined by path
:param str path: The path identifying the edited variable
:param str new_default_value_str: New default value as string
"""
data_port_id = self.get_list_store_row_from_cursor_selection()[self.ID_STORAGE_ID]
try:
if str(self.model.state.scoped_variables[data_port_id].default_value) != new_default_value_str:
self.model.state.scoped_variables[data_port_id].default_value = new_default_value_str # depends on [control=['if'], data=['new_default_value_str']] # depends on [control=['try'], data=[]]
except (TypeError, AttributeError) as e:
logger.error('Error while changing default value: {0}'.format(e)) # depends on [control=['except'], data=['e']] |
def convert_pdf_to_txt(pdf, startpage=None):
    """Convert a pdf file to text and return the text.
    This method requires pdftotext to be installed.
    Parameters
    ----------
    pdf : str
        path to pdf file
    startpage : int, optional
        the first page we try to convert
    Returns
    -------
    str
        the converted text
    """
    command = ["pdftotext", "-q"]
    if startpage is not None:
        command += ["-f", str(startpage)]
    command += [pdf, "-"]
    output = subprocess.Popen(command,
                              stdout=subprocess.PIPE).communicate()[0]
    # python2 and 3: decode only when communicate() returned bytes
    if not isinstance(output, str):
        output = output.decode()
    return output
constant[Convert a pdf file to text and return the text.
This method requires pdftotext to be installed.
Parameters
----------
pdf : str
path to pdf file
startpage : int, optional
the first page we try to convert
Returns
-------
str
the converted text
]
if compare[name[startpage] is_not constant[None]] begin[:]
variable[startpageargs] assign[=] list[[<ast.Constant object at 0x7da20e956560>, <ast.Call object at 0x7da20e9578b0>]]
variable[stdout] assign[=] call[call[call[name[subprocess].Popen, parameter[binary_operation[binary_operation[list[[<ast.Constant object at 0x7da20e9566b0>, <ast.Constant object at 0x7da20e957fa0>]] + name[startpageargs]] + list[[<ast.Name object at 0x7da20e954e50>, <ast.Constant object at 0x7da20e957a00>]]]]].communicate, parameter[]]][constant[0]]
if <ast.UnaryOp object at 0x7da20c6e4ca0> begin[:]
variable[stdout] assign[=] call[name[stdout].decode, parameter[]]
return[name[stdout]] | keyword[def] identifier[convert_pdf_to_txt] ( identifier[pdf] , identifier[startpage] = keyword[None] ):
literal[string]
keyword[if] identifier[startpage] keyword[is] keyword[not] keyword[None] :
identifier[startpageargs] =[ literal[string] , identifier[str] ( identifier[startpage] )]
keyword[else] :
identifier[startpageargs] =[]
identifier[stdout] = identifier[subprocess] . identifier[Popen] ([ literal[string] , literal[string] ]+ identifier[startpageargs] +[ identifier[pdf] , literal[string] ],
identifier[stdout] = identifier[subprocess] . identifier[PIPE] ). identifier[communicate] ()[ literal[int] ]
keyword[if] keyword[not] identifier[isinstance] ( identifier[stdout] , identifier[str] ):
identifier[stdout] = identifier[stdout] . identifier[decode] ()
keyword[return] identifier[stdout] | def convert_pdf_to_txt(pdf, startpage=None):
"""Convert a pdf file to text and return the text.
This method requires pdftotext to be installed.
Parameters
----------
pdf : str
path to pdf file
startpage : int, optional
the first page we try to convert
Returns
-------
str
the converted text
"""
if startpage is not None:
startpageargs = ['-f', str(startpage)] # depends on [control=['if'], data=['startpage']]
else:
startpageargs = []
stdout = subprocess.Popen(['pdftotext', '-q'] + startpageargs + [pdf, '-'], stdout=subprocess.PIPE).communicate()[0]
# python2 and 3
if not isinstance(stdout, str):
stdout = stdout.decode() # depends on [control=['if'], data=[]]
return stdout |
def replace_termcodes(self, string, from_part=False, do_lt=True,
                      special=True):
    r"""Replace terminal code strings with Nvim's internal byte sequences.

    Examples of the internal key representation produced:
        <esc> -> '\x1b'
        <cr> -> '\r'
        <c-l> -> '\x0c'
        <up> -> '\x80ku'
    The result is suitable as input to `feedkeys`.
    """
    return self.request(
        'nvim_replace_termcodes', string, from_part, do_lt, special)
from_part, do_lt, special) | def function[replace_termcodes, parameter[self, string, from_part, do_lt, special]]:
constant[Replace any terminal code strings by byte sequences.
The returned sequences are Nvim's internal representation of keys,
for example:
<esc> -> '\x1b'
<cr> -> '\r'
<c-l> -> '\x0c'
<up> -> '\x80ku'
The returned sequences can be used as input to `feedkeys`.
]
return[call[name[self].request, parameter[constant[nvim_replace_termcodes], name[string], name[from_part], name[do_lt], name[special]]]] | keyword[def] identifier[replace_termcodes] ( identifier[self] , identifier[string] , identifier[from_part] = keyword[False] , identifier[do_lt] = keyword[True] ,
identifier[special] = keyword[True] ):
literal[string]
keyword[return] identifier[self] . identifier[request] ( literal[string] , identifier[string] ,
identifier[from_part] , identifier[do_lt] , identifier[special] ) | def replace_termcodes(self, string, from_part=False, do_lt=True, special=True):
"""Replace any terminal code strings by byte sequences.
The returned sequences are Nvim's internal representation of keys,
for example:
<esc> -> '\\x1b'
<cr> -> '\\r'
<c-l> -> '\\x0c'
<up> -> '\\x80ku'
The returned sequences can be used as input to `feedkeys`.
"""
return self.request('nvim_replace_termcodes', string, from_part, do_lt, special) |
def _quote(data):
"""Prepare a string for quoting for DIGEST-MD5 challenge or response.
Don't add the quotes, only escape '"' and "\\" with backslashes.
:Parameters:
- `data`: a raw string.
:Types:
- `data`: `bytes`
:return: `data` with '"' and "\\" escaped using "\\".
:returntype: `bytes`
"""
data = data.replace(b'\\', b'\\\\')
data = data.replace(b'"', b'\\"')
return data | def function[_quote, parameter[data]]:
constant[Prepare a string for quoting for DIGEST-MD5 challenge or response.
Don't add the quotes, only escape '"' and "\" with backslashes.
:Parameters:
- `data`: a raw string.
:Types:
- `data`: `bytes`
:return: `data` with '"' and "\" escaped using "\".
:returntype: `bytes`
]
variable[data] assign[=] call[name[data].replace, parameter[constant[b'\\'], constant[b'\\\\']]]
variable[data] assign[=] call[name[data].replace, parameter[constant[b'"'], constant[b'\\"']]]
return[name[data]] | keyword[def] identifier[_quote] ( identifier[data] ):
literal[string]
identifier[data] = identifier[data] . identifier[replace] ( literal[string] , literal[string] )
identifier[data] = identifier[data] . identifier[replace] ( literal[string] , literal[string] )
keyword[return] identifier[data] | def _quote(data):
"""Prepare a string for quoting for DIGEST-MD5 challenge or response.
Don't add the quotes, only escape '"' and "\\" with backslashes.
:Parameters:
- `data`: a raw string.
:Types:
- `data`: `bytes`
:return: `data` with '"' and "\\" escaped using "\\".
:returntype: `bytes`
"""
data = data.replace(b'\\', b'\\\\')
data = data.replace(b'"', b'\\"')
return data |
def remove_oxidation_states(self):
    """
    Removes oxidation states from a structure.
    """
    for site in self.sites:
        # Re-accumulate occupancies keyed by the bare Element so any
        # oxidation-state decoration on the species is dropped.
        bare_species = collections.defaultdict(float)
        for specie, occupancy in site.species.items():
            bare_species[Element(specie.symbol)] += occupancy
        site.species = bare_species
constant[
Removes oxidation states from a structure.
]
for taget[name[site]] in starred[name[self].sites] begin[:]
variable[new_sp] assign[=] call[name[collections].defaultdict, parameter[name[float]]]
for taget[tuple[[<ast.Name object at 0x7da207f9bcd0>, <ast.Name object at 0x7da207f999c0>]]] in starred[call[name[site].species.items, parameter[]]] begin[:]
variable[sym] assign[=] name[el].symbol
<ast.AugAssign object at 0x7da207f9b4c0>
name[site].species assign[=] name[new_sp] | keyword[def] identifier[remove_oxidation_states] ( identifier[self] ):
literal[string]
keyword[for] identifier[site] keyword[in] identifier[self] . identifier[sites] :
identifier[new_sp] = identifier[collections] . identifier[defaultdict] ( identifier[float] )
keyword[for] identifier[el] , identifier[occu] keyword[in] identifier[site] . identifier[species] . identifier[items] ():
identifier[sym] = identifier[el] . identifier[symbol]
identifier[new_sp] [ identifier[Element] ( identifier[sym] )]+= identifier[occu]
identifier[site] . identifier[species] = identifier[new_sp] | def remove_oxidation_states(self):
"""
Removes oxidation states from a structure.
"""
for site in self.sites:
new_sp = collections.defaultdict(float)
for (el, occu) in site.species.items():
sym = el.symbol
new_sp[Element(sym)] += occu # depends on [control=['for'], data=[]]
site.species = new_sp # depends on [control=['for'], data=['site']] |
def validate_registry_uri(uri: str) -> None:
    """
    Raise an exception if the URI does not conform to the registry URI scheme.
    """
    parsed = parse.urlparse(uri)
    validate_registry_uri_scheme(parsed.scheme)
    validate_registry_uri_authority(parsed.netloc)
    # The version query component is optional.
    if parsed.query:
        validate_registry_uri_version(parsed.query)
    # Strip the leading "/" from the path to obtain the package name.
    validate_package_name(parsed.path[1:])
constant[
Raise an exception if the URI does not conform to the registry URI scheme.
]
variable[parsed] assign[=] call[name[parse].urlparse, parameter[name[uri]]]
<ast.Tuple object at 0x7da204961570> assign[=] tuple[[<ast.Attribute object at 0x7da2049602e0>, <ast.Attribute object at 0x7da204960e80>, <ast.Attribute object at 0x7da204962380>, <ast.Attribute object at 0x7da204963430>]]
call[name[validate_registry_uri_scheme], parameter[name[scheme]]]
call[name[validate_registry_uri_authority], parameter[name[authority]]]
if name[query] begin[:]
call[name[validate_registry_uri_version], parameter[name[query]]]
call[name[validate_package_name], parameter[call[name[pkg_name]][<ast.Slice object at 0x7da2044c2140>]]] | keyword[def] identifier[validate_registry_uri] ( identifier[uri] : identifier[str] )-> keyword[None] :
literal[string]
identifier[parsed] = identifier[parse] . identifier[urlparse] ( identifier[uri] )
identifier[scheme] , identifier[authority] , identifier[pkg_name] , identifier[query] =(
identifier[parsed] . identifier[scheme] ,
identifier[parsed] . identifier[netloc] ,
identifier[parsed] . identifier[path] ,
identifier[parsed] . identifier[query] ,
)
identifier[validate_registry_uri_scheme] ( identifier[scheme] )
identifier[validate_registry_uri_authority] ( identifier[authority] )
keyword[if] identifier[query] :
identifier[validate_registry_uri_version] ( identifier[query] )
identifier[validate_package_name] ( identifier[pkg_name] [ literal[int] :]) | def validate_registry_uri(uri: str) -> None:
"""
Raise an exception if the URI does not conform to the registry URI scheme.
"""
parsed = parse.urlparse(uri)
(scheme, authority, pkg_name, query) = (parsed.scheme, parsed.netloc, parsed.path, parsed.query)
validate_registry_uri_scheme(scheme)
validate_registry_uri_authority(authority)
if query:
validate_registry_uri_version(query) # depends on [control=['if'], data=[]]
validate_package_name(pkg_name[1:]) |
def concat(input_layer, concat_dim, other_tensors=None):
    """Concatenates input PrettyTensor with other_tensors along the specified dim.
    This adds the Pretty Tensor passed via input_layer to the front of the list of
    tensors to concat.
    Args:
        input_layer: The input layer.
        concat_dim: The dimension along which to concat.
        other_tensors: The tensors to concatenate with as an iterable or None if
            this is called on a sequence.
    Returns:
        A new PrettyTensor.
    Raises:
        ValueError: If other_tensors is None and this is not a sequence.
    """
    if input_layer.is_sequence():
        tensors = input_layer.sequence
        if other_tensors:
            tensors.extend(other_tensors)
    else:
        if other_tensors is None:
            raise ValueError('Other Tensors must be supplied.')
        tensors = [input_layer]
        tensors.extend(other_tensors)
    # Edge cases really only apply when this is a sequence with 0 or 1 element.
    if not tensors:
        return prettytensor.wrap_sequence([])
    return tf.concat(tensors, concat_dim)
constant[Concatenates input PrettyTensor with other_tensors along the specified dim.
This adds the Pretty Tensor passed via input_layer to the front of the list of
tensors to concat.
Args:
input_layer: The input layer.
concat_dim: The dimension along which to concat.
other_tensors: The tensors to concatenate with as an iterable or None if
this is called on a sequence.
Returns:
A new PrettyTensor.
Raises:
ValueError: If other_tensors is None and this is not a sequence.
]
if call[name[input_layer].is_sequence, parameter[]] begin[:]
variable[all_tensors] assign[=] name[input_layer].sequence
call[name[all_tensors].extend, parameter[<ast.BoolOp object at 0x7da20c6c76a0>]]
if <ast.UnaryOp object at 0x7da2044c3610> begin[:]
return[call[name[prettytensor].wrap_sequence, parameter[list[[]]]]] | keyword[def] identifier[concat] ( identifier[input_layer] , identifier[concat_dim] , identifier[other_tensors] = keyword[None] ):
literal[string]
keyword[if] identifier[input_layer] . identifier[is_sequence] ():
identifier[all_tensors] = identifier[input_layer] . identifier[sequence]
identifier[all_tensors] . identifier[extend] ( identifier[other_tensors] keyword[or] [])
keyword[else] :
identifier[all_tensors] =[ identifier[input_layer] ]
keyword[if] identifier[other_tensors] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[all_tensors] . identifier[extend] ( identifier[other_tensors] )
keyword[if] keyword[not] identifier[all_tensors] :
keyword[return] identifier[prettytensor] . identifier[wrap_sequence] ([])
keyword[else] :
keyword[return] identifier[tf] . identifier[concat] ( identifier[all_tensors] , identifier[concat_dim] ) | def concat(input_layer, concat_dim, other_tensors=None):
"""Concatenates input PrettyTensor with other_tensors along the specified dim.
This adds the Pretty Tensor passed via input_layer to the front of the list of
tensors to concat.
Args:
input_layer: The input layer.
concat_dim: The dimension along which to concat.
other_tensors: The tensors to concatenate with as an iterable or None if
this is called on a sequence.
Returns:
A new PrettyTensor.
Raises:
ValueError: If other_tensors is None and this is not a sequence.
"""
if input_layer.is_sequence():
all_tensors = input_layer.sequence
all_tensors.extend(other_tensors or []) # depends on [control=['if'], data=[]]
else:
all_tensors = [input_layer]
if other_tensors is None:
raise ValueError('Other Tensors must be supplied.') # depends on [control=['if'], data=[]]
all_tensors.extend(other_tensors)
# Edge cases really only apply when this is a sequence with 0 or 1 element.
if not all_tensors:
return prettytensor.wrap_sequence([]) # depends on [control=['if'], data=[]]
else:
return tf.concat(all_tensors, concat_dim) |
def ijk_ljk_to_ilk(A, B):
    """
    Faster version of einsum np.einsum('ijk,ljk->ilk', A, B)
    I.e A.dot(B.T) for every dimension

    Parameters
    ----------
    A : ndarray of shape (i, j, k)
    B : ndarray of shape (l, j, k)

    Returns
    -------
    ndarray of shape (i, l, k)
    """
    depth = A.shape[-1]
    res = np.zeros((depth, A.shape[0], B.shape[0]))
    # A plain loop instead of a side-effect-only list comprehension;
    # each slice is an (i, j) x (j, l) matrix product written in place.
    for d in range(depth):
        np.dot(A[:, :, d], B[:, :, d].T, out=res[d, :, :])
    # Reorder axes from (k, i, l) to (i, l, k).
    return res.swapaxes(0, 2).swapaxes(0, 1)
constant[
Faster version of einsum np.einsum('ijk,ljk->ilk', A, B)
I.e A.dot(B.T) for every dimension
]
variable[res] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Subscript object at 0x7da20c6e7790>, <ast.Subscript object at 0x7da20c6e7bb0>, <ast.Subscript object at 0x7da20c6e7400>]]]]
<ast.ListComp object at 0x7da20c6e7d30>
variable[res] assign[=] call[call[name[res].swapaxes, parameter[constant[0], constant[2]]].swapaxes, parameter[constant[0], constant[1]]]
return[name[res]] | keyword[def] identifier[ijk_ljk_to_ilk] ( identifier[A] , identifier[B] ):
literal[string]
identifier[res] = identifier[np] . identifier[zeros] (( identifier[A] . identifier[shape] [- literal[int] ], identifier[A] . identifier[shape] [ literal[int] ], identifier[B] . identifier[shape] [ literal[int] ]))
[ identifier[np] . identifier[dot] ( identifier[A] [:,:, identifier[i] ], identifier[B] [:,:, identifier[i] ]. identifier[T] , identifier[out] = identifier[res] [ identifier[i] ,:,:]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[A] . identifier[shape] [- literal[int] ])]
identifier[res] = identifier[res] . identifier[swapaxes] ( literal[int] , literal[int] ). identifier[swapaxes] ( literal[int] , literal[int] )
keyword[return] identifier[res] | def ijk_ljk_to_ilk(A, B):
"""
Faster version of einsum np.einsum('ijk,ljk->ilk', A, B)
I.e A.dot(B.T) for every dimension
"""
res = np.zeros((A.shape[-1], A.shape[0], B.shape[0]))
[np.dot(A[:, :, i], B[:, :, i].T, out=res[i, :, :]) for i in range(A.shape[-1])]
res = res.swapaxes(0, 2).swapaxes(0, 1)
return res |
def read_file(self, start_year=None, end_year=None, use_centroid=None):
    """
    Read the GCMT (NDK) file into the catalogue.

    Every moment tensor occupies exactly 5 consecutive lines of the file.

    :param start_year:
        First year of the catalogue; if None it is inferred from the
        earliest centroid date found in the file.
    :param end_year:
        Last year of the catalogue; if None it is inferred from the
        latest centroid date found in the file.
    :param use_centroid:
        Passed through to ``to_hmtk`` - presumably selects centroid
        (rather than hypocentre) solutions; confirm against ``to_hmtk``.
    :returns:
        The populated catalogue.
    :raises IOError:
        If the number of lines in the file is not a multiple of 5.
    """
    raw_data = getlines(self.filename)
    num_lines = len(raw_data)
    # Each GCMT entry spans exactly 5 lines.  NOTE: the previous
    # float-based remainder test ((num_lines/5.) - (num_lines/5)) was
    # always ~0 under Python 3's true division, so malformed files
    # slipped through; an exact modulo test restores the check.
    if num_lines % 5:
        raise IOError('GCMT represented by 5 lines - number in file not'
                      ' a multiple of 5!')
    self.catalogue.number_gcmts = num_lines // 5
    # Pre-allocates list
    self.catalogue.gcmts = [None] * self.catalogue.number_gcmts
    id0 = 0
    print('Parsing catalogue ...')
    for iloc in range(0, self.catalogue.number_gcmts):
        self.catalogue.gcmts[iloc] = self.read_ndk_event(raw_data, id0)
        id0 += 5
    print('complete. Contains %s moment tensors'
          % self.catalogue.get_number_tensors())
    if not start_year:
        # Infer the start year from the earliest centroid date
        min_years = [cent.centroid.date.year
                     for cent in self.catalogue.gcmts]
        self.catalogue.start_year = np.min(min_years)
    if not end_year:
        # Infer the end year from the latest centroid date
        max_years = [cent.centroid.date.year
                     for cent in self.catalogue.gcmts]
        self.catalogue.end_year = np.max(max_years)
    self.to_hmtk(use_centroid)
    return self.catalogue
constant[
Reads the file
]
variable[raw_data] assign[=] call[name[getlines], parameter[name[self].filename]]
variable[num_lines] assign[=] call[name[len], parameter[name[raw_data]]]
if compare[binary_operation[binary_operation[call[name[float], parameter[name[num_lines]]] / constant[5.0]] - call[name[float], parameter[binary_operation[name[num_lines] / constant[5]]]]] greater[>] constant[1e-09]] begin[:]
<ast.Raise object at 0x7da18f00dc60>
name[self].catalogue.number_gcmts assign[=] binary_operation[name[num_lines] <ast.FloorDiv object at 0x7da2590d6bc0> constant[5]]
name[self].catalogue.gcmts assign[=] binary_operation[list[[<ast.Constant object at 0x7da18f58c580>]] * name[self].catalogue.number_gcmts]
variable[id0] assign[=] constant[0]
call[name[print], parameter[constant[Parsing catalogue ...]]]
for taget[name[iloc]] in starred[call[name[range], parameter[constant[0], name[self].catalogue.number_gcmts]]] begin[:]
call[name[self].catalogue.gcmts][name[iloc]] assign[=] call[name[self].read_ndk_event, parameter[name[raw_data], name[id0]]]
<ast.AugAssign object at 0x7da20e956620>
call[name[print], parameter[binary_operation[constant[complete. Contains %s moment tensors] <ast.Mod object at 0x7da2590d6920> call[name[self].catalogue.get_number_tensors, parameter[]]]]]
if <ast.UnaryOp object at 0x7da20e9552d0> begin[:]
variable[min_years] assign[=] list[[]]
variable[min_years] assign[=] <ast.ListComp object at 0x7da20e957820>
name[self].catalogue.start_year assign[=] call[name[np].min, parameter[name[min_years]]]
if <ast.UnaryOp object at 0x7da204623040> begin[:]
variable[max_years] assign[=] list[[]]
variable[max_years] assign[=] <ast.ListComp object at 0x7da204621ea0>
name[self].catalogue.end_year assign[=] call[name[np].max, parameter[name[max_years]]]
call[name[self].to_hmtk, parameter[name[use_centroid]]]
return[name[self].catalogue] | keyword[def] identifier[read_file] ( identifier[self] , identifier[start_year] = keyword[None] , identifier[end_year] = keyword[None] , identifier[use_centroid] = keyword[None] ):
literal[string]
identifier[raw_data] = identifier[getlines] ( identifier[self] . identifier[filename] )
identifier[num_lines] = identifier[len] ( identifier[raw_data] )
keyword[if] (( identifier[float] ( identifier[num_lines] )/ literal[int] )- identifier[float] ( identifier[num_lines] / literal[int] ))> literal[int] :
keyword[raise] identifier[IOError] ( literal[string]
literal[string] )
identifier[self] . identifier[catalogue] . identifier[number_gcmts] = identifier[num_lines] // literal[int]
identifier[self] . identifier[catalogue] . identifier[gcmts] =[ keyword[None] ]* identifier[self] . identifier[catalogue] . identifier[number_gcmts]
identifier[id0] = literal[int]
identifier[print] ( literal[string] )
keyword[for] identifier[iloc] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[catalogue] . identifier[number_gcmts] ):
identifier[self] . identifier[catalogue] . identifier[gcmts] [ identifier[iloc] ]= identifier[self] . identifier[read_ndk_event] ( identifier[raw_data] , identifier[id0] )
identifier[id0] += literal[int]
identifier[print] ( literal[string]
% identifier[self] . identifier[catalogue] . identifier[get_number_tensors] ())
keyword[if] keyword[not] identifier[start_year] :
identifier[min_years] =[]
identifier[min_years] =[ identifier[cent] . identifier[centroid] . identifier[date] . identifier[year]
keyword[for] identifier[cent] keyword[in] identifier[self] . identifier[catalogue] . identifier[gcmts] ]
identifier[self] . identifier[catalogue] . identifier[start_year] = identifier[np] . identifier[min] ( identifier[min_years] )
keyword[if] keyword[not] identifier[end_year] :
identifier[max_years] =[]
identifier[max_years] =[ identifier[cent] . identifier[centroid] . identifier[date] . identifier[year]
keyword[for] identifier[cent] keyword[in] identifier[self] . identifier[catalogue] . identifier[gcmts] ]
identifier[self] . identifier[catalogue] . identifier[end_year] = identifier[np] . identifier[max] ( identifier[max_years] )
identifier[self] . identifier[to_hmtk] ( identifier[use_centroid] )
keyword[return] identifier[self] . identifier[catalogue] | def read_file(self, start_year=None, end_year=None, use_centroid=None):
"""
Reads the file
"""
raw_data = getlines(self.filename)
num_lines = len(raw_data)
if float(num_lines) / 5.0 - float(num_lines / 5) > 1e-09:
raise IOError('GCMT represented by 5 lines - number in file not a multiple of 5!') # depends on [control=['if'], data=[]]
self.catalogue.number_gcmts = num_lines // 5
self.catalogue.gcmts = [None] * self.catalogue.number_gcmts
# Pre-allocates list
id0 = 0
print('Parsing catalogue ...')
for iloc in range(0, self.catalogue.number_gcmts):
self.catalogue.gcmts[iloc] = self.read_ndk_event(raw_data, id0)
id0 += 5 # depends on [control=['for'], data=['iloc']]
print('complete. Contains %s moment tensors' % self.catalogue.get_number_tensors())
if not start_year:
min_years = []
min_years = [cent.centroid.date.year for cent in self.catalogue.gcmts]
self.catalogue.start_year = np.min(min_years) # depends on [control=['if'], data=[]]
if not end_year:
max_years = []
max_years = [cent.centroid.date.year for cent in self.catalogue.gcmts]
self.catalogue.end_year = np.max(max_years) # depends on [control=['if'], data=[]]
self.to_hmtk(use_centroid)
return self.catalogue |
def update_summary_qc(data, key, base=None, secondary=None):
    """
    updates summary_qc with a new section, keyed by key.
    stick files into summary_qc if you want them propagated forward
    and available for multiqc
    """
    summary = get_summary_qc(data, {})
    # Only record the entries that were actually supplied; if neither
    # base nor secondary is given, the summary is left untouched.
    entry = {}
    if base:
        entry["base"] = base
    if secondary:
        entry["secondary"] = secondary
    if entry:
        summary[key] = entry
    return set_summary_qc(data, summary)
constant[
updates summary_qc with a new section, keyed by key.
stick files into summary_qc if you want them propagated forward
and available for multiqc
]
variable[summary] assign[=] call[name[get_summary_qc], parameter[name[data], dictionary[[], []]]]
if <ast.BoolOp object at 0x7da1b1894d60> begin[:]
call[name[summary]][name[key]] assign[=] dictionary[[<ast.Constant object at 0x7da1b1895930>, <ast.Constant object at 0x7da1b1897f40>], [<ast.Name object at 0x7da1b1896860>, <ast.Name object at 0x7da1b1894df0>]]
variable[data] assign[=] call[name[set_summary_qc], parameter[name[data], name[summary]]]
return[name[data]] | keyword[def] identifier[update_summary_qc] ( identifier[data] , identifier[key] , identifier[base] = keyword[None] , identifier[secondary] = keyword[None] ):
literal[string]
identifier[summary] = identifier[get_summary_qc] ( identifier[data] ,{})
keyword[if] identifier[base] keyword[and] identifier[secondary] :
identifier[summary] [ identifier[key] ]={ literal[string] : identifier[base] , literal[string] : identifier[secondary] }
keyword[elif] identifier[base] :
identifier[summary] [ identifier[key] ]={ literal[string] : identifier[base] }
keyword[elif] identifier[secondary] :
identifier[summary] [ identifier[key] ]={ literal[string] : identifier[secondary] }
identifier[data] = identifier[set_summary_qc] ( identifier[data] , identifier[summary] )
keyword[return] identifier[data] | def update_summary_qc(data, key, base=None, secondary=None):
"""
updates summary_qc with a new section, keyed by key.
stick files into summary_qc if you want them propagated forward
and available for multiqc
"""
summary = get_summary_qc(data, {})
if base and secondary:
summary[key] = {'base': base, 'secondary': secondary} # depends on [control=['if'], data=[]]
elif base:
summary[key] = {'base': base} # depends on [control=['if'], data=[]]
elif secondary:
summary[key] = {'secondary': secondary} # depends on [control=['if'], data=[]]
data = set_summary_qc(data, summary)
return data |
def freeze_dict(dict_):
    """Freeze ``dict`` into a ``tuple`` of pairs sorted by key.

    A typical usage is packing ``dict`` into hashable.
    e.g.::
        >>> freeze_dict({'a': 1, 'b': 2})
        (('a', 1), ('b', 2))
    """
    # Sort on the key only; dict keys are unique, so values never
    # participate in the comparison.
    return tuple(sorted(dict_.items(), key=lambda pair: pair[0]))
constant[Freezes ``dict`` into ``tuple``.
A typical usage is packing ``dict`` into hashable.
e.g.::
>>> freeze_dict({'a': 1, 'b': 2})
(('a', 1), ('b', 2))
]
variable[pairs] assign[=] call[name[dict_].items, parameter[]]
variable[key_getter] assign[=] call[name[operator].itemgetter, parameter[constant[0]]]
return[call[name[tuple], parameter[call[name[sorted], parameter[name[pairs]]]]]] | keyword[def] identifier[freeze_dict] ( identifier[dict_] ):
literal[string]
identifier[pairs] = identifier[dict_] . identifier[items] ()
identifier[key_getter] = identifier[operator] . identifier[itemgetter] ( literal[int] )
keyword[return] identifier[tuple] ( identifier[sorted] ( identifier[pairs] , identifier[key] = identifier[key_getter] )) | def freeze_dict(dict_):
"""Freezes ``dict`` into ``tuple``.
A typical usage is packing ``dict`` into hashable.
e.g.::
>>> freeze_dict({'a': 1, 'b': 2})
(('a', 1), ('b', 2))
"""
pairs = dict_.items()
key_getter = operator.itemgetter(0)
return tuple(sorted(pairs, key=key_getter)) |
def compute_BtBinv(B, C):
    """Create block inverses.

    Helper function that creates inv(B_i.T B_i) for each block row i in C,
    where B_i is B restricted to the sparsity pattern of block row i.

    Parameters
    ----------
    B : {array}
        (M,k) array, typically near-nullspace modes for coarse grid, i.e., B_c.
    C : {csr_matrix, bsr_matrix}
        Sparse NxM matrix, whose sparsity structure (i.e., matrix graph)
        is used to determine BtBinv.

    Returns
    -------
    BtBinv : {array}
        BtBinv[i] = inv(B_i.T B_i), where B_i is B restricted to the nonzero
        pattern of block row i in C.

    Examples
    --------
    >>> from numpy import array
    >>> from scipy.sparse import bsr_matrix
    >>> from pyamg.util.utils import compute_BtBinv
    >>> T = array([[ 1., 0.],
    ...            [ 1., 0.],
    ...            [ 0., .5],
    ...            [ 0., .25]])
    >>> T = bsr_matrix(T)
    >>> B = array([[1.],[2.]])
    >>> compute_BtBinv(B, T)
    array([[[ 1.  ]],
    <BLANKLINE>
           [[ 1.  ]],
    <BLANKLINE>
           [[ 0.25]],
    <BLANKLINE>
           [[ 0.25]]])

    Notes
    -----
    The principal calling routines are
    aggregation.smooth.energy_prolongation_smoother, and
    util.utils.filter_operator.
    BtBinv is used in the prolongation smoothing process that incorporates B
    into the span of prolongation with row-wise projection operators. It is
    these projection operators that BtBinv is part of.
    """
    if not isspmatrix_bsr(C) and not isspmatrix_csr(C):
        raise TypeError('Expected bsr_matrix or csr_matrix for C')
    if C.shape[1] != B.shape[0]:
        raise TypeError('Expected matching dimensions such that C*B')
    # Problem parameters
    # A CSR matrix is treated as BSR with 1x1 blocks.
    if isspmatrix_bsr(C):
        ColsPerBlock = C.blocksize[1]
        RowsPerBlock = C.blocksize[0]
    else:
        ColsPerBlock = 1
        RowsPerBlock = 1
    Ncoarse = C.shape[1]
    Nfine = C.shape[0]
    NullDim = B.shape[1]
    # Number of block rows in C
    Nnodes = int(Nfine/RowsPerBlock)
    # Construct BtB
    # One NullDim x NullDim Gram block per node, filled by the C kernel below.
    BtBinv = np.zeros((Nnodes, NullDim, NullDim), dtype=B.dtype)
    # Bsq packs, per coarse dof, the upper-triangular column products
    # conj(B[:, i]) * B[:, j] (i <= j) -- NullDim*(NullDim+1)/2 columns --
    # so the kernel can assemble each Hermitian B_i^H B_i without
    # recomputing symmetric entries.
    BsqCols = sum(range(NullDim+1))
    Bsq = np.zeros((Ncoarse, BsqCols), dtype=B.dtype)
    counter = 0
    for i in range(NullDim):
        for j in range(i, NullDim):
            Bsq[:, counter] = np.conjugate(np.ravel(np.asarray(B[:, i]))) * \
                np.ravel(np.asarray(B[:, j]))
            counter = counter + 1
    # This specialized C-routine calculates (B.T B) for each row using Bsq
    pyamg.amg_core.calc_BtB(NullDim, Nnodes, ColsPerBlock,
                            np.ravel(np.asarray(Bsq)),
                            BsqCols, np.ravel(np.asarray(BtBinv)),
                            C.indptr, C.indices)
    # Invert each block of BtBinv, noting that amg_core.calc_BtB(...) returns
    # values in column-major form, thus necessitating the deep transpose
    # This is the old call to a specialized routine, but lacks robustness
    # pyamg.amg_core.pinv_array(np.ravel(BtBinv), Nnodes, NullDim, 'F')
    BtBinv = BtBinv.transpose((0, 2, 1)).copy()
    # In-place pseudo-inverse of every NullDim x NullDim block.
    pinv_array(BtBinv)
    return BtBinv
constant[Create block inverses.
Helper function that creates inv(B_i.T B_i) for each block row i in C,
where B_i is B restricted to the sparsity pattern of block row i.
Parameters
----------
B : {array}
(M,k) array, typically near-nullspace modes for coarse grid, i.e., B_c.
C : {csr_matrix, bsr_matrix}
Sparse NxM matrix, whose sparsity structure (i.e., matrix graph)
is used to determine BtBinv.
Returns
-------
BtBinv : {array}
BtBinv[i] = inv(B_i.T B_i), where B_i is B restricted to the nonzero
pattern of block row i in C.
Examples
--------
>>> from numpy import array
>>> from scipy.sparse import bsr_matrix
>>> from pyamg.util.utils import compute_BtBinv
>>> T = array([[ 1., 0.],
... [ 1., 0.],
... [ 0., .5],
... [ 0., .25]])
>>> T = bsr_matrix(T)
>>> B = array([[1.],[2.]])
>>> compute_BtBinv(B, T)
array([[[ 1. ]],
<BLANKLINE>
[[ 1. ]],
<BLANKLINE>
[[ 0.25]],
<BLANKLINE>
[[ 0.25]]])
Notes
-----
The principal calling routines are
aggregation.smooth.energy_prolongation_smoother, and
util.utils.filter_operator.
BtBinv is used in the prolongation smoothing process that incorporates B
into the span of prolongation with row-wise projection operators. It is
these projection operators that BtBinv is part of.
]
if <ast.BoolOp object at 0x7da1b08d8400> begin[:]
<ast.Raise object at 0x7da1b08db3a0>
if compare[call[name[C].shape][constant[1]] not_equal[!=] call[name[B].shape][constant[0]]] begin[:]
<ast.Raise object at 0x7da1b08d86d0>
if call[name[isspmatrix_bsr], parameter[name[C]]] begin[:]
variable[ColsPerBlock] assign[=] call[name[C].blocksize][constant[1]]
variable[RowsPerBlock] assign[=] call[name[C].blocksize][constant[0]]
variable[Ncoarse] assign[=] call[name[C].shape][constant[1]]
variable[Nfine] assign[=] call[name[C].shape][constant[0]]
variable[NullDim] assign[=] call[name[B].shape][constant[1]]
variable[Nnodes] assign[=] call[name[int], parameter[binary_operation[name[Nfine] / name[RowsPerBlock]]]]
variable[BtBinv] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b08a6890>, <ast.Name object at 0x7da1b08a6410>, <ast.Name object at 0x7da1b08a5c60>]]]]
variable[BsqCols] assign[=] call[name[sum], parameter[call[name[range], parameter[binary_operation[name[NullDim] + constant[1]]]]]]
variable[Bsq] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b2347910>, <ast.Name object at 0x7da1b2345d80>]]]]
variable[counter] assign[=] constant[0]
for taget[name[i]] in starred[call[name[range], parameter[name[NullDim]]]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[name[i], name[NullDim]]]] begin[:]
call[name[Bsq]][tuple[[<ast.Slice object at 0x7da1b2347610>, <ast.Name object at 0x7da1b23446a0>]]] assign[=] binary_operation[call[name[np].conjugate, parameter[call[name[np].ravel, parameter[call[name[np].asarray, parameter[call[name[B]][tuple[[<ast.Slice object at 0x7da1b2344d30>, <ast.Name object at 0x7da1b2345780>]]]]]]]]] * call[name[np].ravel, parameter[call[name[np].asarray, parameter[call[name[B]][tuple[[<ast.Slice object at 0x7da1b2346fe0>, <ast.Name object at 0x7da1b2344dc0>]]]]]]]]
variable[counter] assign[=] binary_operation[name[counter] + constant[1]]
call[name[pyamg].amg_core.calc_BtB, parameter[name[NullDim], name[Nnodes], name[ColsPerBlock], call[name[np].ravel, parameter[call[name[np].asarray, parameter[name[Bsq]]]]], name[BsqCols], call[name[np].ravel, parameter[call[name[np].asarray, parameter[name[BtBinv]]]]], name[C].indptr, name[C].indices]]
variable[BtBinv] assign[=] call[call[name[BtBinv].transpose, parameter[tuple[[<ast.Constant object at 0x7da1b2347b50>, <ast.Constant object at 0x7da1b2345870>, <ast.Constant object at 0x7da1b2344d60>]]]].copy, parameter[]]
call[name[pinv_array], parameter[name[BtBinv]]]
return[name[BtBinv]] | keyword[def] identifier[compute_BtBinv] ( identifier[B] , identifier[C] ):
literal[string]
keyword[if] keyword[not] identifier[isspmatrix_bsr] ( identifier[C] ) keyword[and] keyword[not] identifier[isspmatrix_csr] ( identifier[C] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[C] . identifier[shape] [ literal[int] ]!= identifier[B] . identifier[shape] [ literal[int] ]:
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[isspmatrix_bsr] ( identifier[C] ):
identifier[ColsPerBlock] = identifier[C] . identifier[blocksize] [ literal[int] ]
identifier[RowsPerBlock] = identifier[C] . identifier[blocksize] [ literal[int] ]
keyword[else] :
identifier[ColsPerBlock] = literal[int]
identifier[RowsPerBlock] = literal[int]
identifier[Ncoarse] = identifier[C] . identifier[shape] [ literal[int] ]
identifier[Nfine] = identifier[C] . identifier[shape] [ literal[int] ]
identifier[NullDim] = identifier[B] . identifier[shape] [ literal[int] ]
identifier[Nnodes] = identifier[int] ( identifier[Nfine] / identifier[RowsPerBlock] )
identifier[BtBinv] = identifier[np] . identifier[zeros] (( identifier[Nnodes] , identifier[NullDim] , identifier[NullDim] ), identifier[dtype] = identifier[B] . identifier[dtype] )
identifier[BsqCols] = identifier[sum] ( identifier[range] ( identifier[NullDim] + literal[int] ))
identifier[Bsq] = identifier[np] . identifier[zeros] (( identifier[Ncoarse] , identifier[BsqCols] ), identifier[dtype] = identifier[B] . identifier[dtype] )
identifier[counter] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[NullDim] ):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[i] , identifier[NullDim] ):
identifier[Bsq] [:, identifier[counter] ]= identifier[np] . identifier[conjugate] ( identifier[np] . identifier[ravel] ( identifier[np] . identifier[asarray] ( identifier[B] [:, identifier[i] ])))* identifier[np] . identifier[ravel] ( identifier[np] . identifier[asarray] ( identifier[B] [:, identifier[j] ]))
identifier[counter] = identifier[counter] + literal[int]
identifier[pyamg] . identifier[amg_core] . identifier[calc_BtB] ( identifier[NullDim] , identifier[Nnodes] , identifier[ColsPerBlock] ,
identifier[np] . identifier[ravel] ( identifier[np] . identifier[asarray] ( identifier[Bsq] )),
identifier[BsqCols] , identifier[np] . identifier[ravel] ( identifier[np] . identifier[asarray] ( identifier[BtBinv] )),
identifier[C] . identifier[indptr] , identifier[C] . identifier[indices] )
identifier[BtBinv] = identifier[BtBinv] . identifier[transpose] (( literal[int] , literal[int] , literal[int] )). identifier[copy] ()
identifier[pinv_array] ( identifier[BtBinv] )
keyword[return] identifier[BtBinv] | def compute_BtBinv(B, C):
"""Create block inverses.
Helper function that creates inv(B_i.T B_i) for each block row i in C,
where B_i is B restricted to the sparsity pattern of block row i.
Parameters
----------
B : {array}
(M,k) array, typically near-nullspace modes for coarse grid, i.e., B_c.
C : {csr_matrix, bsr_matrix}
Sparse NxM matrix, whose sparsity structure (i.e., matrix graph)
is used to determine BtBinv.
Returns
-------
BtBinv : {array}
BtBinv[i] = inv(B_i.T B_i), where B_i is B restricted to the nonzero
pattern of block row i in C.
Examples
--------
>>> from numpy import array
>>> from scipy.sparse import bsr_matrix
>>> from pyamg.util.utils import compute_BtBinv
>>> T = array([[ 1., 0.],
... [ 1., 0.],
... [ 0., .5],
... [ 0., .25]])
>>> T = bsr_matrix(T)
>>> B = array([[1.],[2.]])
>>> compute_BtBinv(B, T)
array([[[ 1. ]],
<BLANKLINE>
[[ 1. ]],
<BLANKLINE>
[[ 0.25]],
<BLANKLINE>
[[ 0.25]]])
Notes
-----
The principal calling routines are
aggregation.smooth.energy_prolongation_smoother, and
util.utils.filter_operator.
BtBinv is used in the prolongation smoothing process that incorporates B
into the span of prolongation with row-wise projection operators. It is
these projection operators that BtBinv is part of.
"""
if not isspmatrix_bsr(C) and (not isspmatrix_csr(C)):
raise TypeError('Expected bsr_matrix or csr_matrix for C') # depends on [control=['if'], data=[]]
if C.shape[1] != B.shape[0]:
raise TypeError('Expected matching dimensions such that C*B') # depends on [control=['if'], data=[]]
# Problem parameters
if isspmatrix_bsr(C):
ColsPerBlock = C.blocksize[1]
RowsPerBlock = C.blocksize[0] # depends on [control=['if'], data=[]]
else:
ColsPerBlock = 1
RowsPerBlock = 1
Ncoarse = C.shape[1]
Nfine = C.shape[0]
NullDim = B.shape[1]
Nnodes = int(Nfine / RowsPerBlock)
# Construct BtB
BtBinv = np.zeros((Nnodes, NullDim, NullDim), dtype=B.dtype)
BsqCols = sum(range(NullDim + 1))
Bsq = np.zeros((Ncoarse, BsqCols), dtype=B.dtype)
counter = 0
for i in range(NullDim):
for j in range(i, NullDim):
Bsq[:, counter] = np.conjugate(np.ravel(np.asarray(B[:, i]))) * np.ravel(np.asarray(B[:, j]))
counter = counter + 1 # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
# This specialized C-routine calculates (B.T B) for each row using Bsq
pyamg.amg_core.calc_BtB(NullDim, Nnodes, ColsPerBlock, np.ravel(np.asarray(Bsq)), BsqCols, np.ravel(np.asarray(BtBinv)), C.indptr, C.indices)
# Invert each block of BtBinv, noting that amg_core.calc_BtB(...) returns
# values in column-major form, thus necessitating the deep transpose
# This is the old call to a specialized routine, but lacks robustness
# pyamg.amg_core.pinv_array(np.ravel(BtBinv), Nnodes, NullDim, 'F')
BtBinv = BtBinv.transpose((0, 2, 1)).copy()
pinv_array(BtBinv)
return BtBinv |
def import_freesurfer_LUT(fs_lut=None):
"""Import Look-up Table with colors and labels for anatomical regions.
It's necessary that Freesurfer is installed and that the environmental
variable 'FREESURFER_HOME' is present.
Parameters
----------
fs_lut : str or Path
path to file called FreeSurferColorLUT.txt
Returns
-------
idx : list of int
indices of regions
label : list of str
names of the brain regions
rgba : numpy.ndarray
one row is a brain region and the columns are the RGB + alpha colors
"""
if fs_lut is not None:
lg.info('Reading user-specified lookuptable {}'.format(fs_lut))
fs_lut = Path(fs_lut)
else:
try:
fs_home = environ['FREESURFER_HOME']
except KeyError:
raise OSError('Freesurfer is not installed or FREESURFER_HOME is '
'not defined as environmental variable')
else:
fs_lut = Path(fs_home) / 'FreeSurferColorLUT.txt'
lg.info('Reading lookuptable in FREESURFER_HOME {}'.format(fs_lut))
idx = []
label = []
rgba = empty((0, 4))
with fs_lut.open('r') as f:
for l in f:
if len(l) <= 1 or l[0] == '#' or l[0] == '\r':
continue
(t0, t1, t2, t3, t4, t5) = [t(s) for t, s in
zip((int, str, int, int, int, int),
l.split())]
idx.append(t0)
label.append(t1)
rgba = vstack((rgba, array([t2, t3, t4, t5])))
return idx, label, rgba | def function[import_freesurfer_LUT, parameter[fs_lut]]:
constant[Import Look-up Table with colors and labels for anatomical regions.
It's necessary that Freesurfer is installed and that the environmental
variable 'FREESURFER_HOME' is present.
Parameters
----------
fs_lut : str or Path
path to file called FreeSurferColorLUT.txt
Returns
-------
idx : list of int
indices of regions
label : list of str
names of the brain regions
rgba : numpy.ndarray
one row is a brain region and the columns are the RGB + alpha colors
]
if compare[name[fs_lut] is_not constant[None]] begin[:]
call[name[lg].info, parameter[call[constant[Reading user-specified lookuptable {}].format, parameter[name[fs_lut]]]]]
variable[fs_lut] assign[=] call[name[Path], parameter[name[fs_lut]]]
variable[idx] assign[=] list[[]]
variable[label] assign[=] list[[]]
variable[rgba] assign[=] call[name[empty], parameter[tuple[[<ast.Constant object at 0x7da1b0dee6e0>, <ast.Constant object at 0x7da18c4cf160>]]]]
with call[name[fs_lut].open, parameter[constant[r]]] begin[:]
for taget[name[l]] in starred[name[f]] begin[:]
if <ast.BoolOp object at 0x7da18c4cd4b0> begin[:]
continue
<ast.Tuple object at 0x7da18c4ccdc0> assign[=] <ast.ListComp object at 0x7da18c4cf820>
call[name[idx].append, parameter[name[t0]]]
call[name[label].append, parameter[name[t1]]]
variable[rgba] assign[=] call[name[vstack], parameter[tuple[[<ast.Name object at 0x7da18c4cc850>, <ast.Call object at 0x7da1b0e05420>]]]]
return[tuple[[<ast.Name object at 0x7da1b0e079d0>, <ast.Name object at 0x7da1b0e04130>, <ast.Name object at 0x7da1b0e071f0>]]] | keyword[def] identifier[import_freesurfer_LUT] ( identifier[fs_lut] = keyword[None] ):
literal[string]
keyword[if] identifier[fs_lut] keyword[is] keyword[not] keyword[None] :
identifier[lg] . identifier[info] ( literal[string] . identifier[format] ( identifier[fs_lut] ))
identifier[fs_lut] = identifier[Path] ( identifier[fs_lut] )
keyword[else] :
keyword[try] :
identifier[fs_home] = identifier[environ] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[OSError] ( literal[string]
literal[string] )
keyword[else] :
identifier[fs_lut] = identifier[Path] ( identifier[fs_home] )/ literal[string]
identifier[lg] . identifier[info] ( literal[string] . identifier[format] ( identifier[fs_lut] ))
identifier[idx] =[]
identifier[label] =[]
identifier[rgba] = identifier[empty] (( literal[int] , literal[int] ))
keyword[with] identifier[fs_lut] . identifier[open] ( literal[string] ) keyword[as] identifier[f] :
keyword[for] identifier[l] keyword[in] identifier[f] :
keyword[if] identifier[len] ( identifier[l] )<= literal[int] keyword[or] identifier[l] [ literal[int] ]== literal[string] keyword[or] identifier[l] [ literal[int] ]== literal[string] :
keyword[continue]
( identifier[t0] , identifier[t1] , identifier[t2] , identifier[t3] , identifier[t4] , identifier[t5] )=[ identifier[t] ( identifier[s] ) keyword[for] identifier[t] , identifier[s] keyword[in]
identifier[zip] (( identifier[int] , identifier[str] , identifier[int] , identifier[int] , identifier[int] , identifier[int] ),
identifier[l] . identifier[split] ())]
identifier[idx] . identifier[append] ( identifier[t0] )
identifier[label] . identifier[append] ( identifier[t1] )
identifier[rgba] = identifier[vstack] (( identifier[rgba] , identifier[array] ([ identifier[t2] , identifier[t3] , identifier[t4] , identifier[t5] ])))
keyword[return] identifier[idx] , identifier[label] , identifier[rgba] | def import_freesurfer_LUT(fs_lut=None):
"""Import Look-up Table with colors and labels for anatomical regions.
It's necessary that Freesurfer is installed and that the environmental
variable 'FREESURFER_HOME' is present.
Parameters
----------
fs_lut : str or Path
path to file called FreeSurferColorLUT.txt
Returns
-------
idx : list of int
indices of regions
label : list of str
names of the brain regions
rgba : numpy.ndarray
one row is a brain region and the columns are the RGB + alpha colors
"""
if fs_lut is not None:
lg.info('Reading user-specified lookuptable {}'.format(fs_lut))
fs_lut = Path(fs_lut) # depends on [control=['if'], data=['fs_lut']]
else:
try:
fs_home = environ['FREESURFER_HOME'] # depends on [control=['try'], data=[]]
except KeyError:
raise OSError('Freesurfer is not installed or FREESURFER_HOME is not defined as environmental variable') # depends on [control=['except'], data=[]]
else:
fs_lut = Path(fs_home) / 'FreeSurferColorLUT.txt'
lg.info('Reading lookuptable in FREESURFER_HOME {}'.format(fs_lut))
idx = []
label = []
rgba = empty((0, 4))
with fs_lut.open('r') as f:
for l in f:
if len(l) <= 1 or l[0] == '#' or l[0] == '\r':
continue # depends on [control=['if'], data=[]]
(t0, t1, t2, t3, t4, t5) = [t(s) for (t, s) in zip((int, str, int, int, int, int), l.split())]
idx.append(t0)
label.append(t1)
rgba = vstack((rgba, array([t2, t3, t4, t5]))) # depends on [control=['for'], data=['l']] # depends on [control=['with'], data=['f']]
return (idx, label, rgba) |
def _populate_cparams(self, img_array, mct=None, cratios=None, psnr=None,
cinema2k=None, cinema4k=None, irreversible=None,
cbsize=None, eph=None, grid_offset=None, modesw=None,
numres=None, prog=None, psizes=None, sop=None,
subsam=None, tilesize=None, colorspace=None):
"""Directs processing of write method arguments.
Parameters
----------
img_array : ndarray
Image data to be written to file.
kwargs : dictionary
Non-image keyword inputs provided to write method.
"""
other_args = (mct, cratios, psnr, irreversible, cbsize, eph,
grid_offset, modesw, numres, prog, psizes, sop, subsam)
if (((cinema2k is not None or cinema4k is not None) and
(not all([arg is None for arg in other_args])))):
msg = ("Cannot specify cinema2k/cinema4k along with any other "
"options.")
raise IOError(msg)
if cratios is not None and psnr is not None:
msg = "Cannot specify cratios and psnr options together."
raise IOError(msg)
if version.openjpeg_version_tuple[0] == 1:
cparams = opj.set_default_encoder_parameters()
else:
cparams = opj2.set_default_encoder_parameters()
outfile = self.filename.encode()
num_pad_bytes = opj2.PATH_LEN - len(outfile)
outfile += b'0' * num_pad_bytes
cparams.outfile = outfile
if self.filename[-4:].endswith(('.jp2', '.JP2')):
cparams.codec_fmt = opj2.CODEC_JP2
else:
cparams.codec_fmt = opj2.CODEC_J2K
# Set defaults to lossless to begin.
cparams.tcp_rates[0] = 0
cparams.tcp_numlayers = 1
cparams.cp_disto_alloc = 1
cparams.irreversible = 1 if irreversible else 0
if cinema2k is not None:
self._cparams = cparams
self._set_cinema_params('cinema2k', cinema2k)
return
if cinema4k is not None:
self._cparams = cparams
self._set_cinema_params('cinema4k', cinema4k)
return
if cbsize is not None:
cparams.cblockw_init = cbsize[1]
cparams.cblockh_init = cbsize[0]
if cratios is not None:
cparams.tcp_numlayers = len(cratios)
for j, cratio in enumerate(cratios):
cparams.tcp_rates[j] = cratio
cparams.cp_disto_alloc = 1
cparams.csty |= 0x02 if sop else 0
cparams.csty |= 0x04 if eph else 0
if grid_offset is not None:
cparams.image_offset_x0 = grid_offset[1]
cparams.image_offset_y0 = grid_offset[0]
if modesw is not None:
for shift in range(6):
power_of_two = 1 << shift
if modesw & power_of_two:
cparams.mode |= power_of_two
if numres is not None:
cparams.numresolution = numres
if prog is not None:
cparams.prog_order = core.PROGRESSION_ORDER[prog.upper()]
if psnr is not None:
cparams.tcp_numlayers = len(psnr)
for j, snr_layer in enumerate(psnr):
cparams.tcp_distoratio[j] = snr_layer
cparams.cp_fixed_quality = 1
if psizes is not None:
for j, (prch, prcw) in enumerate(psizes):
cparams.prcw_init[j] = prcw
cparams.prch_init[j] = prch
cparams.csty |= 0x01
cparams.res_spec = len(psizes)
if subsam is not None:
cparams.subsampling_dy = subsam[0]
cparams.subsampling_dx = subsam[1]
if tilesize is not None:
cparams.cp_tdx = tilesize[1]
cparams.cp_tdy = tilesize[0]
cparams.tile_size_on = opj2.TRUE
if mct is None:
# If the multi component transform was not specified, we infer
# that it should be used if the color space is RGB.
cparams.tcp_mct = 1 if self._colorspace == opj2.CLRSPC_SRGB else 0
else:
if self._colorspace == opj2.CLRSPC_GRAY:
msg = ("Cannot specify usage of the multi component transform "
"if the colorspace is gray.")
raise IOError(msg)
cparams.tcp_mct = 1 if mct else 0
self._validate_compression_params(img_array, cparams, colorspace)
self._cparams = cparams | def function[_populate_cparams, parameter[self, img_array, mct, cratios, psnr, cinema2k, cinema4k, irreversible, cbsize, eph, grid_offset, modesw, numres, prog, psizes, sop, subsam, tilesize, colorspace]]:
constant[Directs processing of write method arguments.
Parameters
----------
img_array : ndarray
Image data to be written to file.
kwargs : dictionary
Non-image keyword inputs provided to write method.
]
variable[other_args] assign[=] tuple[[<ast.Name object at 0x7da18bccac80>, <ast.Name object at 0x7da18bcc9c30>, <ast.Name object at 0x7da18bccbeb0>, <ast.Name object at 0x7da18bccbe50>, <ast.Name object at 0x7da18bccb040>, <ast.Name object at 0x7da18bcc84c0>, <ast.Name object at 0x7da18bcca290>, <ast.Name object at 0x7da18bcc9330>, <ast.Name object at 0x7da18bcc9570>, <ast.Name object at 0x7da18bcc9d50>, <ast.Name object at 0x7da18bcc99f0>, <ast.Name object at 0x7da18bcc89a0>, <ast.Name object at 0x7da18bcc92d0>]]
if <ast.BoolOp object at 0x7da18bcc9ae0> begin[:]
variable[msg] assign[=] constant[Cannot specify cinema2k/cinema4k along with any other options.]
<ast.Raise object at 0x7da18bccb550>
if <ast.BoolOp object at 0x7da18bcca710> begin[:]
variable[msg] assign[=] constant[Cannot specify cratios and psnr options together.]
<ast.Raise object at 0x7da18bcca230>
if compare[call[name[version].openjpeg_version_tuple][constant[0]] equal[==] constant[1]] begin[:]
variable[cparams] assign[=] call[name[opj].set_default_encoder_parameters, parameter[]]
variable[outfile] assign[=] call[name[self].filename.encode, parameter[]]
variable[num_pad_bytes] assign[=] binary_operation[name[opj2].PATH_LEN - call[name[len], parameter[name[outfile]]]]
<ast.AugAssign object at 0x7da18bccb8b0>
name[cparams].outfile assign[=] name[outfile]
if call[call[name[self].filename][<ast.Slice object at 0x7da18bccb4f0>].endswith, parameter[tuple[[<ast.Constant object at 0x7da18bcc8610>, <ast.Constant object at 0x7da18bccaec0>]]]] begin[:]
name[cparams].codec_fmt assign[=] name[opj2].CODEC_JP2
call[name[cparams].tcp_rates][constant[0]] assign[=] constant[0]
name[cparams].tcp_numlayers assign[=] constant[1]
name[cparams].cp_disto_alloc assign[=] constant[1]
name[cparams].irreversible assign[=] <ast.IfExp object at 0x7da18bcc98a0>
if compare[name[cinema2k] is_not constant[None]] begin[:]
name[self]._cparams assign[=] name[cparams]
call[name[self]._set_cinema_params, parameter[constant[cinema2k], name[cinema2k]]]
return[None]
if compare[name[cinema4k] is_not constant[None]] begin[:]
name[self]._cparams assign[=] name[cparams]
call[name[self]._set_cinema_params, parameter[constant[cinema4k], name[cinema4k]]]
return[None]
if compare[name[cbsize] is_not constant[None]] begin[:]
name[cparams].cblockw_init assign[=] call[name[cbsize]][constant[1]]
name[cparams].cblockh_init assign[=] call[name[cbsize]][constant[0]]
if compare[name[cratios] is_not constant[None]] begin[:]
name[cparams].tcp_numlayers assign[=] call[name[len], parameter[name[cratios]]]
for taget[tuple[[<ast.Name object at 0x7da2046225c0>, <ast.Name object at 0x7da204620400>]]] in starred[call[name[enumerate], parameter[name[cratios]]]] begin[:]
call[name[cparams].tcp_rates][name[j]] assign[=] name[cratio]
name[cparams].cp_disto_alloc assign[=] constant[1]
<ast.AugAssign object at 0x7da2046221d0>
<ast.AugAssign object at 0x7da2046234c0>
if compare[name[grid_offset] is_not constant[None]] begin[:]
name[cparams].image_offset_x0 assign[=] call[name[grid_offset]][constant[1]]
name[cparams].image_offset_y0 assign[=] call[name[grid_offset]][constant[0]]
if compare[name[modesw] is_not constant[None]] begin[:]
for taget[name[shift]] in starred[call[name[range], parameter[constant[6]]]] begin[:]
variable[power_of_two] assign[=] binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> name[shift]]
if binary_operation[name[modesw] <ast.BitAnd object at 0x7da2590d6b60> name[power_of_two]] begin[:]
<ast.AugAssign object at 0x7da204621ab0>
if compare[name[numres] is_not constant[None]] begin[:]
name[cparams].numresolution assign[=] name[numres]
if compare[name[prog] is_not constant[None]] begin[:]
name[cparams].prog_order assign[=] call[name[core].PROGRESSION_ORDER][call[name[prog].upper, parameter[]]]
if compare[name[psnr] is_not constant[None]] begin[:]
name[cparams].tcp_numlayers assign[=] call[name[len], parameter[name[psnr]]]
for taget[tuple[[<ast.Name object at 0x7da204623df0>, <ast.Name object at 0x7da2046219f0>]]] in starred[call[name[enumerate], parameter[name[psnr]]]] begin[:]
call[name[cparams].tcp_distoratio][name[j]] assign[=] name[snr_layer]
name[cparams].cp_fixed_quality assign[=] constant[1]
if compare[name[psizes] is_not constant[None]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da2046217b0>, <ast.Tuple object at 0x7da204621f60>]]] in starred[call[name[enumerate], parameter[name[psizes]]]] begin[:]
call[name[cparams].prcw_init][name[j]] assign[=] name[prcw]
call[name[cparams].prch_init][name[j]] assign[=] name[prch]
<ast.AugAssign object at 0x7da204622b90>
name[cparams].res_spec assign[=] call[name[len], parameter[name[psizes]]]
if compare[name[subsam] is_not constant[None]] begin[:]
name[cparams].subsampling_dy assign[=] call[name[subsam]][constant[0]]
name[cparams].subsampling_dx assign[=] call[name[subsam]][constant[1]]
if compare[name[tilesize] is_not constant[None]] begin[:]
name[cparams].cp_tdx assign[=] call[name[tilesize]][constant[1]]
name[cparams].cp_tdy assign[=] call[name[tilesize]][constant[0]]
name[cparams].tile_size_on assign[=] name[opj2].TRUE
if compare[name[mct] is constant[None]] begin[:]
name[cparams].tcp_mct assign[=] <ast.IfExp object at 0x7da204623430>
call[name[self]._validate_compression_params, parameter[name[img_array], name[cparams], name[colorspace]]]
name[self]._cparams assign[=] name[cparams] | keyword[def] identifier[_populate_cparams] ( identifier[self] , identifier[img_array] , identifier[mct] = keyword[None] , identifier[cratios] = keyword[None] , identifier[psnr] = keyword[None] ,
identifier[cinema2k] = keyword[None] , identifier[cinema4k] = keyword[None] , identifier[irreversible] = keyword[None] ,
identifier[cbsize] = keyword[None] , identifier[eph] = keyword[None] , identifier[grid_offset] = keyword[None] , identifier[modesw] = keyword[None] ,
identifier[numres] = keyword[None] , identifier[prog] = keyword[None] , identifier[psizes] = keyword[None] , identifier[sop] = keyword[None] ,
identifier[subsam] = keyword[None] , identifier[tilesize] = keyword[None] , identifier[colorspace] = keyword[None] ):
literal[string]
identifier[other_args] =( identifier[mct] , identifier[cratios] , identifier[psnr] , identifier[irreversible] , identifier[cbsize] , identifier[eph] ,
identifier[grid_offset] , identifier[modesw] , identifier[numres] , identifier[prog] , identifier[psizes] , identifier[sop] , identifier[subsam] )
keyword[if] ((( identifier[cinema2k] keyword[is] keyword[not] keyword[None] keyword[or] identifier[cinema4k] keyword[is] keyword[not] keyword[None] ) keyword[and]
( keyword[not] identifier[all] ([ identifier[arg] keyword[is] keyword[None] keyword[for] identifier[arg] keyword[in] identifier[other_args] ])))):
identifier[msg] =( literal[string]
literal[string] )
keyword[raise] identifier[IOError] ( identifier[msg] )
keyword[if] identifier[cratios] keyword[is] keyword[not] keyword[None] keyword[and] identifier[psnr] keyword[is] keyword[not] keyword[None] :
identifier[msg] = literal[string]
keyword[raise] identifier[IOError] ( identifier[msg] )
keyword[if] identifier[version] . identifier[openjpeg_version_tuple] [ literal[int] ]== literal[int] :
identifier[cparams] = identifier[opj] . identifier[set_default_encoder_parameters] ()
keyword[else] :
identifier[cparams] = identifier[opj2] . identifier[set_default_encoder_parameters] ()
identifier[outfile] = identifier[self] . identifier[filename] . identifier[encode] ()
identifier[num_pad_bytes] = identifier[opj2] . identifier[PATH_LEN] - identifier[len] ( identifier[outfile] )
identifier[outfile] += literal[string] * identifier[num_pad_bytes]
identifier[cparams] . identifier[outfile] = identifier[outfile]
keyword[if] identifier[self] . identifier[filename] [- literal[int] :]. identifier[endswith] (( literal[string] , literal[string] )):
identifier[cparams] . identifier[codec_fmt] = identifier[opj2] . identifier[CODEC_JP2]
keyword[else] :
identifier[cparams] . identifier[codec_fmt] = identifier[opj2] . identifier[CODEC_J2K]
identifier[cparams] . identifier[tcp_rates] [ literal[int] ]= literal[int]
identifier[cparams] . identifier[tcp_numlayers] = literal[int]
identifier[cparams] . identifier[cp_disto_alloc] = literal[int]
identifier[cparams] . identifier[irreversible] = literal[int] keyword[if] identifier[irreversible] keyword[else] literal[int]
keyword[if] identifier[cinema2k] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_cparams] = identifier[cparams]
identifier[self] . identifier[_set_cinema_params] ( literal[string] , identifier[cinema2k] )
keyword[return]
keyword[if] identifier[cinema4k] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_cparams] = identifier[cparams]
identifier[self] . identifier[_set_cinema_params] ( literal[string] , identifier[cinema4k] )
keyword[return]
keyword[if] identifier[cbsize] keyword[is] keyword[not] keyword[None] :
identifier[cparams] . identifier[cblockw_init] = identifier[cbsize] [ literal[int] ]
identifier[cparams] . identifier[cblockh_init] = identifier[cbsize] [ literal[int] ]
keyword[if] identifier[cratios] keyword[is] keyword[not] keyword[None] :
identifier[cparams] . identifier[tcp_numlayers] = identifier[len] ( identifier[cratios] )
keyword[for] identifier[j] , identifier[cratio] keyword[in] identifier[enumerate] ( identifier[cratios] ):
identifier[cparams] . identifier[tcp_rates] [ identifier[j] ]= identifier[cratio]
identifier[cparams] . identifier[cp_disto_alloc] = literal[int]
identifier[cparams] . identifier[csty] |= literal[int] keyword[if] identifier[sop] keyword[else] literal[int]
identifier[cparams] . identifier[csty] |= literal[int] keyword[if] identifier[eph] keyword[else] literal[int]
keyword[if] identifier[grid_offset] keyword[is] keyword[not] keyword[None] :
identifier[cparams] . identifier[image_offset_x0] = identifier[grid_offset] [ literal[int] ]
identifier[cparams] . identifier[image_offset_y0] = identifier[grid_offset] [ literal[int] ]
keyword[if] identifier[modesw] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[shift] keyword[in] identifier[range] ( literal[int] ):
identifier[power_of_two] = literal[int] << identifier[shift]
keyword[if] identifier[modesw] & identifier[power_of_two] :
identifier[cparams] . identifier[mode] |= identifier[power_of_two]
keyword[if] identifier[numres] keyword[is] keyword[not] keyword[None] :
identifier[cparams] . identifier[numresolution] = identifier[numres]
keyword[if] identifier[prog] keyword[is] keyword[not] keyword[None] :
identifier[cparams] . identifier[prog_order] = identifier[core] . identifier[PROGRESSION_ORDER] [ identifier[prog] . identifier[upper] ()]
keyword[if] identifier[psnr] keyword[is] keyword[not] keyword[None] :
identifier[cparams] . identifier[tcp_numlayers] = identifier[len] ( identifier[psnr] )
keyword[for] identifier[j] , identifier[snr_layer] keyword[in] identifier[enumerate] ( identifier[psnr] ):
identifier[cparams] . identifier[tcp_distoratio] [ identifier[j] ]= identifier[snr_layer]
identifier[cparams] . identifier[cp_fixed_quality] = literal[int]
keyword[if] identifier[psizes] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[j] ,( identifier[prch] , identifier[prcw] ) keyword[in] identifier[enumerate] ( identifier[psizes] ):
identifier[cparams] . identifier[prcw_init] [ identifier[j] ]= identifier[prcw]
identifier[cparams] . identifier[prch_init] [ identifier[j] ]= identifier[prch]
identifier[cparams] . identifier[csty] |= literal[int]
identifier[cparams] . identifier[res_spec] = identifier[len] ( identifier[psizes] )
keyword[if] identifier[subsam] keyword[is] keyword[not] keyword[None] :
identifier[cparams] . identifier[subsampling_dy] = identifier[subsam] [ literal[int] ]
identifier[cparams] . identifier[subsampling_dx] = identifier[subsam] [ literal[int] ]
keyword[if] identifier[tilesize] keyword[is] keyword[not] keyword[None] :
identifier[cparams] . identifier[cp_tdx] = identifier[tilesize] [ literal[int] ]
identifier[cparams] . identifier[cp_tdy] = identifier[tilesize] [ literal[int] ]
identifier[cparams] . identifier[tile_size_on] = identifier[opj2] . identifier[TRUE]
keyword[if] identifier[mct] keyword[is] keyword[None] :
identifier[cparams] . identifier[tcp_mct] = literal[int] keyword[if] identifier[self] . identifier[_colorspace] == identifier[opj2] . identifier[CLRSPC_SRGB] keyword[else] literal[int]
keyword[else] :
keyword[if] identifier[self] . identifier[_colorspace] == identifier[opj2] . identifier[CLRSPC_GRAY] :
identifier[msg] =( literal[string]
literal[string] )
keyword[raise] identifier[IOError] ( identifier[msg] )
identifier[cparams] . identifier[tcp_mct] = literal[int] keyword[if] identifier[mct] keyword[else] literal[int]
identifier[self] . identifier[_validate_compression_params] ( identifier[img_array] , identifier[cparams] , identifier[colorspace] )
identifier[self] . identifier[_cparams] = identifier[cparams] | def _populate_cparams(self, img_array, mct=None, cratios=None, psnr=None, cinema2k=None, cinema4k=None, irreversible=None, cbsize=None, eph=None, grid_offset=None, modesw=None, numres=None, prog=None, psizes=None, sop=None, subsam=None, tilesize=None, colorspace=None):
"""Directs processing of write method arguments.
Parameters
----------
img_array : ndarray
Image data to be written to file.
kwargs : dictionary
Non-image keyword inputs provided to write method.
"""
other_args = (mct, cratios, psnr, irreversible, cbsize, eph, grid_offset, modesw, numres, prog, psizes, sop, subsam)
if (cinema2k is not None or cinema4k is not None) and (not all([arg is None for arg in other_args])):
msg = 'Cannot specify cinema2k/cinema4k along with any other options.'
raise IOError(msg) # depends on [control=['if'], data=[]]
if cratios is not None and psnr is not None:
msg = 'Cannot specify cratios and psnr options together.'
raise IOError(msg) # depends on [control=['if'], data=[]]
if version.openjpeg_version_tuple[0] == 1:
cparams = opj.set_default_encoder_parameters() # depends on [control=['if'], data=[]]
else:
cparams = opj2.set_default_encoder_parameters()
outfile = self.filename.encode()
num_pad_bytes = opj2.PATH_LEN - len(outfile)
outfile += b'0' * num_pad_bytes
cparams.outfile = outfile
if self.filename[-4:].endswith(('.jp2', '.JP2')):
cparams.codec_fmt = opj2.CODEC_JP2 # depends on [control=['if'], data=[]]
else:
cparams.codec_fmt = opj2.CODEC_J2K
# Set defaults to lossless to begin.
cparams.tcp_rates[0] = 0
cparams.tcp_numlayers = 1
cparams.cp_disto_alloc = 1
cparams.irreversible = 1 if irreversible else 0
if cinema2k is not None:
self._cparams = cparams
self._set_cinema_params('cinema2k', cinema2k)
return # depends on [control=['if'], data=['cinema2k']]
if cinema4k is not None:
self._cparams = cparams
self._set_cinema_params('cinema4k', cinema4k)
return # depends on [control=['if'], data=['cinema4k']]
if cbsize is not None:
cparams.cblockw_init = cbsize[1]
cparams.cblockh_init = cbsize[0] # depends on [control=['if'], data=['cbsize']]
if cratios is not None:
cparams.tcp_numlayers = len(cratios)
for (j, cratio) in enumerate(cratios):
cparams.tcp_rates[j] = cratio # depends on [control=['for'], data=[]]
cparams.cp_disto_alloc = 1 # depends on [control=['if'], data=['cratios']]
cparams.csty |= 2 if sop else 0
cparams.csty |= 4 if eph else 0
if grid_offset is not None:
cparams.image_offset_x0 = grid_offset[1]
cparams.image_offset_y0 = grid_offset[0] # depends on [control=['if'], data=['grid_offset']]
if modesw is not None:
for shift in range(6):
power_of_two = 1 << shift
if modesw & power_of_two:
cparams.mode |= power_of_two # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['shift']] # depends on [control=['if'], data=['modesw']]
if numres is not None:
cparams.numresolution = numres # depends on [control=['if'], data=['numres']]
if prog is not None:
cparams.prog_order = core.PROGRESSION_ORDER[prog.upper()] # depends on [control=['if'], data=['prog']]
if psnr is not None:
cparams.tcp_numlayers = len(psnr)
for (j, snr_layer) in enumerate(psnr):
cparams.tcp_distoratio[j] = snr_layer # depends on [control=['for'], data=[]]
cparams.cp_fixed_quality = 1 # depends on [control=['if'], data=['psnr']]
if psizes is not None:
for (j, (prch, prcw)) in enumerate(psizes):
cparams.prcw_init[j] = prcw
cparams.prch_init[j] = prch # depends on [control=['for'], data=[]]
cparams.csty |= 1
cparams.res_spec = len(psizes) # depends on [control=['if'], data=['psizes']]
if subsam is not None:
cparams.subsampling_dy = subsam[0]
cparams.subsampling_dx = subsam[1] # depends on [control=['if'], data=['subsam']]
if tilesize is not None:
cparams.cp_tdx = tilesize[1]
cparams.cp_tdy = tilesize[0]
cparams.tile_size_on = opj2.TRUE # depends on [control=['if'], data=['tilesize']]
if mct is None:
# If the multi component transform was not specified, we infer
# that it should be used if the color space is RGB.
cparams.tcp_mct = 1 if self._colorspace == opj2.CLRSPC_SRGB else 0 # depends on [control=['if'], data=[]]
else:
if self._colorspace == opj2.CLRSPC_GRAY:
msg = 'Cannot specify usage of the multi component transform if the colorspace is gray.'
raise IOError(msg) # depends on [control=['if'], data=[]]
cparams.tcp_mct = 1 if mct else 0
self._validate_compression_params(img_array, cparams, colorspace)
self._cparams = cparams |
def run(self):
"""Run FastGapFill command"""
# Create solver
solver = self._get_solver()
# Load compound information
def compound_name(id):
if id not in self._model.compounds:
return id
return self._model.compounds[id].properties.get('name', id)
# TODO: The exchange and transport reactions have tuple names. This
# means that in Python 3 the reactions can no longer be directly
# compared (e.g. while sorting) so define this helper function as a
# workaround.
def reaction_key(r):
return r if isinstance(r, tuple) else (r,)
# Calculate penalty if penalty file exists
penalties = {}
if self._args.penalty is not None:
for line in self._args.penalty:
line, _, comment = line.partition('#')
line = line.strip()
if line == '':
continue
rxnid, penalty = line.split(None, 1)
penalties[rxnid] = float(penalty)
model_extended, weights = create_extended_model(
self._model,
db_penalty=self._args.db_penalty,
ex_penalty=self._args.ex_penalty,
tp_penalty=self._args.tp_penalty,
penalties=penalties)
epsilon = self._args.epsilon
core = set()
if self._args.subset is None:
for r in self._mm.reactions:
if not self._mm.is_exchange(r):
core.add(r)
else:
for line in self._args.subset:
line = line.strip()
if line == '':
continue
core.add(line)
induced = fastgapfill(model_extended, core, weights=weights,
epsilon=epsilon, solver=solver)
for reaction_id in sorted(self._mm.reactions):
rx = self._mm.get_reaction(reaction_id)
rxt = rx.translated_compounds(compound_name)
print('{}\t{}\t{}\t{}'.format(reaction_id, 'Model', 0, rxt))
for rxnid in sorted(induced, key=reaction_key):
if self._mm.has_reaction(rxnid):
continue
rx = model_extended.get_reaction(rxnid)
rxt = rx.translated_compounds(compound_name)
print('{}\t{}\t{}\t{}'.format(
rxnid, 'Add', weights.get(rxnid, 1), rxt)) | def function[run, parameter[self]]:
constant[Run FastGapFill command]
variable[solver] assign[=] call[name[self]._get_solver, parameter[]]
def function[compound_name, parameter[id]]:
if compare[name[id] <ast.NotIn object at 0x7da2590d7190> name[self]._model.compounds] begin[:]
return[name[id]]
return[call[call[name[self]._model.compounds][name[id]].properties.get, parameter[constant[name], name[id]]]]
def function[reaction_key, parameter[r]]:
return[<ast.IfExp object at 0x7da18eb57be0>]
variable[penalties] assign[=] dictionary[[], []]
if compare[name[self]._args.penalty is_not constant[None]] begin[:]
for taget[name[line]] in starred[name[self]._args.penalty] begin[:]
<ast.Tuple object at 0x7da18eb56b30> assign[=] call[name[line].partition, parameter[constant[#]]]
variable[line] assign[=] call[name[line].strip, parameter[]]
if compare[name[line] equal[==] constant[]] begin[:]
continue
<ast.Tuple object at 0x7da18eb57370> assign[=] call[name[line].split, parameter[constant[None], constant[1]]]
call[name[penalties]][name[rxnid]] assign[=] call[name[float], parameter[name[penalty]]]
<ast.Tuple object at 0x7da18eb54eb0> assign[=] call[name[create_extended_model], parameter[name[self]._model]]
variable[epsilon] assign[=] name[self]._args.epsilon
variable[core] assign[=] call[name[set], parameter[]]
if compare[name[self]._args.subset is constant[None]] begin[:]
for taget[name[r]] in starred[name[self]._mm.reactions] begin[:]
if <ast.UnaryOp object at 0x7da18eb54bb0> begin[:]
call[name[core].add, parameter[name[r]]]
variable[induced] assign[=] call[name[fastgapfill], parameter[name[model_extended], name[core]]]
for taget[name[reaction_id]] in starred[call[name[sorted], parameter[name[self]._mm.reactions]]] begin[:]
variable[rx] assign[=] call[name[self]._mm.get_reaction, parameter[name[reaction_id]]]
variable[rxt] assign[=] call[name[rx].translated_compounds, parameter[name[compound_name]]]
call[name[print], parameter[call[constant[{} {} {} {}].format, parameter[name[reaction_id], constant[Model], constant[0], name[rxt]]]]]
for taget[name[rxnid]] in starred[call[name[sorted], parameter[name[induced]]]] begin[:]
if call[name[self]._mm.has_reaction, parameter[name[rxnid]]] begin[:]
continue
variable[rx] assign[=] call[name[model_extended].get_reaction, parameter[name[rxnid]]]
variable[rxt] assign[=] call[name[rx].translated_compounds, parameter[name[compound_name]]]
call[name[print], parameter[call[constant[{} {} {} {}].format, parameter[name[rxnid], constant[Add], call[name[weights].get, parameter[name[rxnid], constant[1]]], name[rxt]]]]] | keyword[def] identifier[run] ( identifier[self] ):
literal[string]
identifier[solver] = identifier[self] . identifier[_get_solver] ()
keyword[def] identifier[compound_name] ( identifier[id] ):
keyword[if] identifier[id] keyword[not] keyword[in] identifier[self] . identifier[_model] . identifier[compounds] :
keyword[return] identifier[id]
keyword[return] identifier[self] . identifier[_model] . identifier[compounds] [ identifier[id] ]. identifier[properties] . identifier[get] ( literal[string] , identifier[id] )
keyword[def] identifier[reaction_key] ( identifier[r] ):
keyword[return] identifier[r] keyword[if] identifier[isinstance] ( identifier[r] , identifier[tuple] ) keyword[else] ( identifier[r] ,)
identifier[penalties] ={}
keyword[if] identifier[self] . identifier[_args] . identifier[penalty] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[line] keyword[in] identifier[self] . identifier[_args] . identifier[penalty] :
identifier[line] , identifier[_] , identifier[comment] = identifier[line] . identifier[partition] ( literal[string] )
identifier[line] = identifier[line] . identifier[strip] ()
keyword[if] identifier[line] == literal[string] :
keyword[continue]
identifier[rxnid] , identifier[penalty] = identifier[line] . identifier[split] ( keyword[None] , literal[int] )
identifier[penalties] [ identifier[rxnid] ]= identifier[float] ( identifier[penalty] )
identifier[model_extended] , identifier[weights] = identifier[create_extended_model] (
identifier[self] . identifier[_model] ,
identifier[db_penalty] = identifier[self] . identifier[_args] . identifier[db_penalty] ,
identifier[ex_penalty] = identifier[self] . identifier[_args] . identifier[ex_penalty] ,
identifier[tp_penalty] = identifier[self] . identifier[_args] . identifier[tp_penalty] ,
identifier[penalties] = identifier[penalties] )
identifier[epsilon] = identifier[self] . identifier[_args] . identifier[epsilon]
identifier[core] = identifier[set] ()
keyword[if] identifier[self] . identifier[_args] . identifier[subset] keyword[is] keyword[None] :
keyword[for] identifier[r] keyword[in] identifier[self] . identifier[_mm] . identifier[reactions] :
keyword[if] keyword[not] identifier[self] . identifier[_mm] . identifier[is_exchange] ( identifier[r] ):
identifier[core] . identifier[add] ( identifier[r] )
keyword[else] :
keyword[for] identifier[line] keyword[in] identifier[self] . identifier[_args] . identifier[subset] :
identifier[line] = identifier[line] . identifier[strip] ()
keyword[if] identifier[line] == literal[string] :
keyword[continue]
identifier[core] . identifier[add] ( identifier[line] )
identifier[induced] = identifier[fastgapfill] ( identifier[model_extended] , identifier[core] , identifier[weights] = identifier[weights] ,
identifier[epsilon] = identifier[epsilon] , identifier[solver] = identifier[solver] )
keyword[for] identifier[reaction_id] keyword[in] identifier[sorted] ( identifier[self] . identifier[_mm] . identifier[reactions] ):
identifier[rx] = identifier[self] . identifier[_mm] . identifier[get_reaction] ( identifier[reaction_id] )
identifier[rxt] = identifier[rx] . identifier[translated_compounds] ( identifier[compound_name] )
identifier[print] ( literal[string] . identifier[format] ( identifier[reaction_id] , literal[string] , literal[int] , identifier[rxt] ))
keyword[for] identifier[rxnid] keyword[in] identifier[sorted] ( identifier[induced] , identifier[key] = identifier[reaction_key] ):
keyword[if] identifier[self] . identifier[_mm] . identifier[has_reaction] ( identifier[rxnid] ):
keyword[continue]
identifier[rx] = identifier[model_extended] . identifier[get_reaction] ( identifier[rxnid] )
identifier[rxt] = identifier[rx] . identifier[translated_compounds] ( identifier[compound_name] )
identifier[print] ( literal[string] . identifier[format] (
identifier[rxnid] , literal[string] , identifier[weights] . identifier[get] ( identifier[rxnid] , literal[int] ), identifier[rxt] )) | def run(self):
"""Run FastGapFill command"""
# Create solver
solver = self._get_solver()
# Load compound information
def compound_name(id):
if id not in self._model.compounds:
return id # depends on [control=['if'], data=['id']]
return self._model.compounds[id].properties.get('name', id)
# TODO: The exchange and transport reactions have tuple names. This
# means that in Python 3 the reactions can no longer be directly
# compared (e.g. while sorting) so define this helper function as a
# workaround.
def reaction_key(r):
return r if isinstance(r, tuple) else (r,)
# Calculate penalty if penalty file exists
penalties = {}
if self._args.penalty is not None:
for line in self._args.penalty:
(line, _, comment) = line.partition('#')
line = line.strip()
if line == '':
continue # depends on [control=['if'], data=[]]
(rxnid, penalty) = line.split(None, 1)
penalties[rxnid] = float(penalty) # depends on [control=['for'], data=['line']] # depends on [control=['if'], data=[]]
(model_extended, weights) = create_extended_model(self._model, db_penalty=self._args.db_penalty, ex_penalty=self._args.ex_penalty, tp_penalty=self._args.tp_penalty, penalties=penalties)
epsilon = self._args.epsilon
core = set()
if self._args.subset is None:
for r in self._mm.reactions:
if not self._mm.is_exchange(r):
core.add(r) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']] # depends on [control=['if'], data=[]]
else:
for line in self._args.subset:
line = line.strip()
if line == '':
continue # depends on [control=['if'], data=[]]
core.add(line) # depends on [control=['for'], data=['line']]
induced = fastgapfill(model_extended, core, weights=weights, epsilon=epsilon, solver=solver)
for reaction_id in sorted(self._mm.reactions):
rx = self._mm.get_reaction(reaction_id)
rxt = rx.translated_compounds(compound_name)
print('{}\t{}\t{}\t{}'.format(reaction_id, 'Model', 0, rxt)) # depends on [control=['for'], data=['reaction_id']]
for rxnid in sorted(induced, key=reaction_key):
if self._mm.has_reaction(rxnid):
continue # depends on [control=['if'], data=[]]
rx = model_extended.get_reaction(rxnid)
rxt = rx.translated_compounds(compound_name)
print('{}\t{}\t{}\t{}'.format(rxnid, 'Add', weights.get(rxnid, 1), rxt)) # depends on [control=['for'], data=['rxnid']] |
def query_mongo(
        database_name,
        collection_name,
        query=None,
        include_num_results="0",
        skip=0,
        sort=None,
        limit=getattr(
            settings,
            'MONGO_LIMIT',
            200),
        cast_strings_to_integers=False,
        return_keys=()):
    """Return a response_dict with a list of search results.

    Parameters:
        database_name: name of the Mongo database to query.
        collection_name: name of the collection within that database.
        query: Mongo query document (dict); defaults to {} (match all).
        include_num_results: "1" adds a total count ignoring skip/limit,
            "2" adds a count with skip/limit applied, anything else
            omits the count.
        skip, limit: pagination controls passed straight to the cursor.
        sort: optional sort specification applied to the cursor.
        cast_strings_to_integers: when True, numeric strings in the
            query are converted to ints before querying.
        return_keys: iterable of field names to project; empty means
            return whole documents.

    On failure this is deliberately best-effort: a response_dict with
    code 500 and the error message is returned instead of raising.
    """
    # Avoid the mutable-default-argument pitfall: build a fresh dict per
    # call instead of sharing one dict object across all calls.
    if query is None:
        query = {}
    results = []
    response_dict = {}
    try:
        mongodb_client_url = getattr(settings, 'MONGODB_CLIENT',
                                     'mongodb://localhost:27017/')
        mc = MongoClient(mongodb_client_url, document_class=OrderedDict)
        db = mc[str(database_name)]
        collection = db[str(collection_name)]
        # Cast the query's numeric strings to integers if requested.
        if cast_strings_to_integers:
            query = cast_number_strings_to_integers(query)
        if return_keys:
            # Build a Mongo projection selecting only the requested fields.
            projection = {k: 1 for k in return_keys}
            mysearchresult = collection.find(
                query, projection).skip(skip).limit(limit)
        else:
            mysearchresult = collection.find(query).skip(skip).limit(limit)
        if sort:
            mysearchresult.sort(sort)
        response_dict['code'] = 200
        if include_num_results == "1":
            response_dict['num_results'] = int(
                mysearchresult.count(with_limit_and_skip=False))
        if include_num_results == "2":
            response_dict['num_results'] = int(
                mysearchresult.count(with_limit_and_skip=True))
        response_dict['type'] = "search-results"
        for d in mysearchresult:
            # Expose the ObjectId as a plain string under 'id' so the
            # result is JSON-serializable.
            d['id'] = d['_id'].__str__()
            del d['_id']
            results.append(d)
        response_dict['results'] = results
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # propagate; any other failure is reported in the response body
        # (the original best-effort contract) rather than raised.
        print("Error reading from Mongo")
        print(str(sys.exc_info()))
        response_dict['num_results'] = 0
        response_dict['code'] = 500
        response_dict['type'] = "Error"
        response_dict['results'] = []
        response_dict['message'] = str(sys.exc_info())
    return response_dict
constant[return a response_dict with a list of search results]
variable[l] assign[=] list[[]]
variable[response_dict] assign[=] dictionary[[], []]
<ast.Try object at 0x7da20e961750>
return[name[response_dict]] | keyword[def] identifier[query_mongo] (
identifier[database_name] ,
identifier[collection_name] ,
identifier[query] ={},
identifier[include_num_results] = literal[string] ,
identifier[skip] = literal[int] ,
identifier[sort] = keyword[None] ,
identifier[limit] = identifier[getattr] (
identifier[settings] ,
literal[string] ,
literal[int] ),
identifier[cast_strings_to_integers] = keyword[False] ,
identifier[return_keys] =()):
literal[string]
identifier[l] =[]
identifier[response_dict] ={}
keyword[try] :
identifier[mongodb_client_url] = identifier[getattr] ( identifier[settings] , literal[string] ,
literal[string] )
identifier[mc] = identifier[MongoClient] ( identifier[mongodb_client_url] , identifier[document_class] = identifier[OrderedDict] )
identifier[db] = identifier[mc] [ identifier[str] ( identifier[database_name] )]
identifier[collection] = identifier[db] [ identifier[str] ( identifier[collection_name] )]
keyword[if] identifier[cast_strings_to_integers] :
identifier[query] = identifier[cast_number_strings_to_integers] ( identifier[query] )
keyword[if] identifier[return_keys] :
identifier[return_dict] ={}
keyword[for] identifier[k] keyword[in] identifier[return_keys] :
identifier[return_dict] [ identifier[k] ]= literal[int]
identifier[mysearchresult] = identifier[collection] . identifier[find] (
identifier[query] , identifier[return_dict] ). identifier[skip] ( identifier[skip] ). identifier[limit] ( identifier[limit] )
keyword[else] :
identifier[mysearchresult] = identifier[collection] . identifier[find] ( identifier[query] ). identifier[skip] ( identifier[skip] ). identifier[limit] ( identifier[limit] )
keyword[if] identifier[sort] :
identifier[mysearchresult] . identifier[sort] ( identifier[sort] )
identifier[response_dict] [ literal[string] ]= literal[int]
keyword[if] identifier[include_num_results] == literal[string] :
identifier[response_dict] [ literal[string] ]= identifier[response_dict] [ literal[string] ]= identifier[int] (
identifier[mysearchresult] . identifier[count] ( identifier[with_limit_and_skip] = keyword[False] ))
keyword[if] identifier[include_num_results] == literal[string] :
identifier[response_dict] [ literal[string] ]= identifier[response_dict] [ literal[string] ]= identifier[int] (
identifier[mysearchresult] . identifier[count] ( identifier[with_limit_and_skip] = keyword[True] ))
identifier[response_dict] [ literal[string] ]= literal[string]
keyword[for] identifier[d] keyword[in] identifier[mysearchresult] :
identifier[d] [ literal[string] ]= identifier[d] [ literal[string] ]. identifier[__str__] ()
keyword[del] identifier[d] [ literal[string] ]
identifier[l] . identifier[append] ( identifier[d] )
identifier[response_dict] [ literal[string] ]= identifier[l]
keyword[except] :
identifier[print] ( literal[string] )
identifier[print] ( identifier[str] ( identifier[sys] . identifier[exc_info] ()))
identifier[response_dict] [ literal[string] ]= literal[int]
identifier[response_dict] [ literal[string] ]= literal[int]
identifier[response_dict] [ literal[string] ]= literal[string]
identifier[response_dict] [ literal[string] ]=[]
identifier[response_dict] [ literal[string] ]= identifier[str] ( identifier[sys] . identifier[exc_info] ())
keyword[return] identifier[response_dict] | def query_mongo(database_name, collection_name, query={}, include_num_results='0', skip=0, sort=None, limit=getattr(settings, 'MONGO_LIMIT', 200), cast_strings_to_integers=False, return_keys=()):
"""return a response_dict with a list of search results"""
l = []
response_dict = {}
try:
mongodb_client_url = getattr(settings, 'MONGODB_CLIENT', 'mongodb://localhost:27017/')
mc = MongoClient(mongodb_client_url, document_class=OrderedDict)
db = mc[str(database_name)]
collection = db[str(collection_name)]
# Cast the query to integers
if cast_strings_to_integers:
query = cast_number_strings_to_integers(query) # depends on [control=['if'], data=[]]
# print query
if return_keys:
return_dict = {}
for k in return_keys:
return_dict[k] = 1 # depends on [control=['for'], data=['k']]
# print "returndict=",return_dict
mysearchresult = collection.find(query, return_dict).skip(skip).limit(limit) # depends on [control=['if'], data=[]]
else:
mysearchresult = collection.find(query).skip(skip).limit(limit)
if sort:
mysearchresult.sort(sort) # depends on [control=['if'], data=[]]
response_dict['code'] = 200
if include_num_results == '1':
response_dict['num_results'] = response_dict['num_results'] = int(mysearchresult.count(with_limit_and_skip=False)) # depends on [control=['if'], data=[]]
if include_num_results == '2':
response_dict['num_results'] = response_dict['num_results'] = int(mysearchresult.count(with_limit_and_skip=True)) # depends on [control=['if'], data=[]]
response_dict['type'] = 'search-results'
for d in mysearchresult:
d['id'] = d['_id'].__str__()
del d['_id']
l.append(d) # depends on [control=['for'], data=['d']]
response_dict['results'] = l # depends on [control=['try'], data=[]]
except:
print('Error reading from Mongo')
print(str(sys.exc_info()))
response_dict['num_results'] = 0
response_dict['code'] = 500
response_dict['type'] = 'Error'
response_dict['results'] = []
response_dict['message'] = str(sys.exc_info()) # depends on [control=['except'], data=[]]
return response_dict |
def _create_formsets(self, request, obj, change, index, is_template):
    """Helper function to generate formsets for add/change_view."""
    created_formsets = []
    used_inlines = []
    # Count how many times each prefix has been handed out so repeated
    # prefixes get a numeric suffix appended.
    prefix_counts = {}
    formset_args = [request, obj] if change else [request]
    base_prefix = self.get_formset(request).get_default_prefix()
    for FormSet, inline in self.get_formsets_with_inlines(*formset_args):
        prefix = '-'.join([base_prefix, FormSet.get_default_prefix()])
        if not is_template:
            prefix = '%s-%s' % (prefix, index)
        prefix_counts[prefix] = prefix_counts.get(prefix, 0) + 1
        if prefix_counts[prefix] != 1 or not prefix:
            prefix = "%s-%s" % (prefix, prefix_counts[prefix])
        params = dict(
            instance=obj,
            prefix=prefix,
            queryset=inline.get_queryset(request),
        )
        if request.method == 'POST':
            params['data'] = request.POST
            params['files'] = request.FILES
            params['save_as_new'] = '_saveasnew' in request.POST
        formset = FormSet(**params)
        # Mark the formset as nested under a parent form.
        formset.has_parent = True
        created_formsets.append(formset)
        used_inlines.append(inline)
    return created_formsets, used_inlines
constant[Helper function to generate formsets for add/change_view.]
variable[formsets] assign[=] list[[]]
variable[inline_instances] assign[=] list[[]]
variable[prefixes] assign[=] call[name[defaultdict], parameter[name[int]]]
variable[get_formsets_args] assign[=] list[[<ast.Name object at 0x7da1afe53490>]]
if name[change] begin[:]
call[name[get_formsets_args].append, parameter[name[obj]]]
variable[base_prefix] assign[=] call[call[name[self].get_formset, parameter[name[request]]].get_default_prefix, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1afe52dd0>, <ast.Name object at 0x7da1afe51bd0>]]] in starred[call[name[self].get_formsets_with_inlines, parameter[<ast.Starred object at 0x7da1afe50490>]]] begin[:]
variable[prefix] assign[=] binary_operation[binary_operation[name[base_prefix] + constant[-]] + call[name[FormSet].get_default_prefix, parameter[]]]
if <ast.UnaryOp object at 0x7da1afe524a0> begin[:]
<ast.AugAssign object at 0x7da1afe53100>
<ast.AugAssign object at 0x7da1afe52320>
if <ast.BoolOp object at 0x7da1afe520b0> begin[:]
variable[prefix] assign[=] binary_operation[constant[%s-%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1afe539d0>, <ast.Subscript object at 0x7da1afe53940>]]]
variable[formset_params] assign[=] dictionary[[<ast.Constant object at 0x7da1afe53fa0>, <ast.Constant object at 0x7da1afe53850>, <ast.Constant object at 0x7da1afe53ca0>], [<ast.Name object at 0x7da1afe53d90>, <ast.Name object at 0x7da1afe53f40>, <ast.Call object at 0x7da1afe50970>]]
if compare[name[request].method equal[==] constant[POST]] begin[:]
call[name[formset_params].update, parameter[dictionary[[<ast.Constant object at 0x7da1afe51270>, <ast.Constant object at 0x7da1afe529e0>, <ast.Constant object at 0x7da1afe52d70>], [<ast.Attribute object at 0x7da1afe52f50>, <ast.Attribute object at 0x7da1afe512a0>, <ast.Compare object at 0x7da1afe52e00>]]]]
variable[formset] assign[=] call[name[FormSet], parameter[]]
name[formset].has_parent assign[=] constant[True]
call[name[formsets].append, parameter[name[formset]]]
call[name[inline_instances].append, parameter[name[inline]]]
return[tuple[[<ast.Name object at 0x7da1afe50f10>, <ast.Name object at 0x7da1afe53550>]]] | keyword[def] identifier[_create_formsets] ( identifier[self] , identifier[request] , identifier[obj] , identifier[change] , identifier[index] , identifier[is_template] ):
literal[string]
identifier[formsets] =[]
identifier[inline_instances] =[]
identifier[prefixes] = identifier[defaultdict] ( identifier[int] )
identifier[get_formsets_args] =[ identifier[request] ]
keyword[if] identifier[change] :
identifier[get_formsets_args] . identifier[append] ( identifier[obj] )
identifier[base_prefix] = identifier[self] . identifier[get_formset] ( identifier[request] ). identifier[get_default_prefix] ()
keyword[for] identifier[FormSet] , identifier[inline] keyword[in] identifier[self] . identifier[get_formsets_with_inlines] (
* identifier[get_formsets_args] ):
identifier[prefix] = identifier[base_prefix] + literal[string] + identifier[FormSet] . identifier[get_default_prefix] ()
keyword[if] keyword[not] identifier[is_template] :
identifier[prefix] += literal[string] % identifier[index]
identifier[prefixes] [ identifier[prefix] ]+= literal[int]
keyword[if] identifier[prefixes] [ identifier[prefix] ]!= literal[int] keyword[or] keyword[not] identifier[prefix] :
identifier[prefix] = literal[string] %( identifier[prefix] , identifier[prefixes] [ identifier[prefix] ])
identifier[formset_params] ={
literal[string] : identifier[obj] ,
literal[string] : identifier[prefix] ,
literal[string] : identifier[inline] . identifier[get_queryset] ( identifier[request] ),
}
keyword[if] identifier[request] . identifier[method] == literal[string] :
identifier[formset_params] . identifier[update] ({
literal[string] : identifier[request] . identifier[POST] ,
literal[string] : identifier[request] . identifier[FILES] ,
literal[string] : literal[string] keyword[in] identifier[request] . identifier[POST]
})
identifier[formset] = identifier[FormSet] (** identifier[formset_params] )
identifier[formset] . identifier[has_parent] = keyword[True]
identifier[formsets] . identifier[append] ( identifier[formset] )
identifier[inline_instances] . identifier[append] ( identifier[inline] )
keyword[return] identifier[formsets] , identifier[inline_instances] | def _create_formsets(self, request, obj, change, index, is_template):
"""Helper function to generate formsets for add/change_view."""
formsets = []
inline_instances = []
prefixes = defaultdict(int)
get_formsets_args = [request]
if change:
get_formsets_args.append(obj) # depends on [control=['if'], data=[]]
base_prefix = self.get_formset(request).get_default_prefix()
for (FormSet, inline) in self.get_formsets_with_inlines(*get_formsets_args):
prefix = base_prefix + '-' + FormSet.get_default_prefix()
if not is_template:
prefix += '-%s' % index # depends on [control=['if'], data=[]]
prefixes[prefix] += 1
if prefixes[prefix] != 1 or not prefix:
prefix = '%s-%s' % (prefix, prefixes[prefix]) # depends on [control=['if'], data=[]]
formset_params = {'instance': obj, 'prefix': prefix, 'queryset': inline.get_queryset(request)}
if request.method == 'POST':
formset_params.update({'data': request.POST, 'files': request.FILES, 'save_as_new': '_saveasnew' in request.POST}) # depends on [control=['if'], data=[]]
formset = FormSet(**formset_params)
formset.has_parent = True
formsets.append(formset)
inline_instances.append(inline) # depends on [control=['for'], data=[]]
return (formsets, inline_instances) |
def create_loadbalancer(call=None, kwargs=None):
    '''
    Creates a loadbalancer within the datacenter from the provider config.

    CLI Example:

    .. code-block:: bash

        salt-cloud -f create_loadbalancer profitbricks name=mylb
    '''
    if call != 'function':
        # Bug fix: the message previously said "create_address" (copied
        # from a sibling function); it now names this function.
        raise SaltCloudSystemExit(
            'The create_loadbalancer function must be called with -f or --function.'
        )

    if kwargs is None:
        kwargs = {}

    conn = get_conn()
    datacenter_id = get_datacenter_id()
    # 'name', 'ip' and 'dhcp' are read from the CLI kwargs; missing keys
    # become None and fall back to the provider's defaults.
    loadbalancer = LoadBalancer(name=kwargs.get('name'),
                                ip=kwargs.get('ip'),
                                dhcp=kwargs.get('dhcp'))

    response = conn.create_loadbalancer(datacenter_id, loadbalancer)
    # Block (up to 60s) until the provider reports the loadbalancer ready.
    _wait_for_completion(conn, response, 60, 'loadbalancer')

    return response
constant[
Creates a loadbalancer within the datacenter from the provider config.
CLI Example:
.. code-block:: bash
salt-cloud -f create_loadbalancer profitbricks name=mylb
]
if compare[name[call] not_equal[!=] constant[function]] begin[:]
<ast.Raise object at 0x7da1b2168400>
if compare[name[kwargs] is constant[None]] begin[:]
variable[kwargs] assign[=] dictionary[[], []]
variable[conn] assign[=] call[name[get_conn], parameter[]]
variable[datacenter_id] assign[=] call[name[get_datacenter_id], parameter[]]
variable[loadbalancer] assign[=] call[name[LoadBalancer], parameter[]]
variable[response] assign[=] call[name[conn].create_loadbalancer, parameter[name[datacenter_id], name[loadbalancer]]]
call[name[_wait_for_completion], parameter[name[conn], name[response], constant[60], constant[loadbalancer]]]
return[name[response]] | keyword[def] identifier[create_loadbalancer] ( identifier[call] = keyword[None] , identifier[kwargs] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
)
keyword[if] identifier[kwargs] keyword[is] keyword[None] :
identifier[kwargs] ={}
identifier[conn] = identifier[get_conn] ()
identifier[datacenter_id] = identifier[get_datacenter_id] ()
identifier[loadbalancer] = identifier[LoadBalancer] ( identifier[name] = identifier[kwargs] . identifier[get] ( literal[string] ),
identifier[ip] = identifier[kwargs] . identifier[get] ( literal[string] ),
identifier[dhcp] = identifier[kwargs] . identifier[get] ( literal[string] ))
identifier[response] = identifier[conn] . identifier[create_loadbalancer] ( identifier[datacenter_id] , identifier[loadbalancer] )
identifier[_wait_for_completion] ( identifier[conn] , identifier[response] , literal[int] , literal[string] )
keyword[return] identifier[response] | def create_loadbalancer(call=None, kwargs=None):
"""
Creates a loadbalancer within the datacenter from the provider config.
CLI Example:
.. code-block:: bash
salt-cloud -f create_loadbalancer profitbricks name=mylb
"""
if call != 'function':
raise SaltCloudSystemExit('The create_address function must be called with -f or --function.') # depends on [control=['if'], data=[]]
if kwargs is None:
kwargs = {} # depends on [control=['if'], data=['kwargs']]
conn = get_conn()
datacenter_id = get_datacenter_id()
loadbalancer = LoadBalancer(name=kwargs.get('name'), ip=kwargs.get('ip'), dhcp=kwargs.get('dhcp'))
response = conn.create_loadbalancer(datacenter_id, loadbalancer)
_wait_for_completion(conn, response, 60, 'loadbalancer')
return response |
def collapse(cls, holomap, ranges=None, mode='data'):
    """
    Given a map of Overlays, apply all applicable compositors.

    Returns the input unchanged when no compositor definitions are
    registered; otherwise returns a clone of ``holomap`` with every
    element replaced by its collapsed form.
    """
    # No potential compositors: nothing can change, so short-circuit.
    # (Truthiness test instead of `== []` so an empty tuple or None
    # definitions attribute is also treated as empty.)
    if not cls.definitions:
        return holomap

    # Apply compositors into an empty clone so key order and metadata
    # are preserved. When ranges are supplied, pair each range with the
    # corresponding element.
    clone = holomap.clone(shared_data=False)
    data = zip(ranges[1], holomap.data.values()) if ranges else holomap.data.items()
    for key, overlay in data:
        clone[key] = cls.collapse_element(overlay, ranges, mode)
    return clone
constant[
Given a map of Overlays, apply all applicable compositors.
]
if compare[name[cls].definitions equal[==] list[[]]] begin[:]
return[name[holomap]]
variable[clone] assign[=] call[name[holomap].clone, parameter[]]
variable[data] assign[=] <ast.IfExp object at 0x7da204567970>
for taget[tuple[[<ast.Name object at 0x7da2054a7cd0>, <ast.Name object at 0x7da2054a62c0>]]] in starred[name[data]] begin[:]
call[name[clone]][name[key]] assign[=] call[name[cls].collapse_element, parameter[name[overlay], name[ranges], name[mode]]]
return[name[clone]] | keyword[def] identifier[collapse] ( identifier[cls] , identifier[holomap] , identifier[ranges] = keyword[None] , identifier[mode] = literal[string] ):
literal[string]
keyword[if] identifier[cls] . identifier[definitions] ==[]:
keyword[return] identifier[holomap]
identifier[clone] = identifier[holomap] . identifier[clone] ( identifier[shared_data] = keyword[False] )
identifier[data] = identifier[zip] ( identifier[ranges] [ literal[int] ], identifier[holomap] . identifier[data] . identifier[values] ()) keyword[if] identifier[ranges] keyword[else] identifier[holomap] . identifier[data] . identifier[items] ()
keyword[for] identifier[key] , identifier[overlay] keyword[in] identifier[data] :
identifier[clone] [ identifier[key] ]= identifier[cls] . identifier[collapse_element] ( identifier[overlay] , identifier[ranges] , identifier[mode] )
keyword[return] identifier[clone] | def collapse(cls, holomap, ranges=None, mode='data'):
"""
Given a map of Overlays, apply all applicable compositors.
"""
# No potential compositors
if cls.definitions == []:
return holomap # depends on [control=['if'], data=[]]
# Apply compositors
clone = holomap.clone(shared_data=False)
data = zip(ranges[1], holomap.data.values()) if ranges else holomap.data.items()
for (key, overlay) in data:
clone[key] = cls.collapse_element(overlay, ranges, mode) # depends on [control=['for'], data=[]]
return clone |
def calculate_alive_path(model, transactions, datetime_col, t, freq="D"):
    """
    Calculate alive path for plotting alive history of user.

    Parameters
    ----------
    model:
        A fitted lifetimes model
    transactions: DataFrame
        a Pandas DataFrame containing the transactions history of the customer_id
    datetime_col: string
        the column in the transactions that denotes the datetime the purchase was made
    t: array_like
        the number of time units since the birth for which we want to draw the p_alive
    freq: string
        Default 'D' for days. Other examples= 'W' for weekly

    Returns
    -------
    :obj: Series
        A pandas Series containing the p_alive as a function of T (age of the customer)
    """
    customer_history = transactions[[datetime_col]].copy()
    customer_history[datetime_col] = pd.to_datetime(customer_history[datetime_col])
    customer_history = customer_history.set_index(datetime_col)
    # Add transactions column
    customer_history["transactions"] = 1

    # Resample to one row per period. replace() is used instead of
    # fillna(0) because resample+fillna misbehaved on some pandas/py3
    # combinations.
    purchase_history = customer_history.resample(freq).sum().replace(np.nan, 0)["transactions"].values

    # Pad with zero-transaction periods out to age t (t + 1 rows total).
    extra_columns = t + 1 - len(purchase_history)
    customer_history = pd.DataFrame(np.append(purchase_history, [0] * extra_columns), columns=["transactions"])

    # add T column: customer age in periods
    customer_history["T"] = np.arange(customer_history.shape[0])
    # Collapse per-period counts to a 0/1 "purchased this period" flag.
    # (Lambda argument renamed so it no longer shadows the function's `t`.)
    customer_history["transactions"] = customer_history["transactions"].apply(lambda count: int(count > 0))
    customer_history["frequency"] = customer_history["transactions"].cumsum() - 1  # first purchase is ignored

    # recency (t_x): age at the most recent purchase, carried forward;
    # periods before the first purchase get 0.
    customer_history["recency"] = customer_history.apply(
        lambda row: row["T"] if row["transactions"] != 0 else np.nan, axis=1
    )
    # .ffill() replaces the deprecated fillna(method="ffill") spelling.
    customer_history["recency"] = customer_history["recency"].ffill().fillna(0)

    return customer_history.apply(
        lambda row: model.conditional_probability_alive(row["frequency"], row["recency"], row["T"]), axis=1
    )
constant[
Calculate alive path for plotting alive history of user.
Parameters
----------
model:
A fitted lifetimes model
transactions: DataFrame
a Pandas DataFrame containing the transactions history of the customer_id
datetime_col: string
the column in the transactions that denotes the datetime the purchase was made
t: array_like
the number of time units since the birth for which we want to draw the p_alive
freq: string
Default 'D' for days. Other examples= 'W' for weekly
Returns
-------
:obj: Series
A pandas Series containing the p_alive as a function of T (age of the customer)
]
variable[customer_history] assign[=] call[call[name[transactions]][list[[<ast.Name object at 0x7da1b22afdc0>]]].copy, parameter[]]
call[name[customer_history]][name[datetime_col]] assign[=] call[name[pd].to_datetime, parameter[call[name[customer_history]][name[datetime_col]]]]
variable[customer_history] assign[=] call[name[customer_history].set_index, parameter[name[datetime_col]]]
call[name[customer_history]][constant[transactions]] assign[=] constant[1]
variable[purchase_history] assign[=] call[call[call[call[name[customer_history].resample, parameter[name[freq]]].sum, parameter[]].replace, parameter[name[np].nan, constant[0]]]][constant[transactions]].values
variable[extra_columns] assign[=] binary_operation[binary_operation[name[t] + constant[1]] - call[name[len], parameter[name[purchase_history]]]]
variable[customer_history] assign[=] call[name[pd].DataFrame, parameter[call[name[np].append, parameter[name[purchase_history], binary_operation[list[[<ast.Constant object at 0x7da1b1d5d4e0>]] * name[extra_columns]]]]]]
call[name[customer_history]][constant[T]] assign[=] call[name[np].arange, parameter[call[name[customer_history].shape][constant[0]]]]
call[name[customer_history]][constant[transactions]] assign[=] call[call[name[customer_history]][constant[transactions]].apply, parameter[<ast.Lambda object at 0x7da1b1d5d2a0>]]
call[name[customer_history]][constant[frequency]] assign[=] binary_operation[call[call[name[customer_history]][constant[transactions]].cumsum, parameter[]] - constant[1]]
call[name[customer_history]][constant[recency]] assign[=] call[name[customer_history].apply, parameter[<ast.Lambda object at 0x7da1b1d5c9a0>]]
call[name[customer_history]][constant[recency]] assign[=] call[call[call[name[customer_history]][constant[recency]].fillna, parameter[]].fillna, parameter[constant[0]]]
return[call[name[customer_history].apply, parameter[<ast.Lambda object at 0x7da1b1d5c970>]]] | keyword[def] identifier[calculate_alive_path] ( identifier[model] , identifier[transactions] , identifier[datetime_col] , identifier[t] , identifier[freq] = literal[string] ):
literal[string]
identifier[customer_history] = identifier[transactions] [[ identifier[datetime_col] ]]. identifier[copy] ()
identifier[customer_history] [ identifier[datetime_col] ]= identifier[pd] . identifier[to_datetime] ( identifier[customer_history] [ identifier[datetime_col] ])
identifier[customer_history] = identifier[customer_history] . identifier[set_index] ( identifier[datetime_col] )
identifier[customer_history] [ literal[string] ]= literal[int]
identifier[purchase_history] = identifier[customer_history] . identifier[resample] ( identifier[freq] ). identifier[sum] (). identifier[replace] ( identifier[np] . identifier[nan] , literal[int] )[ literal[string] ]. identifier[values]
identifier[extra_columns] = identifier[t] + literal[int] - identifier[len] ( identifier[purchase_history] )
identifier[customer_history] = identifier[pd] . identifier[DataFrame] ( identifier[np] . identifier[append] ( identifier[purchase_history] ,[ literal[int] ]* identifier[extra_columns] ), identifier[columns] =[ literal[string] ])
identifier[customer_history] [ literal[string] ]= identifier[np] . identifier[arange] ( identifier[customer_history] . identifier[shape] [ literal[int] ])
identifier[customer_history] [ literal[string] ]= identifier[customer_history] [ literal[string] ]. identifier[apply] ( keyword[lambda] identifier[t] : identifier[int] ( identifier[t] > literal[int] ))
identifier[customer_history] [ literal[string] ]= identifier[customer_history] [ literal[string] ]. identifier[cumsum] ()- literal[int]
identifier[customer_history] [ literal[string] ]= identifier[customer_history] . identifier[apply] (
keyword[lambda] identifier[row] : identifier[row] [ literal[string] ] keyword[if] identifier[row] [ literal[string] ]!= literal[int] keyword[else] identifier[np] . identifier[nan] , identifier[axis] = literal[int]
)
identifier[customer_history] [ literal[string] ]= identifier[customer_history] [ literal[string] ]. identifier[fillna] ( identifier[method] = literal[string] ). identifier[fillna] ( literal[int] )
keyword[return] identifier[customer_history] . identifier[apply] (
keyword[lambda] identifier[row] : identifier[model] . identifier[conditional_probability_alive] ( identifier[row] [ literal[string] ], identifier[row] [ literal[string] ], identifier[row] [ literal[string] ]), identifier[axis] = literal[int]
) | def calculate_alive_path(model, transactions, datetime_col, t, freq='D'):
"""
Calculate alive path for plotting alive history of user.
Parameters
----------
model:
A fitted lifetimes model
transactions: DataFrame
a Pandas DataFrame containing the transactions history of the customer_id
datetime_col: string
the column in the transactions that denotes the datetime the purchase was made
t: array_like
the number of time units since the birth for which we want to draw the p_alive
freq: string
Default 'D' for days. Other examples= 'W' for weekly
Returns
-------
:obj: Series
A pandas Series containing the p_alive as a function of T (age of the customer)
"""
customer_history = transactions[[datetime_col]].copy()
customer_history[datetime_col] = pd.to_datetime(customer_history[datetime_col])
customer_history = customer_history.set_index(datetime_col)
# Add transactions column
customer_history['transactions'] = 1
# for some reason fillna(0) not working for resample in pandas with python 3.x,
# changed to replace
purchase_history = customer_history.resample(freq).sum().replace(np.nan, 0)['transactions'].values
extra_columns = t + 1 - len(purchase_history)
customer_history = pd.DataFrame(np.append(purchase_history, [0] * extra_columns), columns=['transactions'])
# add T column
customer_history['T'] = np.arange(customer_history.shape[0])
# add cumulative transactions column
customer_history['transactions'] = customer_history['transactions'].apply(lambda t: int(t > 0))
customer_history['frequency'] = customer_history['transactions'].cumsum() - 1 # first purchase is ignored
# Add t_x column
customer_history['recency'] = customer_history.apply(lambda row: row['T'] if row['transactions'] != 0 else np.nan, axis=1)
customer_history['recency'] = customer_history['recency'].fillna(method='ffill').fillna(0)
return customer_history.apply(lambda row: model.conditional_probability_alive(row['frequency'], row['recency'], row['T']), axis=1) |
def reduce_sum_square(attrs, inputs, proto_obj):
    """Reduce the array along a given axis by sum square value"""
    # Square element-wise first, then sum along the requested axes.
    squared = symbol.square(inputs[0])
    reduced = symbol.sum(
        squared,
        axis=attrs.get('axes'),
        keepdims=attrs.get('keepdims'),
    )
    return reduced, attrs, inputs
constant[Reduce the array along a given axis by sum square value]
variable[square_op] assign[=] call[name[symbol].square, parameter[call[name[inputs]][constant[0]]]]
variable[sum_op] assign[=] call[name[symbol].sum, parameter[name[square_op]]]
return[tuple[[<ast.Name object at 0x7da1b2065150>, <ast.Name object at 0x7da1b20660e0>, <ast.Name object at 0x7da1b2067040>]]] | keyword[def] identifier[reduce_sum_square] ( identifier[attrs] , identifier[inputs] , identifier[proto_obj] ):
literal[string]
identifier[square_op] = identifier[symbol] . identifier[square] ( identifier[inputs] [ literal[int] ])
identifier[sum_op] = identifier[symbol] . identifier[sum] ( identifier[square_op] , identifier[axis] = identifier[attrs] . identifier[get] ( literal[string] ),
identifier[keepdims] = identifier[attrs] . identifier[get] ( literal[string] ))
keyword[return] identifier[sum_op] , identifier[attrs] , identifier[inputs] | def reduce_sum_square(attrs, inputs, proto_obj):
"""Reduce the array along a given axis by sum square value"""
square_op = symbol.square(inputs[0])
sum_op = symbol.sum(square_op, axis=attrs.get('axes'), keepdims=attrs.get('keepdims'))
return (sum_op, attrs, inputs) |
def retry_failed(FailAdmin, request, queryset):
    """Submit selected tasks back to the queue."""
    for failed in queryset:
        task_args = failed.args or ()
        task_kwargs = failed.kwargs or {}
        async_task(failed.func, *task_args, hook=failed.hook, **task_kwargs)
        # Drop the failure record once the task has been re-queued.
        failed.delete()
constant[Submit selected tasks back to the queue.]
for taget[name[task]] in starred[name[queryset]] begin[:]
call[name[async_task], parameter[name[task].func, <ast.Starred object at 0x7da1b175e3e0>]]
call[name[task].delete, parameter[]] | keyword[def] identifier[retry_failed] ( identifier[FailAdmin] , identifier[request] , identifier[queryset] ):
literal[string]
keyword[for] identifier[task] keyword[in] identifier[queryset] :
identifier[async_task] ( identifier[task] . identifier[func] ,* identifier[task] . identifier[args] keyword[or] (), identifier[hook] = identifier[task] . identifier[hook] ,** identifier[task] . identifier[kwargs] keyword[or] {})
identifier[task] . identifier[delete] () | def retry_failed(FailAdmin, request, queryset):
"""Submit selected tasks back to the queue."""
for task in queryset:
async_task(task.func, *(task.args or ()), hook=task.hook, **task.kwargs or {})
task.delete() # depends on [control=['for'], data=['task']] |
def list(self, limit=None, marker=None, name=None, visibility=None,
        member_status=None, owner=None, tag=None, status=None,
        size_min=None, size_max=None, sort_key=None, sort_dir=None,
        return_raw=False):
    """
    Return a list of resource objects.

    Pagination is available via the optional 'marker' and 'limit'
    parameters; all remaining parameters act as server-side filters on
    the returned collection.
    """
    filters = dict(limit=limit, marker=marker, name=name,
            visibility=visibility, member_status=member_status,
            owner=owner, tag=tag, status=status, size_min=size_min,
            size_max=size_max, sort_key=sort_key, sort_dir=sort_dir)
    # None-valued entries are dropped by dict_to_qs, so only supplied
    # filters end up in the query string.
    query_string = utils.dict_to_qs(filters)
    uri = "/%s" % self.uri_base
    if query_string:
        uri = "?".join((uri, query_string))
    return self._list(uri, return_raw=return_raw)
constant[
Returns a list of resource objects. Pagination is supported through the
optional 'marker' and 'limit' parameters. Filtering the returned value
is possible by specifying values for any of the other parameters.
]
variable[uri] assign[=] binary_operation[constant[/%s] <ast.Mod object at 0x7da2590d6920> name[self].uri_base]
variable[qs] assign[=] call[name[utils].dict_to_qs, parameter[call[name[dict], parameter[]]]]
if name[qs] begin[:]
variable[uri] assign[=] binary_operation[constant[%s?%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b05581f0>, <ast.Name object at 0x7da1b0559d80>]]]
return[call[name[self]._list, parameter[name[uri]]]] | keyword[def] identifier[list] ( identifier[self] , identifier[limit] = keyword[None] , identifier[marker] = keyword[None] , identifier[name] = keyword[None] , identifier[visibility] = keyword[None] ,
identifier[member_status] = keyword[None] , identifier[owner] = keyword[None] , identifier[tag] = keyword[None] , identifier[status] = keyword[None] ,
identifier[size_min] = keyword[None] , identifier[size_max] = keyword[None] , identifier[sort_key] = keyword[None] , identifier[sort_dir] = keyword[None] ,
identifier[return_raw] = keyword[False] ):
literal[string]
identifier[uri] = literal[string] % identifier[self] . identifier[uri_base]
identifier[qs] = identifier[utils] . identifier[dict_to_qs] ( identifier[dict] ( identifier[limit] = identifier[limit] , identifier[marker] = identifier[marker] , identifier[name] = identifier[name] ,
identifier[visibility] = identifier[visibility] , identifier[member_status] = identifier[member_status] ,
identifier[owner] = identifier[owner] , identifier[tag] = identifier[tag] , identifier[status] = identifier[status] , identifier[size_min] = identifier[size_min] ,
identifier[size_max] = identifier[size_max] , identifier[sort_key] = identifier[sort_key] , identifier[sort_dir] = identifier[sort_dir] ))
keyword[if] identifier[qs] :
identifier[uri] = literal[string] %( identifier[uri] , identifier[qs] )
keyword[return] identifier[self] . identifier[_list] ( identifier[uri] , identifier[return_raw] = identifier[return_raw] ) | def list(self, limit=None, marker=None, name=None, visibility=None, member_status=None, owner=None, tag=None, status=None, size_min=None, size_max=None, sort_key=None, sort_dir=None, return_raw=False):
"""
Returns a list of resource objects. Pagination is supported through the
optional 'marker' and 'limit' parameters. Filtering the returned value
is possible by specifying values for any of the other parameters.
"""
uri = '/%s' % self.uri_base
qs = utils.dict_to_qs(dict(limit=limit, marker=marker, name=name, visibility=visibility, member_status=member_status, owner=owner, tag=tag, status=status, size_min=size_min, size_max=size_max, sort_key=sort_key, sort_dir=sort_dir))
if qs:
uri = '%s?%s' % (uri, qs) # depends on [control=['if'], data=[]]
return self._list(uri, return_raw=return_raw) |
def feature_importances(data, top_n=None, feature_names=None):
    """
    Tabulate ordered feature importances from a scikit-learn model or an
    array-like structure.
    When *data* is a scikit-learn model with sub-estimators (e.g.
    RandomForest, AdaBoost) the standard deviation of each feature is
    computed as well.
    Parameters
    ----------
    data : sklearn model or array-like structure
        Object to get the data from.
    top_n : int
        Only get results for the top_n features.
    feature_names : array-like
        Feature_names
    Returns
    -------
    table
        Table object with the data. Columns are
        feature_name, importance (`std_` only included for models with
        sub-estimators)
    """
    if data is None:
        # Message kept verbatim for callers matching on the error text.
        msg = ('data is needed to tabulate feature importances. '
               'When plotting using the evaluator you need to pass '
               'an estimator ')
        raise ValueError(msg)
    table_data = compute.feature_importances(data, top_n, feature_names)
    return Table(table_data, table_data.dtype.names)
constant[
Get and order feature importances from a scikit-learn model
or from an array-like structure.
If data is a scikit-learn model with sub-estimators (e.g. RandomForest,
AdaBoost) the function will compute the standard deviation of each
feature.
Parameters
----------
data : sklearn model or array-like structure
Object to get the data from.
top_n : int
Only get results for the top_n features.
feature_names : array-like
Feature_names
Returns
-------
table
Table object with the data. Columns are
feature_name, importance (`std_` only included for models with
sub-estimators)
]
if compare[name[data] is constant[None]] begin[:]
<ast.Raise object at 0x7da2045652a0>
variable[res] assign[=] call[name[compute].feature_importances, parameter[name[data], name[top_n], name[feature_names]]]
return[call[name[Table], parameter[name[res], name[res].dtype.names]]] | keyword[def] identifier[feature_importances] ( identifier[data] , identifier[top_n] = keyword[None] , identifier[feature_names] = keyword[None] ):
literal[string]
keyword[if] identifier[data] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string]
literal[string] )
identifier[res] = identifier[compute] . identifier[feature_importances] ( identifier[data] , identifier[top_n] , identifier[feature_names] )
keyword[return] identifier[Table] ( identifier[res] , identifier[res] . identifier[dtype] . identifier[names] ) | def feature_importances(data, top_n=None, feature_names=None):
"""
Get and order feature importances from a scikit-learn model
or from an array-like structure.
If data is a scikit-learn model with sub-estimators (e.g. RandomForest,
AdaBoost) the function will compute the standard deviation of each
feature.
Parameters
----------
data : sklearn model or array-like structure
Object to get the data from.
top_n : int
Only get results for the top_n features.
feature_names : array-like
Feature_names
Returns
-------
table
Table object with the data. Columns are
feature_name, importance (`std_` only included for models with
sub-estimators)
"""
if data is None:
raise ValueError('data is needed to tabulate feature importances. When plotting using the evaluator you need to pass an estimator ') # depends on [control=['if'], data=[]]
res = compute.feature_importances(data, top_n, feature_names)
return Table(res, res.dtype.names) |
def _normalize(d):
'''
The above parse function generates output of list in dict form
i.e. {'abc' : {0: 'xyz', 1: 'pqr'}}. This function normalize it and turn
them into proper data type, i.e. {'abc': ['xyz', 'pqr']}
Note: if dict has element starts with 10, 11 etc.. this function won't fill
blanks.
for eg: {'abc': {10: 'xyz', 12: 'pqr'}} will convert to
{'abc': ['xyz', 'pqr']}
'''
newd = {}
if isinstance(d, dict) == False:
return d
# if dictionary. iterate over each element and append to newd
for k, v in six.iteritems(d):
if isinstance(v, dict):
first_key = next(iter(six.viewkeys(v)))
if isinstance(first_key, int):
temp_new = []
for k1, v1 in v.items():
temp_new.append(_normalize(v1))
newd[k] = temp_new
elif first_key == '':
newd[k] = v.values()[0]
else:
newd[k] = _normalize(v)
else:
newd[k] = v
return newd | def function[_normalize, parameter[d]]:
constant[
The above parse function generates output of list in dict form
i.e. {'abc' : {0: 'xyz', 1: 'pqr'}}. This function normalize it and turn
them into proper data type, i.e. {'abc': ['xyz', 'pqr']}
Note: if dict has element starts with 10, 11 etc.. this function won't fill
blanks.
for eg: {'abc': {10: 'xyz', 12: 'pqr'}} will convert to
{'abc': ['xyz', 'pqr']}
]
variable[newd] assign[=] dictionary[[], []]
if compare[call[name[isinstance], parameter[name[d], name[dict]]] equal[==] constant[False]] begin[:]
return[name[d]]
for taget[tuple[[<ast.Name object at 0x7da1b287c2b0>, <ast.Name object at 0x7da1b287f130>]]] in starred[call[name[six].iteritems, parameter[name[d]]]] begin[:]
if call[name[isinstance], parameter[name[v], name[dict]]] begin[:]
variable[first_key] assign[=] call[name[next], parameter[call[name[iter], parameter[call[name[six].viewkeys, parameter[name[v]]]]]]]
if call[name[isinstance], parameter[name[first_key], name[int]]] begin[:]
variable[temp_new] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b287de70>, <ast.Name object at 0x7da1b287e620>]]] in starred[call[name[v].items, parameter[]]] begin[:]
call[name[temp_new].append, parameter[call[name[_normalize], parameter[name[v1]]]]]
call[name[newd]][name[k]] assign[=] name[temp_new]
return[name[newd]] | keyword[def] identifier[_normalize] ( identifier[d] ):
literal[string]
identifier[newd] ={}
keyword[if] identifier[isinstance] ( identifier[d] , identifier[dict] )== keyword[False] :
keyword[return] identifier[d]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[d] ):
keyword[if] identifier[isinstance] ( identifier[v] , identifier[dict] ):
identifier[first_key] = identifier[next] ( identifier[iter] ( identifier[six] . identifier[viewkeys] ( identifier[v] )))
keyword[if] identifier[isinstance] ( identifier[first_key] , identifier[int] ):
identifier[temp_new] =[]
keyword[for] identifier[k1] , identifier[v1] keyword[in] identifier[v] . identifier[items] ():
identifier[temp_new] . identifier[append] ( identifier[_normalize] ( identifier[v1] ))
identifier[newd] [ identifier[k] ]= identifier[temp_new]
keyword[elif] identifier[first_key] == literal[string] :
identifier[newd] [ identifier[k] ]= identifier[v] . identifier[values] ()[ literal[int] ]
keyword[else] :
identifier[newd] [ identifier[k] ]= identifier[_normalize] ( identifier[v] )
keyword[else] :
identifier[newd] [ identifier[k] ]= identifier[v]
keyword[return] identifier[newd] | def _normalize(d):
"""
The above parse function generates output of list in dict form
i.e. {'abc' : {0: 'xyz', 1: 'pqr'}}. This function normalize it and turn
them into proper data type, i.e. {'abc': ['xyz', 'pqr']}
Note: if dict has element starts with 10, 11 etc.. this function won't fill
blanks.
for eg: {'abc': {10: 'xyz', 12: 'pqr'}} will convert to
{'abc': ['xyz', 'pqr']}
"""
newd = {}
if isinstance(d, dict) == False:
return d # depends on [control=['if'], data=[]] # if dictionary. iterate over each element and append to newd
for (k, v) in six.iteritems(d):
if isinstance(v, dict):
first_key = next(iter(six.viewkeys(v)))
if isinstance(first_key, int):
temp_new = []
for (k1, v1) in v.items():
temp_new.append(_normalize(v1)) # depends on [control=['for'], data=[]]
newd[k] = temp_new # depends on [control=['if'], data=[]]
elif first_key == '':
newd[k] = v.values()[0] # depends on [control=['if'], data=[]]
else:
newd[k] = _normalize(v) # depends on [control=['if'], data=[]]
else:
newd[k] = v # depends on [control=['for'], data=[]]
return newd |
def get_assessments_taken_by_genus_type(self, assessment_taken_genus_type):
    """Gets an ``AssessmentTakenList`` corresponding to the given assessment taken genus ``Type`` which does not include assessments of types derived from the specified ``Type``.
    In plenary mode, the returned list contains all known
    assessments taken or an error results. Otherwise, the returned
    list may contain only those assessments taken that are
    accessible through this session.
    arg: assessment_taken_genus_type (osid.type.Type): an
        assessment taken genus type
    return: (osid.assessment.AssessmentTakenList) - the returned
        ``AssessmentTaken list``
    raise: NullArgument - ``assessment_taken_genus_type`` is
        ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure occurred
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.ResourceLookupSession.get_resources_by_genus_type
    # NOTE: This implementation currently ignores plenary view
    collection = JSONClientValidated('assessment',
                                     collection='AssessmentTaken',
                                     runtime=self._runtime)
    # Build the query: genus filter first, then the session's view
    # filter (which may override keys, matching the original dict(**) form).
    query = {'genusTypeId': str(assessment_taken_genus_type)}
    query.update(self._view_filter())
    result = collection.find(query).sort('_id', DESCENDING)
    return objects.AssessmentTakenList(result, runtime=self._runtime, proxy=self._proxy)
constant[Gets an ``AssessmentTakenList`` corresponding to the given assessment taken genus ``Type`` which does not include assessments of types derived from the specified ``Type``.
In plenary mode, the returned list contains all known
assessments taken or an error results. Otherwise, the returned
list may contain only those assessments taken that are
accessible through this session.
arg: assessment_taken_genus_type (osid.type.Type): an
assessment taken genus type
return: (osid.assessment.AssessmentTakenList) - the returned
``AssessmentTaken list``
raise: NullArgument - ``assessment_taken_genus_type`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
]
variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[assessment]]]
variable[result] assign[=] call[call[name[collection].find, parameter[call[name[dict], parameter[dictionary[[<ast.Constant object at 0x7da20c7c9030>], [<ast.Call object at 0x7da20c7cbf70>]]]]]].sort, parameter[constant[_id], name[DESCENDING]]]
return[call[name[objects].AssessmentTakenList, parameter[name[result]]]] | keyword[def] identifier[get_assessments_taken_by_genus_type] ( identifier[self] , identifier[assessment_taken_genus_type] ):
literal[string]
identifier[collection] = identifier[JSONClientValidated] ( literal[string] ,
identifier[collection] = literal[string] ,
identifier[runtime] = identifier[self] . identifier[_runtime] )
identifier[result] = identifier[collection] . identifier[find] (
identifier[dict] ({ literal[string] : identifier[str] ( identifier[assessment_taken_genus_type] )},
** identifier[self] . identifier[_view_filter] ())). identifier[sort] ( literal[string] , identifier[DESCENDING] )
keyword[return] identifier[objects] . identifier[AssessmentTakenList] ( identifier[result] , identifier[runtime] = identifier[self] . identifier[_runtime] , identifier[proxy] = identifier[self] . identifier[_proxy] ) | def get_assessments_taken_by_genus_type(self, assessment_taken_genus_type):
"""Gets an ``AssessmentTakenList`` corresponding to the given assessment taken genus ``Type`` which does not include assessments of types derived from the specified ``Type``.
In plenary mode, the returned list contains all known
assessments taken or an error results. Otherwise, the returned
list may contain only those assessments taken that are
accessible through this session.
arg: assessment_taken_genus_type (osid.type.Type): an
assessment taken genus type
return: (osid.assessment.AssessmentTakenList) - the returned
``AssessmentTaken list``
raise: NullArgument - ``assessment_taken_genus_type`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_genus_type
# NOTE: This implementation currently ignores plenary view
collection = JSONClientValidated('assessment', collection='AssessmentTaken', runtime=self._runtime)
result = collection.find(dict({'genusTypeId': str(assessment_taken_genus_type)}, **self._view_filter())).sort('_id', DESCENDING)
return objects.AssessmentTakenList(result, runtime=self._runtime, proxy=self._proxy) |
def _lmder1_powell_singular():
    """Powell's singular function (lmder test #6). Don't run this as a
    test, since it just zooms to zero parameters. The precise results
    depend a lot on nitty-gritty rounding and tolerances and things."""
    def func(params, vec):
        # Residuals of Powell's singular function (MINPACK test #6).
        vec[0] = params[0] + 10 * params[1]
        vec[1] = np.sqrt(5) * (params[2] - params[3])
        vec[2] = (params[1] - 2 * params[2])**2
        vec[3] = np.sqrt(10) * (params[0] - params[3])**2
    def jac(params, jac):
        # Transposed analytic Jacobian: jac[i,j] = d vec[j] / d params[i].
        jac.fill(0)
        jac[0,0] = 1
        jac[0,3] = 2 * np.sqrt(10) * (params[0] - params[3])
        jac[1,0] = 10
        jac[1,2] = 2 * (params[1] - 2 * params[2])
        jac[2,1] = np.sqrt(5)
        # BUGFIX: d vec[2]/d params[2] = -4*(p1 - 2*p2) = -2 * jac[1,2].
        # Previously this read jac[2,1] (-2*sqrt(5), a constant), which is
        # not the derivative of the squared residual.
        jac[2,2] = -2 * jac[1,2]
        jac[3,1] = -np.sqrt(5)
        # BUGFIX: d vec[3]/d params[3] = -2*sqrt(10)*(p0 - p3) = -jac[0,3].
        # Previously this read jac[3,0], which is always zero after fill(0).
        jac[3,3] = -jac[0,3]
    guess = np.asfarray([3, -1, 0, 1])
    _lmder1_test(4, func, jac, guess)
    _lmder1_test(4, func, jac, guess * 10)
    _lmder1_test(4, func, jac, guess * 100)
constant[Powell's singular function (lmder test #6). Don't run this as a
test, since it just zooms to zero parameters. The precise results
depend a lot on nitty-gritty rounding and tolerances and things.]
def function[func, parameter[params, vec]]:
call[name[vec]][constant[0]] assign[=] binary_operation[call[name[params]][constant[0]] + binary_operation[constant[10] * call[name[params]][constant[1]]]]
call[name[vec]][constant[1]] assign[=] binary_operation[call[name[np].sqrt, parameter[constant[5]]] * binary_operation[call[name[params]][constant[2]] - call[name[params]][constant[3]]]]
call[name[vec]][constant[2]] assign[=] binary_operation[binary_operation[call[name[params]][constant[1]] - binary_operation[constant[2] * call[name[params]][constant[2]]]] ** constant[2]]
call[name[vec]][constant[3]] assign[=] binary_operation[call[name[np].sqrt, parameter[constant[10]]] * binary_operation[binary_operation[call[name[params]][constant[0]] - call[name[params]][constant[3]]] ** constant[2]]]
def function[jac, parameter[params, jac]]:
call[name[jac].fill, parameter[constant[0]]]
call[name[jac]][tuple[[<ast.Constant object at 0x7da1b27a5a20>, <ast.Constant object at 0x7da1b27a6b60>]]] assign[=] constant[1]
call[name[jac]][tuple[[<ast.Constant object at 0x7da1b27a60e0>, <ast.Constant object at 0x7da1b27a6e90>]]] assign[=] binary_operation[binary_operation[constant[2] * call[name[np].sqrt, parameter[constant[10]]]] * binary_operation[call[name[params]][constant[0]] - call[name[params]][constant[3]]]]
call[name[jac]][tuple[[<ast.Constant object at 0x7da1b27a6200>, <ast.Constant object at 0x7da1b27a5f60>]]] assign[=] constant[10]
call[name[jac]][tuple[[<ast.Constant object at 0x7da1b27a7520>, <ast.Constant object at 0x7da1b27a7190>]]] assign[=] binary_operation[constant[2] * binary_operation[call[name[params]][constant[1]] - binary_operation[constant[2] * call[name[params]][constant[2]]]]]
call[name[jac]][tuple[[<ast.Constant object at 0x7da1b27a61d0>, <ast.Constant object at 0x7da1b27a6dd0>]]] assign[=] call[name[np].sqrt, parameter[constant[5]]]
call[name[jac]][tuple[[<ast.Constant object at 0x7da1b27a6020>, <ast.Constant object at 0x7da1b27a6e30>]]] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b27a7220> * call[name[jac]][tuple[[<ast.Constant object at 0x7da1b27a5b70>, <ast.Constant object at 0x7da1b2795420>]]]]
call[name[jac]][tuple[[<ast.Constant object at 0x7da1b2795810>, <ast.Constant object at 0x7da1b27966e0>]]] assign[=] <ast.UnaryOp object at 0x7da1b27972e0>
call[name[jac]][tuple[[<ast.Constant object at 0x7da1b2795300>, <ast.Constant object at 0x7da1b2795780>]]] assign[=] <ast.UnaryOp object at 0x7da1b2795990>
variable[guess] assign[=] call[name[np].asfarray, parameter[list[[<ast.Constant object at 0x7da1b2796b90>, <ast.UnaryOp object at 0x7da1b2795e10>, <ast.Constant object at 0x7da1b2797490>, <ast.Constant object at 0x7da1b2796a10>]]]]
call[name[_lmder1_test], parameter[constant[4], name[func], name[jac], name[guess]]]
call[name[_lmder1_test], parameter[constant[4], name[func], name[jac], binary_operation[name[guess] * constant[10]]]]
call[name[_lmder1_test], parameter[constant[4], name[func], name[jac], binary_operation[name[guess] * constant[100]]]] | keyword[def] identifier[_lmder1_powell_singular] ():
literal[string]
keyword[def] identifier[func] ( identifier[params] , identifier[vec] ):
identifier[vec] [ literal[int] ]= identifier[params] [ literal[int] ]+ literal[int] * identifier[params] [ literal[int] ]
identifier[vec] [ literal[int] ]= identifier[np] . identifier[sqrt] ( literal[int] )*( identifier[params] [ literal[int] ]- identifier[params] [ literal[int] ])
identifier[vec] [ literal[int] ]=( identifier[params] [ literal[int] ]- literal[int] * identifier[params] [ literal[int] ])** literal[int]
identifier[vec] [ literal[int] ]= identifier[np] . identifier[sqrt] ( literal[int] )*( identifier[params] [ literal[int] ]- identifier[params] [ literal[int] ])** literal[int]
keyword[def] identifier[jac] ( identifier[params] , identifier[jac] ):
identifier[jac] . identifier[fill] ( literal[int] )
identifier[jac] [ literal[int] , literal[int] ]= literal[int]
identifier[jac] [ literal[int] , literal[int] ]= literal[int] * identifier[np] . identifier[sqrt] ( literal[int] )*( identifier[params] [ literal[int] ]- identifier[params] [ literal[int] ])
identifier[jac] [ literal[int] , literal[int] ]= literal[int]
identifier[jac] [ literal[int] , literal[int] ]= literal[int] *( identifier[params] [ literal[int] ]- literal[int] * identifier[params] [ literal[int] ])
identifier[jac] [ literal[int] , literal[int] ]= identifier[np] . identifier[sqrt] ( literal[int] )
identifier[jac] [ literal[int] , literal[int] ]=- literal[int] * identifier[jac] [ literal[int] , literal[int] ]
identifier[jac] [ literal[int] , literal[int] ]=- identifier[np] . identifier[sqrt] ( literal[int] )
identifier[jac] [ literal[int] , literal[int] ]=- identifier[jac] [ literal[int] , literal[int] ]
identifier[guess] = identifier[np] . identifier[asfarray] ([ literal[int] ,- literal[int] , literal[int] , literal[int] ])
identifier[_lmder1_test] ( literal[int] , identifier[func] , identifier[jac] , identifier[guess] )
identifier[_lmder1_test] ( literal[int] , identifier[func] , identifier[jac] , identifier[guess] * literal[int] )
identifier[_lmder1_test] ( literal[int] , identifier[func] , identifier[jac] , identifier[guess] * literal[int] ) | def _lmder1_powell_singular():
"""Powell's singular function (lmder test #6). Don't run this as a
test, since it just zooms to zero parameters. The precise results
depend a lot on nitty-gritty rounding and tolerances and things."""
def func(params, vec):
vec[0] = params[0] + 10 * params[1]
vec[1] = np.sqrt(5) * (params[2] - params[3])
vec[2] = (params[1] - 2 * params[2]) ** 2
vec[3] = np.sqrt(10) * (params[0] - params[3]) ** 2
def jac(params, jac):
jac.fill(0)
jac[0, 0] = 1
jac[0, 3] = 2 * np.sqrt(10) * (params[0] - params[3])
jac[1, 0] = 10
jac[1, 2] = 2 * (params[1] - 2 * params[2])
jac[2, 1] = np.sqrt(5)
jac[2, 2] = -2 * jac[2, 1]
jac[3, 1] = -np.sqrt(5)
jac[3, 3] = -jac[3, 0]
guess = np.asfarray([3, -1, 0, 1])
_lmder1_test(4, func, jac, guess)
_lmder1_test(4, func, jac, guess * 10)
_lmder1_test(4, func, jac, guess * 100) |
def newer_pairwise_group(sources_groups, targets):
    """Walk both sequences in lockstep and keep each (source group, target)
    pair whose sources are newer than the target, per the semantics of
    'newer_group()'. Returns the filtered pair of lists
    (sources_groups, targets).
    """
    if len(sources_groups) != len(targets):
        raise ValueError("'sources_group' and 'targets' must be the same length")
    # Collect only the pairs where the source group is newer.
    fresh_sources = []
    fresh_targets = []
    for group, target in zip(sources_groups, targets):
        if newer_group(group, target):
            fresh_sources.append(group)
            fresh_targets.append(target)
    return fresh_sources, fresh_targets
constant[Walk both arguments in parallel, testing if each source group is newer
than its corresponding target. Returns a pair of lists (sources_groups,
targets) where sources is newer than target, according to the semantics
of 'newer_group()'.
]
if compare[call[name[len], parameter[name[sources_groups]]] not_equal[!=] call[name[len], parameter[name[targets]]]] begin[:]
<ast.Raise object at 0x7da1b1b17040>
variable[n_sources] assign[=] list[[]]
variable[n_targets] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[sources_groups]]]]]] begin[:]
if call[name[newer_group], parameter[call[name[sources_groups]][name[i]], call[name[targets]][name[i]]]] begin[:]
call[name[n_sources].append, parameter[call[name[sources_groups]][name[i]]]]
call[name[n_targets].append, parameter[call[name[targets]][name[i]]]]
return[tuple[[<ast.Name object at 0x7da1b1b154b0>, <ast.Name object at 0x7da1b1b14970>]]] | keyword[def] identifier[newer_pairwise_group] ( identifier[sources_groups] , identifier[targets] ):
literal[string]
keyword[if] identifier[len] ( identifier[sources_groups] )!= identifier[len] ( identifier[targets] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[n_sources] =[]
identifier[n_targets] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[sources_groups] )):
keyword[if] identifier[newer_group] ( identifier[sources_groups] [ identifier[i] ], identifier[targets] [ identifier[i] ]):
identifier[n_sources] . identifier[append] ( identifier[sources_groups] [ identifier[i] ])
identifier[n_targets] . identifier[append] ( identifier[targets] [ identifier[i] ])
keyword[return] identifier[n_sources] , identifier[n_targets] | def newer_pairwise_group(sources_groups, targets):
"""Walk both arguments in parallel, testing if each source group is newer
than its corresponding target. Returns a pair of lists (sources_groups,
targets) where sources is newer than target, according to the semantics
of 'newer_group()'.
"""
if len(sources_groups) != len(targets):
raise ValueError("'sources_group' and 'targets' must be the same length") # depends on [control=['if'], data=[]]
# build a pair of lists (sources_groups, targets) where source is newer
n_sources = []
n_targets = []
for i in range(len(sources_groups)):
if newer_group(sources_groups[i], targets[i]):
n_sources.append(sources_groups[i])
n_targets.append(targets[i]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return (n_sources, n_targets) |
def simplified_pos(pos, tagset=None):
    """
    Map a full POS tag `pos` from tagset `tagset` to a simplified tag.
    The WordNet tagset is assumed by default.
    Default mapping (WordNet / STTS / unknown):
    - N... (noun) tags        -> 'N'
    - V... (verb) tags        -> 'V'
    - ADJ... (adjective) tags -> 'ADJ'
    - ADV... (adverb) tags    -> 'ADV'
    - everything else         -> None
    With `tagset == 'penn'`:
    - N... (noun) tags        -> 'N'
    - V... (verb) tags        -> 'V'
    - JJ... (adjective) tags  -> 'ADJ'
    - RB... (adverb) tags     -> 'ADV'
    - everything else         -> None
    """
    if tagset == 'penn':
        if pos.startswith(('N', 'V')):
            return pos[0]
        if pos.startswith('JJ'):
            return 'ADJ'
        if pos.startswith('RB'):
            return 'ADV'
        return None
    # default: WordNet, STTS or unknown tagset
    if pos.startswith(('N', 'V')):
        return pos[0]
    if pos.startswith(('ADJ', 'ADV')):
        return pos[:3]
    return None
constant[
Return a simplified POS tag for a full POS tag `pos` belonging to a tagset `tagset`. By default the WordNet
tagset is assumed.
Does the following conversion by default:
- all N... (noun) tags to 'N'
- all V... (verb) tags to 'V'
- all ADJ... (adjective) tags to 'ADJ'
- all ADV... (adverb) tags to 'ADV'
- all other to None
Does the following conversion by with `tagset=='penn'`:
- all N... (noun) tags to 'N'
- all V... (verb) tags to 'V'
- all JJ... (adjective) tags to 'ADJ'
- all RB... (adverb) tags to 'ADV'
- all other to None
]
if compare[name[tagset] equal[==] constant[penn]] begin[:]
if <ast.BoolOp object at 0x7da2041d97e0> begin[:]
return[call[name[pos]][constant[0]]] | keyword[def] identifier[simplified_pos] ( identifier[pos] , identifier[tagset] = keyword[None] ):
literal[string]
keyword[if] identifier[tagset] == literal[string] :
keyword[if] identifier[pos] . identifier[startswith] ( literal[string] ) keyword[or] identifier[pos] . identifier[startswith] ( literal[string] ):
keyword[return] identifier[pos] [ literal[int] ]
keyword[elif] identifier[pos] . identifier[startswith] ( literal[string] ):
keyword[return] literal[string]
keyword[elif] identifier[pos] . identifier[startswith] ( literal[string] ):
keyword[return] literal[string]
keyword[else] :
keyword[return] keyword[None]
keyword[else] :
keyword[if] identifier[pos] . identifier[startswith] ( literal[string] ) keyword[or] identifier[pos] . identifier[startswith] ( literal[string] ):
keyword[return] identifier[pos] [ literal[int] ]
keyword[elif] identifier[pos] . identifier[startswith] ( literal[string] ) keyword[or] identifier[pos] . identifier[startswith] ( literal[string] ):
keyword[return] identifier[pos] [: literal[int] ]
keyword[else] :
keyword[return] keyword[None] | def simplified_pos(pos, tagset=None):
"""
Return a simplified POS tag for a full POS tag `pos` belonging to a tagset `tagset`. By default the WordNet
tagset is assumed.
Does the following conversion by default:
- all N... (noun) tags to 'N'
- all V... (verb) tags to 'V'
- all ADJ... (adjective) tags to 'ADJ'
- all ADV... (adverb) tags to 'ADV'
- all other to None
Does the following conversion by with `tagset=='penn'`:
- all N... (noun) tags to 'N'
- all V... (verb) tags to 'V'
- all JJ... (adjective) tags to 'ADJ'
- all RB... (adverb) tags to 'ADV'
- all other to None
"""
if tagset == 'penn':
if pos.startswith('N') or pos.startswith('V'):
return pos[0] # depends on [control=['if'], data=[]]
elif pos.startswith('JJ'):
return 'ADJ' # depends on [control=['if'], data=[]]
elif pos.startswith('RB'):
return 'ADV' # depends on [control=['if'], data=[]]
else:
return None # depends on [control=['if'], data=[]] # default: WordNet, STTS or unknown
elif pos.startswith('N') or pos.startswith('V'):
return pos[0] # depends on [control=['if'], data=[]]
elif pos.startswith('ADJ') or pos.startswith('ADV'):
return pos[:3] # depends on [control=['if'], data=[]]
else:
return None |
def update_playlist(self, access_token, playlist_id, title,
tags=None, category=None, description=None):
"""doc: http://open.youku.com/docs/doc?id=73
"""
url = 'https://openapi.youku.com/v2/playlists/update.json'
data = {
'client_id': self.client_id,
'access_token': access_token,
'playlist_id': playlist_id,
'title': title,
'tags': tags,
'category': category,
'description': description
}
data = remove_none_value(data)
r = requests.post(url, data=data)
check_error(r)
return r.json()['id'] | def function[update_playlist, parameter[self, access_token, playlist_id, title, tags, category, description]]:
constant[doc: http://open.youku.com/docs/doc?id=73
]
variable[url] assign[=] constant[https://openapi.youku.com/v2/playlists/update.json]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b2604f10>, <ast.Constant object at 0x7da1b2607f40>, <ast.Constant object at 0x7da1b26068f0>, <ast.Constant object at 0x7da1b2605e40>, <ast.Constant object at 0x7da1b2607e50>, <ast.Constant object at 0x7da1b26047c0>, <ast.Constant object at 0x7da1b2604670>], [<ast.Attribute object at 0x7da1b2606740>, <ast.Name object at 0x7da1b2604820>, <ast.Name object at 0x7da1b2605780>, <ast.Name object at 0x7da1b26043d0>, <ast.Name object at 0x7da1b26064a0>, <ast.Name object at 0x7da1b2606320>, <ast.Name object at 0x7da1b2607ee0>]]
variable[data] assign[=] call[name[remove_none_value], parameter[name[data]]]
variable[r] assign[=] call[name[requests].post, parameter[name[url]]]
call[name[check_error], parameter[name[r]]]
return[call[call[name[r].json, parameter[]]][constant[id]]] | keyword[def] identifier[update_playlist] ( identifier[self] , identifier[access_token] , identifier[playlist_id] , identifier[title] ,
identifier[tags] = keyword[None] , identifier[category] = keyword[None] , identifier[description] = keyword[None] ):
literal[string]
identifier[url] = literal[string]
identifier[data] ={
literal[string] : identifier[self] . identifier[client_id] ,
literal[string] : identifier[access_token] ,
literal[string] : identifier[playlist_id] ,
literal[string] : identifier[title] ,
literal[string] : identifier[tags] ,
literal[string] : identifier[category] ,
literal[string] : identifier[description]
}
identifier[data] = identifier[remove_none_value] ( identifier[data] )
identifier[r] = identifier[requests] . identifier[post] ( identifier[url] , identifier[data] = identifier[data] )
identifier[check_error] ( identifier[r] )
keyword[return] identifier[r] . identifier[json] ()[ literal[string] ] | def update_playlist(self, access_token, playlist_id, title, tags=None, category=None, description=None):
"""doc: http://open.youku.com/docs/doc?id=73
"""
url = 'https://openapi.youku.com/v2/playlists/update.json'
data = {'client_id': self.client_id, 'access_token': access_token, 'playlist_id': playlist_id, 'title': title, 'tags': tags, 'category': category, 'description': description}
data = remove_none_value(data)
r = requests.post(url, data=data)
check_error(r)
return r.json()['id'] |
def connected(self, msg):
"""Once I've connected I want to subscribe to my the message queue.
"""
stomper.Engine.connected(self, msg)
self.log.info("Connected: session %s. Beginning say hello." % msg['headers']['session'])
def setup_looping_call():
lc = LoopingCall(self.send)
lc.start(2)
reactor.callLater(1, setup_looping_call)
f = stomper.Frame()
f.unpack(stomper.subscribe(DESTINATION))
# ActiveMQ specific headers:
#
# prevent the messages we send comming back to us.
f.headers['activemq.noLocal'] = 'true'
return f.pack() | def function[connected, parameter[self, msg]]:
constant[Once I've connected I want to subscribe to my the message queue.
]
call[name[stomper].Engine.connected, parameter[name[self], name[msg]]]
call[name[self].log.info, parameter[binary_operation[constant[Connected: session %s. Beginning say hello.] <ast.Mod object at 0x7da2590d6920> call[call[name[msg]][constant[headers]]][constant[session]]]]]
def function[setup_looping_call, parameter[]]:
variable[lc] assign[=] call[name[LoopingCall], parameter[name[self].send]]
call[name[lc].start, parameter[constant[2]]]
call[name[reactor].callLater, parameter[constant[1], name[setup_looping_call]]]
variable[f] assign[=] call[name[stomper].Frame, parameter[]]
call[name[f].unpack, parameter[call[name[stomper].subscribe, parameter[name[DESTINATION]]]]]
call[name[f].headers][constant[activemq.noLocal]] assign[=] constant[true]
return[call[name[f].pack, parameter[]]] | keyword[def] identifier[connected] ( identifier[self] , identifier[msg] ):
literal[string]
identifier[stomper] . identifier[Engine] . identifier[connected] ( identifier[self] , identifier[msg] )
identifier[self] . identifier[log] . identifier[info] ( literal[string] % identifier[msg] [ literal[string] ][ literal[string] ])
keyword[def] identifier[setup_looping_call] ():
identifier[lc] = identifier[LoopingCall] ( identifier[self] . identifier[send] )
identifier[lc] . identifier[start] ( literal[int] )
identifier[reactor] . identifier[callLater] ( literal[int] , identifier[setup_looping_call] )
identifier[f] = identifier[stomper] . identifier[Frame] ()
identifier[f] . identifier[unpack] ( identifier[stomper] . identifier[subscribe] ( identifier[DESTINATION] ))
identifier[f] . identifier[headers] [ literal[string] ]= literal[string]
keyword[return] identifier[f] . identifier[pack] () | def connected(self, msg):
"""Once I've connected I want to subscribe to my the message queue.
"""
stomper.Engine.connected(self, msg)
self.log.info('Connected: session %s. Beginning say hello.' % msg['headers']['session'])
def setup_looping_call():
lc = LoopingCall(self.send)
lc.start(2)
reactor.callLater(1, setup_looping_call)
f = stomper.Frame()
f.unpack(stomper.subscribe(DESTINATION))
# ActiveMQ specific headers:
#
# prevent the messages we send comming back to us.
f.headers['activemq.noLocal'] = 'true'
return f.pack() |
def is_list(node):
"""Does the node represent a list literal?"""
return (isinstance(node, Node)
and len(node.children) > 1
and isinstance(node.children[0], Leaf)
and isinstance(node.children[-1], Leaf)
and node.children[0].value == u"["
and node.children[-1].value == u"]") | def function[is_list, parameter[node]]:
constant[Does the node represent a list literal?]
return[<ast.BoolOp object at 0x7da18dc07580>] | keyword[def] identifier[is_list] ( identifier[node] ):
literal[string]
keyword[return] ( identifier[isinstance] ( identifier[node] , identifier[Node] )
keyword[and] identifier[len] ( identifier[node] . identifier[children] )> literal[int]
keyword[and] identifier[isinstance] ( identifier[node] . identifier[children] [ literal[int] ], identifier[Leaf] )
keyword[and] identifier[isinstance] ( identifier[node] . identifier[children] [- literal[int] ], identifier[Leaf] )
keyword[and] identifier[node] . identifier[children] [ literal[int] ]. identifier[value] == literal[string]
keyword[and] identifier[node] . identifier[children] [- literal[int] ]. identifier[value] == literal[string] ) | def is_list(node):
"""Does the node represent a list literal?"""
return isinstance(node, Node) and len(node.children) > 1 and isinstance(node.children[0], Leaf) and isinstance(node.children[-1], Leaf) and (node.children[0].value == u'[') and (node.children[-1].value == u']') |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.