code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def _convert_xml_to_service_stats(response):
    '''
    Parse a service-stats XML response body into a ServiceStats object.

    Returns None when there is no response or no body. Expected payload:

    <?xml version="1.0" encoding="utf-8"?>
    <StorageServiceStats>
      <GeoReplication>
          <Status>live|bootstrap|unavailable</Status>
          <LastSyncTime>sync-time|<empty></LastSyncTime>
      </GeoReplication>
    </StorageServiceStats>
    '''
    if response is None or response.body is None:
        return None
    # Parse the XML payload and drill into the single GeoReplication element.
    root = ETree.fromstring(response.body)
    geo_elem = root.find('GeoReplication')
    # Build the GeoReplication value object from the two child elements.
    geo = GeoReplication()
    geo.status = geo_elem.find('Status').text
    geo.last_sync_time = parser.parse(geo_elem.find('LastSyncTime').text)
    # Wrap it in the top-level ServiceStats container.
    stats = ServiceStats()
    stats.geo_replication = geo
    return stats
constant[
<?xml version="1.0" encoding="utf-8"?>
<StorageServiceStats>
<GeoReplication>
<Status>live|bootstrap|unavailable</Status>
<LastSyncTime>sync-time|<empty></LastSyncTime>
</GeoReplication>
</StorageServiceStats>
]
if <ast.BoolOp object at 0x7da18f58c1f0> begin[:]
return[constant[None]]
variable[service_stats_element] assign[=] call[name[ETree].fromstring, parameter[name[response].body]]
variable[geo_replication_element] assign[=] call[name[service_stats_element].find, parameter[constant[GeoReplication]]]
variable[geo_replication] assign[=] call[name[GeoReplication], parameter[]]
name[geo_replication].status assign[=] call[name[geo_replication_element].find, parameter[constant[Status]]].text
name[geo_replication].last_sync_time assign[=] call[name[parser].parse, parameter[call[name[geo_replication_element].find, parameter[constant[LastSyncTime]]].text]]
variable[service_stats] assign[=] call[name[ServiceStats], parameter[]]
name[service_stats].geo_replication assign[=] name[geo_replication]
return[name[service_stats]] | keyword[def] identifier[_convert_xml_to_service_stats] ( identifier[response] ):
literal[string]
keyword[if] identifier[response] keyword[is] keyword[None] keyword[or] identifier[response] . identifier[body] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[service_stats_element] = identifier[ETree] . identifier[fromstring] ( identifier[response] . identifier[body] )
identifier[geo_replication_element] = identifier[service_stats_element] . identifier[find] ( literal[string] )
identifier[geo_replication] = identifier[GeoReplication] ()
identifier[geo_replication] . identifier[status] = identifier[geo_replication_element] . identifier[find] ( literal[string] ). identifier[text]
identifier[geo_replication] . identifier[last_sync_time] = identifier[parser] . identifier[parse] ( identifier[geo_replication_element] . identifier[find] ( literal[string] ). identifier[text] )
identifier[service_stats] = identifier[ServiceStats] ()
identifier[service_stats] . identifier[geo_replication] = identifier[geo_replication]
keyword[return] identifier[service_stats] | def _convert_xml_to_service_stats(response):
"""
<?xml version="1.0" encoding="utf-8"?>
<StorageServiceStats>
<GeoReplication>
<Status>live|bootstrap|unavailable</Status>
<LastSyncTime>sync-time|<empty></LastSyncTime>
</GeoReplication>
</StorageServiceStats>
"""
if response is None or response.body is None:
return None # depends on [control=['if'], data=[]]
service_stats_element = ETree.fromstring(response.body)
geo_replication_element = service_stats_element.find('GeoReplication')
geo_replication = GeoReplication()
geo_replication.status = geo_replication_element.find('Status').text
geo_replication.last_sync_time = parser.parse(geo_replication_element.find('LastSyncTime').text)
service_stats = ServiceStats()
service_stats.geo_replication = geo_replication
return service_stats |
def _sync_labels(self, labels_json):
    """Populate the user's labels from a JSON encoded list.

    Each record in ``labels_json`` is expected to carry an ``'id'`` key,
    which becomes the lookup key in ``self.labels``.
    """
    for entry in labels_json:
        # Wrap every JSON record in a Label bound back to this user.
        self.labels[entry['id']] = Label(entry, self)
constant["Populate the user's labels from a JSON encoded list.]
for taget[name[label_json]] in starred[name[labels_json]] begin[:]
variable[label_id] assign[=] call[name[label_json]][constant[id]]
call[name[self].labels][name[label_id]] assign[=] call[name[Label], parameter[name[label_json], name[self]]] | keyword[def] identifier[_sync_labels] ( identifier[self] , identifier[labels_json] ):
literal[string]
keyword[for] identifier[label_json] keyword[in] identifier[labels_json] :
identifier[label_id] = identifier[label_json] [ literal[string] ]
identifier[self] . identifier[labels] [ identifier[label_id] ]= identifier[Label] ( identifier[label_json] , identifier[self] ) | def _sync_labels(self, labels_json):
""""Populate the user's labels from a JSON encoded list."""
for label_json in labels_json:
label_id = label_json['id']
self.labels[label_id] = Label(label_json, self) # depends on [control=['for'], data=['label_json']] |
def plot_vs_years(tt, step = None, ax=None, confidence=None, ticks=True, **kwargs):
    '''
    Converts branch length to years and plots the time tree on a time axis.
    Parameters
    ----------
    tt : TreeTime object
        A TreeTime instance after a time tree is inferred
    step : int
        Width of shaded boxes indicating blocks of years. Will be inferred if not specified.
        To switch off drawing of boxes, set to 0
    ax : matplotlib axes
        Axes to be used to plot, will create new axis if None
    confidence : tuple, float
        Draw confidence intervals. This assumes that marginal time tree inference was run.
        Confidence intervals are either specified as an interval of the posterior distribution
        like (0.05, 0.95) or as the weight of the maximal posterior region , e.g. 0.9
    **kwargs : dict
        Key word arguments that are passed down to Phylo.draw

    Returns
    -------
    (fig, ax) where fig is None if an existing axes object was supplied,
    or ttconf.ERROR if the confidence settings are invalid / unavailable.
    '''
    import matplotlib.pyplot as plt
    # Rescale all branch lengths in-place to units of years.
    tt.branch_length_to_years()
    nleafs = tt.tree.count_terminals()
    if ax is None:
        fig = plt.figure(figsize=(12,10))
        ax = plt.subplot(111)
    else:
        fig = None
    # draw tree; leaf labels are only shown for small trees (<30 leaves)
    if "label_func" not in kwargs:
        kwargs["label_func"] = lambda x:x.name if (x.is_terminal() and nleafs<30) else ""
    Phylo.draw(tt.tree, axes=ax, **kwargs)
    # offset: calendar date of the root's parent; the x-axis is measured
    # relative to this so the tree starts at x=0.
    offset = tt.tree.root.numdate - tt.tree.root.branch_length
    date_range = np.max([n.numdate for n in tt.tree.get_terminals()])-offset
    # estimate year intervals if not explicitly specified
    # (also re-infer when the requested step would produce >100 boxes)
    if step is None or (step>0 and date_range/step>100):
        # start from the nearest power of ten below the date range,
        # then refine to give roughly 2-10 intervals; floor at one month.
        step = 10**np.floor(np.log10(date_range))
        if date_range/step<2:
            step/=5
        elif date_range/step<5:
            step/=2
        step = max(1.0/12,step)
    # set axis labels
    if step:
        dtick = step
        # first tick at the largest multiple of step not exceeding offset
        min_tick = step*(offset//step)
        # NOTE(review): both branches of this conditional are identical;
        # the `else` arm was likely meant to be something else (e.g. 0) -- confirm.
        extra = dtick if dtick<date_range else dtick
        tick_vals = np.arange(min_tick, min_tick+date_range+extra, dtick)
        # convert calendar years to axis coordinates (root-parent at x=0)
        xticks = tick_vals - offset
    else:
        # step==0: keep matplotlib's own ticks, but shift them so the
        # labels land on whole multiples of the tick spacing in years.
        xticks = ax.get_xticks()
        dtick = xticks[1]-xticks[0]
        shift = offset - dtick*(offset//dtick)
        xticks -= shift
        tick_vals = [x+offset-shift for x in xticks]
    ax.set_xticks(xticks)
    ax.set_xticklabels(map(str, tick_vals))
    ax.set_xlabel('year')
    ax.set_ylabel('')
    ax.set_xlim((0,date_range))
    # put shaded boxes to delineate years
    if step:
        ylim = ax.get_ylim()
        xlim = ax.get_xlim()
        from matplotlib.patches import Rectangle
        for yi,year in enumerate(np.arange(np.floor(tick_vals[0]), tick_vals[-1]+.01, step)):
            pos = year - offset
            # alternate light/dark grey stripes, oversized vertically so
            # they cover the whole plot area
            r = Rectangle((pos, ylim[1]-5),
                          step, ylim[0]-ylim[1]+10,
                          facecolor=[0.7+0.1*(1+yi%2)] * 3,
                          edgecolor=[1,1,1])
            ax.add_patch(r)
            # label only stripes whose left edge is a tick inside the view
            if year in tick_vals and pos>=xlim[0] and pos<=xlim[1] and ticks:
                label_str = str(step*(year//step)) if step<1 else str(int(year))
                ax.text(pos,ylim[0]-0.04*(ylim[1]-ylim[0]), label_str,
                        horizontalalignment='center')
        ax.set_axis_off()
    # add confidence intervals to the tree graph -- grey bars
    if confidence:
        # assigns y-positions (n.ypos) to clades; required for bar placement
        tree_layout(tt.tree)
        if not hasattr(tt.tree.root, "marginal_inverse_cdf"):
            print("marginal time tree reconstruction required for confidence intervals")
            return ttconf.ERROR
        elif type(confidence) is float:
            # single float: interpreted as max-posterior-region weight
            cfunc = tt.get_max_posterior_region
        elif len(confidence)==2:
            # pair: interpreted as (lower, upper) posterior quantiles
            cfunc = tt.get_confidence_interval
        else:
            print("confidence needs to be either a float (for max posterior region) or a two numbers specifying lower and upper bounds")
            return ttconf.ERROR
        for n in tt.tree.find_clades():
            pos = cfunc(n, confidence)
            # horizontal grey bar spanning the confidence interval at the
            # clade's y-position
            ax.plot(pos-offset, np.ones(len(pos))*n.ypos, lw=3, c=(0.5,0.5,0.5))
return fig, ax | def function[plot_vs_years, parameter[tt, step, ax, confidence, ticks]]:
constant[
Converts branch length to years and plots the time tree on a time axis.
Parameters
----------
tt : TreeTime object
A TreeTime instance after a time tree is inferred
step : int
Width of shaded boxes indicating blocks of years. Will be inferred if not specified.
To switch off drawing of boxes, set to 0
ax : matplotlib axes
Axes to be used to plot, will create new axis if None
confidence : tuple, float
Draw confidence intervals. This assumes that marginal time tree inference was run.
Confidence intervals are either specified as an interval of the posterior distribution
like (0.05, 0.95) or as the weight of the maximal posterior region , e.g. 0.9
**kwargs : dict
Key word arguments that are passed down to Phylo.draw
]
import module[matplotlib.pyplot] as alias[plt]
call[name[tt].branch_length_to_years, parameter[]]
variable[nleafs] assign[=] call[name[tt].tree.count_terminals, parameter[]]
if compare[name[ax] is constant[None]] begin[:]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[plt].subplot, parameter[constant[111]]]
if compare[constant[label_func] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[kwargs]][constant[label_func]] assign[=] <ast.Lambda object at 0x7da1b0144c10>
call[name[Phylo].draw, parameter[name[tt].tree]]
variable[offset] assign[=] binary_operation[name[tt].tree.root.numdate - name[tt].tree.root.branch_length]
variable[date_range] assign[=] binary_operation[call[name[np].max, parameter[<ast.ListComp object at 0x7da1b02dd090>]] - name[offset]]
if <ast.BoolOp object at 0x7da1b02dfdc0> begin[:]
variable[step] assign[=] binary_operation[constant[10] ** call[name[np].floor, parameter[call[name[np].log10, parameter[name[date_range]]]]]]
if compare[binary_operation[name[date_range] / name[step]] less[<] constant[2]] begin[:]
<ast.AugAssign object at 0x7da1b0286aa0>
variable[step] assign[=] call[name[max], parameter[binary_operation[constant[1.0] / constant[12]], name[step]]]
if name[step] begin[:]
variable[dtick] assign[=] name[step]
variable[min_tick] assign[=] binary_operation[name[step] * binary_operation[name[offset] <ast.FloorDiv object at 0x7da2590d6bc0> name[step]]]
variable[extra] assign[=] <ast.IfExp object at 0x7da1b0145c90>
variable[tick_vals] assign[=] call[name[np].arange, parameter[name[min_tick], binary_operation[binary_operation[name[min_tick] + name[date_range]] + name[extra]], name[dtick]]]
variable[xticks] assign[=] binary_operation[name[tick_vals] - name[offset]]
call[name[ax].set_xticks, parameter[name[xticks]]]
call[name[ax].set_xticklabels, parameter[call[name[map], parameter[name[str], name[tick_vals]]]]]
call[name[ax].set_xlabel, parameter[constant[year]]]
call[name[ax].set_ylabel, parameter[constant[]]]
call[name[ax].set_xlim, parameter[tuple[[<ast.Constant object at 0x7da1b0146530>, <ast.Name object at 0x7da1b0144df0>]]]]
if name[step] begin[:]
variable[ylim] assign[=] call[name[ax].get_ylim, parameter[]]
variable[xlim] assign[=] call[name[ax].get_xlim, parameter[]]
from relative_module[matplotlib.patches] import module[Rectangle]
for taget[tuple[[<ast.Name object at 0x7da1b0145ff0>, <ast.Name object at 0x7da1b0146c20>]]] in starred[call[name[enumerate], parameter[call[name[np].arange, parameter[call[name[np].floor, parameter[call[name[tick_vals]][constant[0]]]], binary_operation[call[name[tick_vals]][<ast.UnaryOp object at 0x7da1b0146e00>] + constant[0.01]], name[step]]]]]] begin[:]
variable[pos] assign[=] binary_operation[name[year] - name[offset]]
variable[r] assign[=] call[name[Rectangle], parameter[tuple[[<ast.Name object at 0x7da1b01459c0>, <ast.BinOp object at 0x7da1b01447f0>]], name[step], binary_operation[binary_operation[call[name[ylim]][constant[0]] - call[name[ylim]][constant[1]]] + constant[10]]]]
call[name[ax].add_patch, parameter[name[r]]]
if <ast.BoolOp object at 0x7da1b0144520> begin[:]
variable[label_str] assign[=] <ast.IfExp object at 0x7da1b0144340>
call[name[ax].text, parameter[name[pos], binary_operation[call[name[ylim]][constant[0]] - binary_operation[constant[0.04] * binary_operation[call[name[ylim]][constant[1]] - call[name[ylim]][constant[0]]]]], name[label_str]]]
call[name[ax].set_axis_off, parameter[]]
if name[confidence] begin[:]
call[name[tree_layout], parameter[name[tt].tree]]
if <ast.UnaryOp object at 0x7da1b023cb20> begin[:]
call[name[print], parameter[constant[marginal time tree reconstruction required for confidence intervals]]]
return[name[ttconf].ERROR]
for taget[name[n]] in starred[call[name[tt].tree.find_clades, parameter[]]] begin[:]
variable[pos] assign[=] call[name[cfunc], parameter[name[n], name[confidence]]]
call[name[ax].plot, parameter[binary_operation[name[pos] - name[offset]], binary_operation[call[name[np].ones, parameter[call[name[len], parameter[name[pos]]]]] * name[n].ypos]]]
return[tuple[[<ast.Name object at 0x7da1b021b8b0>, <ast.Name object at 0x7da1b0218580>]]] | keyword[def] identifier[plot_vs_years] ( identifier[tt] , identifier[step] = keyword[None] , identifier[ax] = keyword[None] , identifier[confidence] = keyword[None] , identifier[ticks] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
identifier[tt] . identifier[branch_length_to_years] ()
identifier[nleafs] = identifier[tt] . identifier[tree] . identifier[count_terminals] ()
keyword[if] identifier[ax] keyword[is] keyword[None] :
identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] =( literal[int] , literal[int] ))
identifier[ax] = identifier[plt] . identifier[subplot] ( literal[int] )
keyword[else] :
identifier[fig] = keyword[None]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[kwargs] [ literal[string] ]= keyword[lambda] identifier[x] : identifier[x] . identifier[name] keyword[if] ( identifier[x] . identifier[is_terminal] () keyword[and] identifier[nleafs] < literal[int] ) keyword[else] literal[string]
identifier[Phylo] . identifier[draw] ( identifier[tt] . identifier[tree] , identifier[axes] = identifier[ax] ,** identifier[kwargs] )
identifier[offset] = identifier[tt] . identifier[tree] . identifier[root] . identifier[numdate] - identifier[tt] . identifier[tree] . identifier[root] . identifier[branch_length]
identifier[date_range] = identifier[np] . identifier[max] ([ identifier[n] . identifier[numdate] keyword[for] identifier[n] keyword[in] identifier[tt] . identifier[tree] . identifier[get_terminals] ()])- identifier[offset]
keyword[if] identifier[step] keyword[is] keyword[None] keyword[or] ( identifier[step] > literal[int] keyword[and] identifier[date_range] / identifier[step] > literal[int] ):
identifier[step] = literal[int] ** identifier[np] . identifier[floor] ( identifier[np] . identifier[log10] ( identifier[date_range] ))
keyword[if] identifier[date_range] / identifier[step] < literal[int] :
identifier[step] /= literal[int]
keyword[elif] identifier[date_range] / identifier[step] < literal[int] :
identifier[step] /= literal[int]
identifier[step] = identifier[max] ( literal[int] / literal[int] , identifier[step] )
keyword[if] identifier[step] :
identifier[dtick] = identifier[step]
identifier[min_tick] = identifier[step] *( identifier[offset] // identifier[step] )
identifier[extra] = identifier[dtick] keyword[if] identifier[dtick] < identifier[date_range] keyword[else] identifier[dtick]
identifier[tick_vals] = identifier[np] . identifier[arange] ( identifier[min_tick] , identifier[min_tick] + identifier[date_range] + identifier[extra] , identifier[dtick] )
identifier[xticks] = identifier[tick_vals] - identifier[offset]
keyword[else] :
identifier[xticks] = identifier[ax] . identifier[get_xticks] ()
identifier[dtick] = identifier[xticks] [ literal[int] ]- identifier[xticks] [ literal[int] ]
identifier[shift] = identifier[offset] - identifier[dtick] *( identifier[offset] // identifier[dtick] )
identifier[xticks] -= identifier[shift]
identifier[tick_vals] =[ identifier[x] + identifier[offset] - identifier[shift] keyword[for] identifier[x] keyword[in] identifier[xticks] ]
identifier[ax] . identifier[set_xticks] ( identifier[xticks] )
identifier[ax] . identifier[set_xticklabels] ( identifier[map] ( identifier[str] , identifier[tick_vals] ))
identifier[ax] . identifier[set_xlabel] ( literal[string] )
identifier[ax] . identifier[set_ylabel] ( literal[string] )
identifier[ax] . identifier[set_xlim] (( literal[int] , identifier[date_range] ))
keyword[if] identifier[step] :
identifier[ylim] = identifier[ax] . identifier[get_ylim] ()
identifier[xlim] = identifier[ax] . identifier[get_xlim] ()
keyword[from] identifier[matplotlib] . identifier[patches] keyword[import] identifier[Rectangle]
keyword[for] identifier[yi] , identifier[year] keyword[in] identifier[enumerate] ( identifier[np] . identifier[arange] ( identifier[np] . identifier[floor] ( identifier[tick_vals] [ literal[int] ]), identifier[tick_vals] [- literal[int] ]+ literal[int] , identifier[step] )):
identifier[pos] = identifier[year] - identifier[offset]
identifier[r] = identifier[Rectangle] (( identifier[pos] , identifier[ylim] [ literal[int] ]- literal[int] ),
identifier[step] , identifier[ylim] [ literal[int] ]- identifier[ylim] [ literal[int] ]+ literal[int] ,
identifier[facecolor] =[ literal[int] + literal[int] *( literal[int] + identifier[yi] % literal[int] )]* literal[int] ,
identifier[edgecolor] =[ literal[int] , literal[int] , literal[int] ])
identifier[ax] . identifier[add_patch] ( identifier[r] )
keyword[if] identifier[year] keyword[in] identifier[tick_vals] keyword[and] identifier[pos] >= identifier[xlim] [ literal[int] ] keyword[and] identifier[pos] <= identifier[xlim] [ literal[int] ] keyword[and] identifier[ticks] :
identifier[label_str] = identifier[str] ( identifier[step] *( identifier[year] // identifier[step] )) keyword[if] identifier[step] < literal[int] keyword[else] identifier[str] ( identifier[int] ( identifier[year] ))
identifier[ax] . identifier[text] ( identifier[pos] , identifier[ylim] [ literal[int] ]- literal[int] *( identifier[ylim] [ literal[int] ]- identifier[ylim] [ literal[int] ]), identifier[label_str] ,
identifier[horizontalalignment] = literal[string] )
identifier[ax] . identifier[set_axis_off] ()
keyword[if] identifier[confidence] :
identifier[tree_layout] ( identifier[tt] . identifier[tree] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[tt] . identifier[tree] . identifier[root] , literal[string] ):
identifier[print] ( literal[string] )
keyword[return] identifier[ttconf] . identifier[ERROR]
keyword[elif] identifier[type] ( identifier[confidence] ) keyword[is] identifier[float] :
identifier[cfunc] = identifier[tt] . identifier[get_max_posterior_region]
keyword[elif] identifier[len] ( identifier[confidence] )== literal[int] :
identifier[cfunc] = identifier[tt] . identifier[get_confidence_interval]
keyword[else] :
identifier[print] ( literal[string] )
keyword[return] identifier[ttconf] . identifier[ERROR]
keyword[for] identifier[n] keyword[in] identifier[tt] . identifier[tree] . identifier[find_clades] ():
identifier[pos] = identifier[cfunc] ( identifier[n] , identifier[confidence] )
identifier[ax] . identifier[plot] ( identifier[pos] - identifier[offset] , identifier[np] . identifier[ones] ( identifier[len] ( identifier[pos] ))* identifier[n] . identifier[ypos] , identifier[lw] = literal[int] , identifier[c] =( literal[int] , literal[int] , literal[int] ))
keyword[return] identifier[fig] , identifier[ax] | def plot_vs_years(tt, step=None, ax=None, confidence=None, ticks=True, **kwargs):
"""
Converts branch length to years and plots the time tree on a time axis.
Parameters
----------
tt : TreeTime object
A TreeTime instance after a time tree is inferred
step : int
Width of shaded boxes indicating blocks of years. Will be inferred if not specified.
To switch off drawing of boxes, set to 0
ax : matplotlib axes
Axes to be used to plot, will create new axis if None
confidence : tuple, float
Draw confidence intervals. This assumes that marginal time tree inference was run.
Confidence intervals are either specified as an interval of the posterior distribution
like (0.05, 0.95) or as the weight of the maximal posterior region , e.g. 0.9
**kwargs : dict
Key word arguments that are passed down to Phylo.draw
"""
import matplotlib.pyplot as plt
tt.branch_length_to_years()
nleafs = tt.tree.count_terminals()
if ax is None:
fig = plt.figure(figsize=(12, 10))
ax = plt.subplot(111) # depends on [control=['if'], data=['ax']]
else:
fig = None
# draw tree
if 'label_func' not in kwargs:
kwargs['label_func'] = lambda x: x.name if x.is_terminal() and nleafs < 30 else '' # depends on [control=['if'], data=['kwargs']]
Phylo.draw(tt.tree, axes=ax, **kwargs)
offset = tt.tree.root.numdate - tt.tree.root.branch_length
date_range = np.max([n.numdate for n in tt.tree.get_terminals()]) - offset
# estimate year intervals if not explicitly specified
if step is None or (step > 0 and date_range / step > 100):
step = 10 ** np.floor(np.log10(date_range))
if date_range / step < 2:
step /= 5 # depends on [control=['if'], data=[]]
elif date_range / step < 5:
step /= 2 # depends on [control=['if'], data=[]]
step = max(1.0 / 12, step) # depends on [control=['if'], data=[]]
# set axis labels
if step:
dtick = step
min_tick = step * (offset // step)
extra = dtick if dtick < date_range else dtick
tick_vals = np.arange(min_tick, min_tick + date_range + extra, dtick)
xticks = tick_vals - offset # depends on [control=['if'], data=[]]
else:
xticks = ax.get_xticks()
dtick = xticks[1] - xticks[0]
shift = offset - dtick * (offset // dtick)
xticks -= shift
tick_vals = [x + offset - shift for x in xticks]
ax.set_xticks(xticks)
ax.set_xticklabels(map(str, tick_vals))
ax.set_xlabel('year')
ax.set_ylabel('')
ax.set_xlim((0, date_range))
# put shaded boxes to delineate years
if step:
ylim = ax.get_ylim()
xlim = ax.get_xlim()
from matplotlib.patches import Rectangle
for (yi, year) in enumerate(np.arange(np.floor(tick_vals[0]), tick_vals[-1] + 0.01, step)):
pos = year - offset
r = Rectangle((pos, ylim[1] - 5), step, ylim[0] - ylim[1] + 10, facecolor=[0.7 + 0.1 * (1 + yi % 2)] * 3, edgecolor=[1, 1, 1])
ax.add_patch(r)
if year in tick_vals and pos >= xlim[0] and (pos <= xlim[1]) and ticks:
label_str = str(step * (year // step)) if step < 1 else str(int(year))
ax.text(pos, ylim[0] - 0.04 * (ylim[1] - ylim[0]), label_str, horizontalalignment='center') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
ax.set_axis_off() # depends on [control=['if'], data=[]]
# add confidence intervals to the tree graph -- grey bars
if confidence:
tree_layout(tt.tree)
if not hasattr(tt.tree.root, 'marginal_inverse_cdf'):
print('marginal time tree reconstruction required for confidence intervals')
return ttconf.ERROR # depends on [control=['if'], data=[]]
elif type(confidence) is float:
cfunc = tt.get_max_posterior_region # depends on [control=['if'], data=[]]
elif len(confidence) == 2:
cfunc = tt.get_confidence_interval # depends on [control=['if'], data=[]]
else:
print('confidence needs to be either a float (for max posterior region) or a two numbers specifying lower and upper bounds')
return ttconf.ERROR
for n in tt.tree.find_clades():
pos = cfunc(n, confidence)
ax.plot(pos - offset, np.ones(len(pos)) * n.ypos, lw=3, c=(0.5, 0.5, 0.5)) # depends on [control=['for'], data=['n']] # depends on [control=['if'], data=[]]
return (fig, ax) |
def build_image(self, conf, pushing=False):
    """Build this image.

    Builds the configured image inside a docker context, reporting progress
    through a BuildProgressStream. On interrupt/failure a build intervention
    is staged (if a container was running) before re-raising; KeyboardInterrupt
    is translated into UserQuit. Intermediate images are cleaned up in all
    cases when ``conf.cleanup_intermediate_images`` is set.

    Returns the cached/built image result from NormalBuilder.build.
    """
    with conf.make_context() as context:
        # Fix: bind stream before the try block. Previously it was assigned
        # as the first statement *inside* try, so if BuildProgressStream()
        # raised, the except/finally blocks hit a NameError on `stream`,
        # masking the original error.
        stream = None
        try:
            stream = BuildProgressStream(conf.harpoon.silent_build)
            with self.remove_replaced_images(conf) as info:
                cached = NormalBuilder().build(conf, context, stream)
                info['cached'] = cached
        except (KeyboardInterrupt, Exception) as error:
            # Capture exc_info before the intervention can clobber it.
            exc_info = sys.exc_info()
            if stream is not None and stream.current_container:
                Runner().stage_build_intervention(conf, stream.current_container)
            if isinstance(error, KeyboardInterrupt):
                raise UserQuit()
            else:
                six.reraise(*exc_info)
        finally:
            # Best-effort cleanup: log and continue on per-image failures.
            if stream and stream.intermediate_images and conf.cleanup_intermediate_images:
                for image in stream.intermediate_images:
                    log.info("Deleting intermediate image\timage=%s", image)
                    try:
                        conf.harpoon.docker_api.remove_image(image)
                    except Exception as error:
                        log.error("Failed to remove intermediate image\timage=%s\terror=%s", image, error)
    return cached
constant[Build this image]
with call[name[conf].make_context, parameter[]] begin[:]
<ast.Try object at 0x7da20c7cb5e0>
return[name[cached]] | keyword[def] identifier[build_image] ( identifier[self] , identifier[conf] , identifier[pushing] = keyword[False] ):
literal[string]
keyword[with] identifier[conf] . identifier[make_context] () keyword[as] identifier[context] :
keyword[try] :
identifier[stream] = identifier[BuildProgressStream] ( identifier[conf] . identifier[harpoon] . identifier[silent_build] )
keyword[with] identifier[self] . identifier[remove_replaced_images] ( identifier[conf] ) keyword[as] identifier[info] :
identifier[cached] = identifier[NormalBuilder] (). identifier[build] ( identifier[conf] , identifier[context] , identifier[stream] )
identifier[info] [ literal[string] ]= identifier[cached]
keyword[except] ( identifier[KeyboardInterrupt] , identifier[Exception] ) keyword[as] identifier[error] :
identifier[exc_info] = identifier[sys] . identifier[exc_info] ()
keyword[if] identifier[stream] . identifier[current_container] :
identifier[Runner] (). identifier[stage_build_intervention] ( identifier[conf] , identifier[stream] . identifier[current_container] )
keyword[if] identifier[isinstance] ( identifier[error] , identifier[KeyboardInterrupt] ):
keyword[raise] identifier[UserQuit] ()
keyword[else] :
identifier[six] . identifier[reraise] (* identifier[exc_info] )
keyword[finally] :
keyword[if] identifier[stream] keyword[and] identifier[stream] . identifier[intermediate_images] keyword[and] identifier[conf] . identifier[cleanup_intermediate_images] :
keyword[for] identifier[image] keyword[in] identifier[stream] . identifier[intermediate_images] :
identifier[log] . identifier[info] ( literal[string] , identifier[image] )
keyword[try] :
identifier[conf] . identifier[harpoon] . identifier[docker_api] . identifier[remove_image] ( identifier[image] )
keyword[except] identifier[Exception] keyword[as] identifier[error] :
identifier[log] . identifier[error] ( literal[string] , identifier[image] , identifier[error] )
keyword[return] identifier[cached] | def build_image(self, conf, pushing=False):
"""Build this image"""
with conf.make_context() as context:
try:
stream = BuildProgressStream(conf.harpoon.silent_build)
with self.remove_replaced_images(conf) as info:
cached = NormalBuilder().build(conf, context, stream)
info['cached'] = cached # depends on [control=['with'], data=['info']] # depends on [control=['try'], data=[]]
except (KeyboardInterrupt, Exception) as error:
exc_info = sys.exc_info()
if stream.current_container:
Runner().stage_build_intervention(conf, stream.current_container) # depends on [control=['if'], data=[]]
if isinstance(error, KeyboardInterrupt):
raise UserQuit() # depends on [control=['if'], data=[]]
else:
six.reraise(*exc_info) # depends on [control=['except'], data=['error']]
finally:
if stream and stream.intermediate_images and conf.cleanup_intermediate_images:
for image in stream.intermediate_images:
log.info('Deleting intermediate image\timage=%s', image)
try:
conf.harpoon.docker_api.remove_image(image) # depends on [control=['try'], data=[]]
except Exception as error:
log.error('Failed to remove intermediate image\timage=%s\terror=%s', image, error) # depends on [control=['except'], data=['error']] # depends on [control=['for'], data=['image']] # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['context']]
return cached |
def checkUpdate(self, *args):
    """
    Updates values after first checking instrument parameters are OK.
    This is not integrated within update to prevent ifinite recursion
    since update gets called from ipars.

    Returns True when both the observing and instrument parameters are
    valid, False otherwise (a warning is logged in that case).
    """
    g = get_root(self).globals
    if not self.check():
        g.clog.warn('Current observing parameters are not valid.')
        return False
    if not g.ipars.check():
        g.clog.warn('Current instrument parameters are not valid.')
        return False
    # Fix: the original fell through here returning an implicit None, so a
    # caller testing the result for truthiness saw "failure" even when both
    # checks passed. Return True explicitly on the success path.
    return True
return False | def function[checkUpdate, parameter[self]]:
constant[
Updates values after first checking instrument parameters are OK.
This is not integrated within update to prevent ifinite recursion
since update gets called from ipars.
]
variable[g] assign[=] call[name[get_root], parameter[name[self]]].globals
if <ast.UnaryOp object at 0x7da207f99c90> begin[:]
call[name[g].clog.warn, parameter[constant[Current observing parameters are not valid.]]]
return[constant[False]]
if <ast.UnaryOp object at 0x7da207f9ad70> begin[:]
call[name[g].clog.warn, parameter[constant[Current instrument parameters are not valid.]]]
return[constant[False]] | keyword[def] identifier[checkUpdate] ( identifier[self] ,* identifier[args] ):
literal[string]
identifier[g] = identifier[get_root] ( identifier[self] ). identifier[globals]
keyword[if] keyword[not] identifier[self] . identifier[check] ():
identifier[g] . identifier[clog] . identifier[warn] ( literal[string] )
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[g] . identifier[ipars] . identifier[check] ():
identifier[g] . identifier[clog] . identifier[warn] ( literal[string] )
keyword[return] keyword[False] | def checkUpdate(self, *args):
"""
Updates values after first checking instrument parameters are OK.
This is not integrated within update to prevent ifinite recursion
since update gets called from ipars.
"""
g = get_root(self).globals
if not self.check():
g.clog.warn('Current observing parameters are not valid.')
return False # depends on [control=['if'], data=[]]
if not g.ipars.check():
g.clog.warn('Current instrument parameters are not valid.')
return False # depends on [control=['if'], data=[]] |
def create_objective(self, objective_form):
    """Creates a new ``Objective``.
    arg: objective_form (osid.learning.ObjectiveForm): the form
        for this ``Objective``
    return: (osid.learning.Objective) - the new ``Objective``
    raise: IllegalState - ``objective_form`` already used in a
        create transaction
    raise: InvalidArgument - one or more of the form elements is
        invalid
    raise: NullArgument - ``objective_form`` is ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    raise: Unsupported - ``objective_form`` did not originate from
        ``get_objective_form_for_create()``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Follows the osid.resource.ResourceAdminSession create_resource template.
    collection = JSONClientValidated('learning',
                                     collection='Objective',
                                     runtime=self._runtime)
    # Validate the form's type and purpose before touching session state.
    if not isinstance(objective_form, ABCObjectiveForm):
        raise errors.InvalidArgument('argument type is not an ObjectiveForm')
    if objective_form.is_for_update():
        raise errors.InvalidArgument('the ObjectiveForm is for update only, not create')
    # A form unknown to this session's bookkeeping was not issued here.
    form_identifier = objective_form.get_id().get_identifier()
    try:
        form_state = self._forms[form_identifier]
    except KeyError:
        raise errors.Unsupported('objective_form did not originate from this session')
    if form_state == CREATED:
        raise errors.IllegalState('objective_form already used in a create transaction')
    if not objective_form.is_valid():
        raise errors.InvalidArgument('one or more of the form elements is invalid')
    insert_result = collection.insert_one(objective_form._my_map)
    # Mark the form consumed so a second create attempt raises IllegalState.
    self._forms[form_identifier] = CREATED
    return objects.Objective(
        osid_object_map=collection.find_one({'_id': insert_result.inserted_id}),
        runtime=self._runtime,
        proxy=self._proxy)
constant[Creates a new ``Objective``.
arg: objective_form (osid.learning.ObjectiveForm): the form
for this ``Objective``
return: (osid.learning.Objective) - the new ``Objective``
raise: IllegalState - ``objective_form`` already used in a
create transaction
raise: InvalidArgument - one or more of the form elements is
invalid
raise: NullArgument - ``objective_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``objective_form`` did not originate from
``get_objective_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
]
variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[learning]]]
if <ast.UnaryOp object at 0x7da1b26ac9d0> begin[:]
<ast.Raise object at 0x7da1b26ae740>
if call[name[objective_form].is_for_update, parameter[]] begin[:]
<ast.Raise object at 0x7da1b26afaf0>
<ast.Try object at 0x7da1b26ae770>
if <ast.UnaryOp object at 0x7da1b26ad660> begin[:]
<ast.Raise object at 0x7da1b26ae500>
variable[insert_result] assign[=] call[name[collection].insert_one, parameter[name[objective_form]._my_map]]
call[name[self]._forms][call[call[name[objective_form].get_id, parameter[]].get_identifier, parameter[]]] assign[=] name[CREATED]
variable[result] assign[=] call[name[objects].Objective, parameter[]]
return[name[result]] | keyword[def] identifier[create_objective] ( identifier[self] , identifier[objective_form] ):
literal[string]
identifier[collection] = identifier[JSONClientValidated] ( literal[string] ,
identifier[collection] = literal[string] ,
identifier[runtime] = identifier[self] . identifier[_runtime] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[objective_form] , identifier[ABCObjectiveForm] ):
keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] )
keyword[if] identifier[objective_form] . identifier[is_for_update] ():
keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] )
keyword[try] :
keyword[if] identifier[self] . identifier[_forms] [ identifier[objective_form] . identifier[get_id] (). identifier[get_identifier] ()]== identifier[CREATED] :
keyword[raise] identifier[errors] . identifier[IllegalState] ( literal[string] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[errors] . identifier[Unsupported] ( literal[string] )
keyword[if] keyword[not] identifier[objective_form] . identifier[is_valid] ():
keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] )
identifier[insert_result] = identifier[collection] . identifier[insert_one] ( identifier[objective_form] . identifier[_my_map] )
identifier[self] . identifier[_forms] [ identifier[objective_form] . identifier[get_id] (). identifier[get_identifier] ()]= identifier[CREATED]
identifier[result] = identifier[objects] . identifier[Objective] (
identifier[osid_object_map] = identifier[collection] . identifier[find_one] ({ literal[string] : identifier[insert_result] . identifier[inserted_id] }),
identifier[runtime] = identifier[self] . identifier[_runtime] ,
identifier[proxy] = identifier[self] . identifier[_proxy] )
keyword[return] identifier[result] | def create_objective(self, objective_form):
"""Creates a new ``Objective``.
arg: objective_form (osid.learning.ObjectiveForm): the form
for this ``Objective``
return: (osid.learning.Objective) - the new ``Objective``
raise: IllegalState - ``objective_form`` already used in a
create transaction
raise: InvalidArgument - one or more of the form elements is
invalid
raise: NullArgument - ``objective_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``objective_form`` did not originate from
``get_objective_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.create_resource_template
collection = JSONClientValidated('learning', collection='Objective', runtime=self._runtime)
if not isinstance(objective_form, ABCObjectiveForm):
raise errors.InvalidArgument('argument type is not an ObjectiveForm') # depends on [control=['if'], data=[]]
if objective_form.is_for_update():
raise errors.InvalidArgument('the ObjectiveForm is for update only, not create') # depends on [control=['if'], data=[]]
try:
if self._forms[objective_form.get_id().get_identifier()] == CREATED:
raise errors.IllegalState('objective_form already used in a create transaction') # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyError:
raise errors.Unsupported('objective_form did not originate from this session') # depends on [control=['except'], data=[]]
if not objective_form.is_valid():
raise errors.InvalidArgument('one or more of the form elements is invalid') # depends on [control=['if'], data=[]]
insert_result = collection.insert_one(objective_form._my_map)
self._forms[objective_form.get_id().get_identifier()] = CREATED
result = objects.Objective(osid_object_map=collection.find_one({'_id': insert_result.inserted_id}), runtime=self._runtime, proxy=self._proxy)
return result |
def language_file_exists(language_code):
    """
    Check if TinyMCE ships a translation file for the given language code.
    :param language_code: language code
    :type language_code: str
    :return: check result
    :rtype: bool
    """
    # TinyMCE language packs live as <code>.js under its langs directory;
    # the static-files finders resolve the relative path (None if absent).
    lang_path = os.path.join(
        'tinymce', 'js', 'tinymce', 'langs', '{0}.js'.format(language_code))
    return finders.find(lang_path) is not None
constant[
Check if TinyMCE has a language file for the specified lang code
:param language_code: language code
:type language_code: str
:return: check result
:rtype: bool
]
variable[filename] assign[=] call[constant[{0}.js].format, parameter[name[language_code]]]
variable[path] assign[=] call[name[os].path.join, parameter[constant[tinymce], constant[js], constant[tinymce], constant[langs], name[filename]]]
return[compare[call[name[finders].find, parameter[name[path]]] is_not constant[None]]] | keyword[def] identifier[language_file_exists] ( identifier[language_code] ):
literal[string]
identifier[filename] = literal[string] . identifier[format] ( identifier[language_code] )
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] , literal[string] , literal[string] , identifier[filename] )
keyword[return] identifier[finders] . identifier[find] ( identifier[path] ) keyword[is] keyword[not] keyword[None] | def language_file_exists(language_code):
"""
Check if TinyMCE has a language file for the specified lang code
:param language_code: language code
:type language_code: str
:return: check result
:rtype: bool
"""
filename = '{0}.js'.format(language_code)
path = os.path.join('tinymce', 'js', 'tinymce', 'langs', filename)
return finders.find(path) is not None |
def runRmFile(self, path, timeout=None, **kwargs):
    """Remove a single file from the worker.
    :param path: path of the file to delete on the worker
    :param timeout: optional command timeout, forwarded when truthy
    :returns: deferred from the remote command
    """
    remote_args = {'path': path, 'logEnviron': self.logEnviron}
    if timeout:
        remote_args['timeout'] = timeout
    if not self.workerVersionIsOlderThan('rmfile', '3.1'):
        return self.runRemoteCommand('rmfile', remote_args, **kwargs)
    # Workers older than 3.1 lack 'rmfile'; fall back to 'rmdir', which
    # expects an absolute path in the 'dir' argument.
    remote_args['dir'] = os.path.abspath(path)
    return self.runRemoteCommand('rmdir', remote_args, **kwargs)
constant[ remove a file from the worker ]
variable[cmd_args] assign[=] dictionary[[<ast.Constant object at 0x7da1b2098d90>, <ast.Constant object at 0x7da1b2098d00>], [<ast.Name object at 0x7da1b20982e0>, <ast.Attribute object at 0x7da1b2098c40>]]
if name[timeout] begin[:]
call[name[cmd_args]][constant[timeout]] assign[=] name[timeout]
if call[name[self].workerVersionIsOlderThan, parameter[constant[rmfile], constant[3.1]]] begin[:]
call[name[cmd_args]][constant[dir]] assign[=] call[name[os].path.abspath, parameter[name[path]]]
return[call[name[self].runRemoteCommand, parameter[constant[rmdir], name[cmd_args]]]]
return[call[name[self].runRemoteCommand, parameter[constant[rmfile], name[cmd_args]]]] | keyword[def] identifier[runRmFile] ( identifier[self] , identifier[path] , identifier[timeout] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[cmd_args] ={ literal[string] : identifier[path] , literal[string] : identifier[self] . identifier[logEnviron] }
keyword[if] identifier[timeout] :
identifier[cmd_args] [ literal[string] ]= identifier[timeout]
keyword[if] identifier[self] . identifier[workerVersionIsOlderThan] ( literal[string] , literal[string] ):
identifier[cmd_args] [ literal[string] ]= identifier[os] . identifier[path] . identifier[abspath] ( identifier[path] )
keyword[return] identifier[self] . identifier[runRemoteCommand] ( literal[string] , identifier[cmd_args] ,** identifier[kwargs] )
keyword[return] identifier[self] . identifier[runRemoteCommand] ( literal[string] , identifier[cmd_args] ,** identifier[kwargs] ) | def runRmFile(self, path, timeout=None, **kwargs):
""" remove a file from the worker """
cmd_args = {'path': path, 'logEnviron': self.logEnviron}
if timeout:
cmd_args['timeout'] = timeout # depends on [control=['if'], data=[]]
if self.workerVersionIsOlderThan('rmfile', '3.1'):
cmd_args['dir'] = os.path.abspath(path)
return self.runRemoteCommand('rmdir', cmd_args, **kwargs) # depends on [control=['if'], data=[]]
return self.runRemoteCommand('rmfile', cmd_args, **kwargs) |
def tryReduceAnd(sig, val):
    """
    Try to statically reduce ``sig & val``.
    :return: ``sig`` when ``val`` is an all-ones mask, ``val`` when it is
        zero, otherwise None (no static reduction possible)
    """
    mask = sig._dtype.all_mask()
    # Reduction is only sound when the constant is fully valid.
    if not val._isFullVld():
        return None
    v = val.val
    if v == mask:
        # x & all-ones == x
        return sig
    if v == 0:
        # x & 0 == 0
        return val
    return None
constant[
Return sig and val reduced by & operator or None
if it is not possible to statically reduce expression
]
variable[m] assign[=] call[name[sig]._dtype.all_mask, parameter[]]
if call[name[val]._isFullVld, parameter[]] begin[:]
variable[v] assign[=] name[val].val
if compare[name[v] equal[==] name[m]] begin[:]
return[name[sig]] | keyword[def] identifier[tryReduceAnd] ( identifier[sig] , identifier[val] ):
literal[string]
identifier[m] = identifier[sig] . identifier[_dtype] . identifier[all_mask] ()
keyword[if] identifier[val] . identifier[_isFullVld] ():
identifier[v] = identifier[val] . identifier[val]
keyword[if] identifier[v] == identifier[m] :
keyword[return] identifier[sig]
keyword[elif] identifier[v] == literal[int] :
keyword[return] identifier[val] | def tryReduceAnd(sig, val):
"""
Return sig and val reduced by & operator or None
if it is not possible to statically reduce expression
"""
m = sig._dtype.all_mask()
if val._isFullVld():
v = val.val
if v == m:
return sig # depends on [control=['if'], data=[]]
elif v == 0:
return val # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def match_plot(plotdata, outfile):
    """Plot a list of motifs with their database match and p-value.
    :param plotdata: list of (motif, dbmotif, pval) tuples
    :param outfile: path of the image file to write
    """
    fig_h = 2
    fig_w = 7
    nrows = len(plotdata)
    ncols = 2
    fig = plt.figure(figsize=(fig_w, nrows * fig_h))
    for i, (motif, dbmotif, pval) in enumerate(plotdata):
        text = "Motif: %s\nBest match: %s\np-value: %0.2e" % (motif.id, dbmotif.id, pval)

        # Left column: two stacked image axes — query motif logo on top,
        # database match below.
        grid = ImageGrid(fig, (nrows, ncols, i * 2 + 1),
                         nrows_ncols=(2, 1),
                         axes_pad=0,
                         )
        for j in range(2):
            axes_off(grid[j])

        # Render each logo to a temp PNG, then draw it. Using the files as
        # context managers guarantees they are closed and deleted even if
        # rendering or imread raises (previously the handles leaked and
        # deletion relied on garbage collection).
        with NamedTemporaryFile(dir=mytmpdir(), suffix=".png") as tmp:
            motif.to_img(tmp.name, fmt="PNG", height=6)
            grid[0].imshow(plt.imread(tmp.name), interpolation="none")
        with NamedTemporaryFile(dir=mytmpdir(), suffix=".png") as tmp:
            dbmotif.to_img(tmp.name, fmt="PNG")
            grid[1].imshow(plt.imread(tmp.name), interpolation="none")

        # Right column: text panel with the match summary.
        ax = plt.subplot(nrows, ncols, i * 2 + 2)
        axes_off(ax)
        ax.text(0, 0.5, text,
                horizontalalignment='left',
                verticalalignment='center')

    plt.savefig(outfile, dpi=300, bbox_inches='tight')
    plt.close(fig)
constant[Plot list of motifs with database match and p-value
"param plotdata: list of (motif, dbmotif, pval)
]
variable[fig_h] assign[=] constant[2]
variable[fig_w] assign[=] constant[7]
variable[nrows] assign[=] call[name[len], parameter[name[plotdata]]]
variable[ncols] assign[=] constant[2]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20c6e5720>, <ast.Tuple object at 0x7da20c6e4af0>]]] in starred[call[name[enumerate], parameter[name[plotdata]]]] begin[:]
variable[text] assign[=] binary_operation[constant[Motif: %s
Best match: %s
p-value: %0.2e] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18dc9a920>, <ast.Attribute object at 0x7da18dc9a560>, <ast.Name object at 0x7da18dc98f40>]]]
variable[grid] assign[=] call[name[ImageGrid], parameter[name[fig], tuple[[<ast.Name object at 0x7da18dc9ae60>, <ast.Name object at 0x7da18dc9b2e0>, <ast.BinOp object at 0x7da18dc9b8e0>]]]]
for taget[name[j]] in starred[call[name[range], parameter[constant[2]]]] begin[:]
call[name[axes_off], parameter[call[name[grid]][name[j]]]]
variable[tmp] assign[=] call[name[NamedTemporaryFile], parameter[]]
call[name[motif].to_img, parameter[name[tmp].name]]
call[call[name[grid]][constant[0]].imshow, parameter[call[name[plt].imread, parameter[name[tmp].name]]]]
variable[tmp] assign[=] call[name[NamedTemporaryFile], parameter[]]
call[name[dbmotif].to_img, parameter[name[tmp].name]]
call[call[name[grid]][constant[1]].imshow, parameter[call[name[plt].imread, parameter[name[tmp].name]]]]
variable[ax] assign[=] call[name[plt].subplot, parameter[name[nrows], name[ncols], binary_operation[binary_operation[name[i] * constant[2]] + constant[2]]]]
call[name[axes_off], parameter[name[ax]]]
call[name[ax].text, parameter[constant[0], constant[0.5], name[text]]]
call[name[plt].savefig, parameter[name[outfile]]]
call[name[plt].close, parameter[name[fig]]] | keyword[def] identifier[match_plot] ( identifier[plotdata] , identifier[outfile] ):
literal[string]
identifier[fig_h] = literal[int]
identifier[fig_w] = literal[int]
identifier[nrows] = identifier[len] ( identifier[plotdata] )
identifier[ncols] = literal[int]
identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] =( identifier[fig_w] , identifier[nrows] * identifier[fig_h] ))
keyword[for] identifier[i] ,( identifier[motif] , identifier[dbmotif] , identifier[pval] ) keyword[in] identifier[enumerate] ( identifier[plotdata] ):
identifier[text] = literal[string] %( identifier[motif] . identifier[id] , identifier[dbmotif] . identifier[id] , identifier[pval] )
identifier[grid] = identifier[ImageGrid] ( identifier[fig] ,( identifier[nrows] , identifier[ncols] , identifier[i] * literal[int] + literal[int] ),
identifier[nrows_ncols] =( literal[int] , literal[int] ),
identifier[axes_pad] = literal[int] ,
)
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] ):
identifier[axes_off] ( identifier[grid] [ identifier[j] ])
identifier[tmp] = identifier[NamedTemporaryFile] ( identifier[dir] = identifier[mytmpdir] (), identifier[suffix] = literal[string] )
identifier[motif] . identifier[to_img] ( identifier[tmp] . identifier[name] , identifier[fmt] = literal[string] , identifier[height] = literal[int] )
identifier[grid] [ literal[int] ]. identifier[imshow] ( identifier[plt] . identifier[imread] ( identifier[tmp] . identifier[name] ), identifier[interpolation] = literal[string] )
identifier[tmp] = identifier[NamedTemporaryFile] ( identifier[dir] = identifier[mytmpdir] (), identifier[suffix] = literal[string] )
identifier[dbmotif] . identifier[to_img] ( identifier[tmp] . identifier[name] , identifier[fmt] = literal[string] )
identifier[grid] [ literal[int] ]. identifier[imshow] ( identifier[plt] . identifier[imread] ( identifier[tmp] . identifier[name] ), identifier[interpolation] = literal[string] )
identifier[ax] = identifier[plt] . identifier[subplot] ( identifier[nrows] , identifier[ncols] , identifier[i] * literal[int] + literal[int] )
identifier[axes_off] ( identifier[ax] )
identifier[ax] . identifier[text] ( literal[int] , literal[int] , identifier[text] ,
identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] )
identifier[plt] . identifier[savefig] ( identifier[outfile] , identifier[dpi] = literal[int] , identifier[bbox_inches] = literal[string] )
identifier[plt] . identifier[close] ( identifier[fig] ) | def match_plot(plotdata, outfile):
"""Plot list of motifs with database match and p-value
"param plotdata: list of (motif, dbmotif, pval)
"""
fig_h = 2
fig_w = 7
nrows = len(plotdata)
ncols = 2
fig = plt.figure(figsize=(fig_w, nrows * fig_h))
for (i, (motif, dbmotif, pval)) in enumerate(plotdata):
text = 'Motif: %s\nBest match: %s\np-value: %0.2e' % (motif.id, dbmotif.id, pval)
grid = ImageGrid(fig, (nrows, ncols, i * 2 + 1), nrows_ncols=(2, 1), axes_pad=0)
for j in range(2):
axes_off(grid[j]) # depends on [control=['for'], data=['j']]
tmp = NamedTemporaryFile(dir=mytmpdir(), suffix='.png')
motif.to_img(tmp.name, fmt='PNG', height=6)
grid[0].imshow(plt.imread(tmp.name), interpolation='none')
tmp = NamedTemporaryFile(dir=mytmpdir(), suffix='.png')
dbmotif.to_img(tmp.name, fmt='PNG')
grid[1].imshow(plt.imread(tmp.name), interpolation='none')
ax = plt.subplot(nrows, ncols, i * 2 + 2)
axes_off(ax)
ax.text(0, 0.5, text, horizontalalignment='left', verticalalignment='center') # depends on [control=['for'], data=[]]
plt.savefig(outfile, dpi=300, bbox_inches='tight')
plt.close(fig) |
def get_option(self, key):
    """Look up the option named `key` (string) on this instance,
    falling back to the class-wide default when it is not set locally."""
    # The default is resolved first, so an unknown key raises KeyError
    # from _default_options even if _options happens to contain it.
    fallback = self._default_options[key]
    return self._options.get(key, fallback)
constant[Return the current value of the option `key` (string).
Instance method, only refers to current instance.]
return[call[name[self]._options.get, parameter[name[key], call[name[self]._default_options][name[key]]]]] | keyword[def] identifier[get_option] ( identifier[self] , identifier[key] ):
literal[string]
keyword[return] identifier[self] . identifier[_options] . identifier[get] ( identifier[key] , identifier[self] . identifier[_default_options] [ identifier[key] ]) | def get_option(self, key):
"""Return the current value of the option `key` (string).
Instance method, only refers to current instance."""
return self._options.get(key, self._default_options[key]) |
def _is_in_max_difference(value_1, value_2, max_difference):
''' Helper function to determine the difference of two values that can be np.uints. Works in python and numba mode.
Circumvents numba bug #1653
'''
if value_1 <= value_2:
return value_2 - value_1 <= max_difference
return value_1 - value_2 <= max_difference | def function[_is_in_max_difference, parameter[value_1, value_2, max_difference]]:
constant[ Helper function to determine the difference of two values that can be np.uints. Works in python and numba mode.
Circumvents numba bug #1653
]
if compare[name[value_1] less_or_equal[<=] name[value_2]] begin[:]
return[compare[binary_operation[name[value_2] - name[value_1]] less_or_equal[<=] name[max_difference]]]
return[compare[binary_operation[name[value_1] - name[value_2]] less_or_equal[<=] name[max_difference]]] | keyword[def] identifier[_is_in_max_difference] ( identifier[value_1] , identifier[value_2] , identifier[max_difference] ):
literal[string]
keyword[if] identifier[value_1] <= identifier[value_2] :
keyword[return] identifier[value_2] - identifier[value_1] <= identifier[max_difference]
keyword[return] identifier[value_1] - identifier[value_2] <= identifier[max_difference] | def _is_in_max_difference(value_1, value_2, max_difference):
""" Helper function to determine the difference of two values that can be np.uints. Works in python and numba mode.
Circumvents numba bug #1653
"""
if value_1 <= value_2:
return value_2 - value_1 <= max_difference # depends on [control=['if'], data=['value_1', 'value_2']]
return value_1 - value_2 <= max_difference |
def deleteSession(self, verbose=None):
    """
    Delete the current session; the server initializes a new one and
    returns a message indicating whether the deletion succeeded.
    :param verbose: print more
    :returns: 200: successful operation
    """
    return api(url=self.___url + 'session', method="DELETE", verbose=verbose)
constant[
This deletes the current session and initializes a new one. A message is returned to indicate the success of the deletion.
:param verbose: print more
:returns: 200: successful operation
]
variable[response] assign[=] call[name[api], parameter[]]
return[name[response]] | keyword[def] identifier[deleteSession] ( identifier[self] , identifier[verbose] = keyword[None] ):
literal[string]
identifier[response] = identifier[api] ( identifier[url] = identifier[self] . identifier[___url] + literal[string] , identifier[method] = literal[string] , identifier[verbose] = identifier[verbose] )
keyword[return] identifier[response] | def deleteSession(self, verbose=None):
"""
This deletes the current session and initializes a new one. A message is returned to indicate the success of the deletion.
:param verbose: print more
:returns: 200: successful operation
"""
response = api(url=self.___url + 'session', method='DELETE', verbose=verbose)
return response |
def get_thumb(settings, filename):
    """Return the path of the thumbnail for *filename*.
    The thumbnail lives in ``settings['thumb_dir']`` next to the original,
    with the configured prefix/suffix applied to the base name. For example,
    "bar/foo.jpg" maps to "bar/thumbnails/foo.jpg" (default settings).
    Video files get a still-image thumbnail, so their extension is replaced
    by ".jpg": "bar/foo.webm" maps to "bar/thumbnails/foo.jpg".
    """
    directory, basename = os.path.split(filename)
    stem, ext = os.path.splitext(basename)
    # Thumbnails of videos are JPEG stills regardless of the source format.
    if ext.lower() in settings['video_extensions']:
        ext = '.jpg'
    thumb_name = settings['thumb_prefix'] + stem + settings['thumb_suffix'] + ext
    return join(directory, settings['thumb_dir'], thumb_name)
constant[Return the path to the thumb.
examples:
>>> default_settings = create_settings()
>>> get_thumb(default_settings, "bar/foo.jpg")
"bar/thumbnails/foo.jpg"
>>> get_thumb(default_settings, "bar/foo.png")
"bar/thumbnails/foo.png"
for videos, it returns a jpg file:
>>> get_thumb(default_settings, "bar/foo.webm")
"bar/thumbnails/foo.jpg"
]
<ast.Tuple object at 0x7da1b0109150> assign[=] call[name[os].path.split, parameter[name[filename]]]
<ast.Tuple object at 0x7da1b0109d80> assign[=] call[name[os].path.splitext, parameter[name[filen]]]
if compare[call[name[ext].lower, parameter[]] in call[name[settings]][constant[video_extensions]]] begin[:]
variable[ext] assign[=] constant[.jpg]
return[call[name[join], parameter[name[path], call[name[settings]][constant[thumb_dir]], binary_operation[binary_operation[binary_operation[call[name[settings]][constant[thumb_prefix]] + name[name]] + call[name[settings]][constant[thumb_suffix]]] + name[ext]]]]] | keyword[def] identifier[get_thumb] ( identifier[settings] , identifier[filename] ):
literal[string]
identifier[path] , identifier[filen] = identifier[os] . identifier[path] . identifier[split] ( identifier[filename] )
identifier[name] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[filen] )
keyword[if] identifier[ext] . identifier[lower] () keyword[in] identifier[settings] [ literal[string] ]:
identifier[ext] = literal[string]
keyword[return] identifier[join] ( identifier[path] , identifier[settings] [ literal[string] ], identifier[settings] [ literal[string] ]+
identifier[name] + identifier[settings] [ literal[string] ]+ identifier[ext] ) | def get_thumb(settings, filename):
"""Return the path to the thumb.
examples:
>>> default_settings = create_settings()
>>> get_thumb(default_settings, "bar/foo.jpg")
"bar/thumbnails/foo.jpg"
>>> get_thumb(default_settings, "bar/foo.png")
"bar/thumbnails/foo.png"
for videos, it returns a jpg file:
>>> get_thumb(default_settings, "bar/foo.webm")
"bar/thumbnails/foo.jpg"
"""
(path, filen) = os.path.split(filename)
(name, ext) = os.path.splitext(filen)
if ext.lower() in settings['video_extensions']:
ext = '.jpg' # depends on [control=['if'], data=[]]
return join(path, settings['thumb_dir'], settings['thumb_prefix'] + name + settings['thumb_suffix'] + ext) |
def upload_from_filename(
    self, filename, content_type=None, client=None, predefined_acl=None
):
    """Upload this blob's contents from the content of a named file.
    The content type of the upload will be determined in order
    of precedence:
    - The value passed in to this method (if not :data:`None`)
    - The value stored on the current blob
    - The value given by ``mimetypes.guess_type``
    - The default value ('application/octet-stream')
    .. note::
        Uploading to an existing blob is subject to the bucket's
        "versioning" and "lifecycle" policies; without them the upload
        overwrites any existing contents. See the `object versioning
        <https://cloud.google.com/storage/docs/object-versioning>`_ and
        `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
        API documents for details.
    If :attr:`user_project` is set on the bucket, bills the API request
    to that project.
    :type filename: str
    :param filename: The path to the file.
    :type content_type: str
    :param content_type: Optional type of content being uploaded.
    :type client: :class:`~google.cloud.storage.client.Client`
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the blob's bucket.
    :type predefined_acl: str
    :param predefined_acl: (Optional) predefined access control list
    """
    content_type = self._get_content_type(content_type, filename=filename)
    with open(filename, "rb") as stream:
        # fstat on the open descriptor gives the exact byte count to send.
        self.upload_from_file(
            stream,
            content_type=content_type,
            client=client,
            size=os.fstat(stream.fileno()).st_size,
            predefined_acl=predefined_acl,
        )
constant[Upload this blob's contents from the content of a named file.
The content type of the upload will be determined in order
of precedence:
- The value passed in to this method (if not :data:`None`)
- The value stored on the current blob
- The value given by ``mimetypes.guess_type``
- The default value ('application/octet-stream')
.. note::
The effect of uploading to an existing blob depends on the
"versioning" and "lifecycle" policies defined on the blob's
bucket. In the absence of those policies, upload will
overwrite any existing contents.
See the `object versioning
<https://cloud.google.com/storage/docs/object-versioning>`_ and
`lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
API documents for details.
If :attr:`user_project` is set on the bucket, bills the API request
to that project.
:type filename: str
:param filename: The path to the file.
:type content_type: str
:param content_type: Optional type of content being uploaded.
:type client: :class:`~google.cloud.storage.client.Client`
:param client: (Optional) The client to use. If not passed, falls back
to the ``client`` stored on the blob's bucket.
:type predefined_acl: str
:param predefined_acl: (Optional) predefined access control list
]
variable[content_type] assign[=] call[name[self]._get_content_type, parameter[name[content_type]]]
with call[name[open], parameter[name[filename], constant[rb]]] begin[:]
variable[total_bytes] assign[=] call[name[os].fstat, parameter[call[name[file_obj].fileno, parameter[]]]].st_size
call[name[self].upload_from_file, parameter[name[file_obj]]] | keyword[def] identifier[upload_from_filename] (
identifier[self] , identifier[filename] , identifier[content_type] = keyword[None] , identifier[client] = keyword[None] , identifier[predefined_acl] = keyword[None]
):
literal[string]
identifier[content_type] = identifier[self] . identifier[_get_content_type] ( identifier[content_type] , identifier[filename] = identifier[filename] )
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[file_obj] :
identifier[total_bytes] = identifier[os] . identifier[fstat] ( identifier[file_obj] . identifier[fileno] ()). identifier[st_size]
identifier[self] . identifier[upload_from_file] (
identifier[file_obj] ,
identifier[content_type] = identifier[content_type] ,
identifier[client] = identifier[client] ,
identifier[size] = identifier[total_bytes] ,
identifier[predefined_acl] = identifier[predefined_acl] ,
) | def upload_from_filename(self, filename, content_type=None, client=None, predefined_acl=None):
"""Upload this blob's contents from the content of a named file.
The content type of the upload will be determined in order
of precedence:
- The value passed in to this method (if not :data:`None`)
- The value stored on the current blob
- The value given by ``mimetypes.guess_type``
- The default value ('application/octet-stream')
.. note::
The effect of uploading to an existing blob depends on the
"versioning" and "lifecycle" policies defined on the blob's
bucket. In the absence of those policies, upload will
overwrite any existing contents.
See the `object versioning
<https://cloud.google.com/storage/docs/object-versioning>`_ and
`lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
API documents for details.
If :attr:`user_project` is set on the bucket, bills the API request
to that project.
:type filename: str
:param filename: The path to the file.
:type content_type: str
:param content_type: Optional type of content being uploaded.
:type client: :class:`~google.cloud.storage.client.Client`
:param client: (Optional) The client to use. If not passed, falls back
to the ``client`` stored on the blob's bucket.
:type predefined_acl: str
:param predefined_acl: (Optional) predefined access control list
"""
content_type = self._get_content_type(content_type, filename=filename)
with open(filename, 'rb') as file_obj:
total_bytes = os.fstat(file_obj.fileno()).st_size
self.upload_from_file(file_obj, content_type=content_type, client=client, size=total_bytes, predefined_acl=predefined_acl) # depends on [control=['with'], data=['file_obj']] |
def _get_bravais_lattice(spg_symbol, lattice_type, a, b, c, unique):
"""Get Bravais lattice symbol from symmetry data"""
if lattice_type == 'triclinic':
return('triclinic')
elif lattice_type == 'monoclinic':
if 'P' in spg_symbol:
if unique == 0:
return('mon_p_a')
elif unique == 1:
return('mon_p_b')
elif unique == 2:
return('mon_p_c')
elif 'C' in spg_symbol:
if unique == 0:
return('mon_c_a')
elif unique == 1:
return('mon_c_b')
elif unique == 2:
return('mon_c_c')
elif lattice_type == 'orthorhombic':
if 'P' in spg_symbol:
return('orth_p')
elif 'A' in spg_symbol or 'C' in spg_symbol:
if a > b:
return('orth_c_a')
elif b > a:
return('orth_c_b')
elif 'F' in spg_symbol:
if (1/a**2 < 1/b**2 + 1/c**2 and 1/b**2 < 1/c**2 + 1/a**2 and
1/c**2 < 1/a**2 + 1/b**2):
return('orth_f_1')
elif 1/c**2 > 1/a**2 + 1/b**2:
return('orth_f_2')
elif 1/b**2 > 1/a**2 + 1/c**2:
return('orth_f_3')
elif 1/a**2 > 1/c**2 + 1/b**2:
return('orth_f_4')
elif 'I' in spg_symbol:
if a > b and a > c:
return('orth_i_a')
elif b > a and b > c:
return('orth_i_b')
elif c > a and c > b:
return('orth_i_c')
elif lattice_type == 'tetragonal':
if 'P' in spg_symbol:
return('tet_p')
elif 'I' in spg_symbol:
if a > c:
return('tet_i_a')
else:
return('tet_i_c')
elif (lattice_type == 'trigonal' or lattice_type == 'hexagonal'
or lattice_type == 'rhombohedral'):
if 'R' in spg_symbol:
if a > np.sqrt(2) * c:
return('trig_r_a')
else:
return('trig_r_c')
elif 'P' in spg_symbol:
if unique == 0:
return('trig_p_a')
elif unique == 2:
return('trig_p_c')
elif lattice_type == "cubic":
if 'P' in spg_symbol:
return('cubic_p')
elif 'I' in spg_symbol:
return('cubic_i')
elif 'F' in spg_symbol:
return('cubic_f') | def function[_get_bravais_lattice, parameter[spg_symbol, lattice_type, a, b, c, unique]]:
constant[Get Bravais lattice symbol from symmetry data]
if compare[name[lattice_type] equal[==] constant[triclinic]] begin[:]
return[constant[triclinic]] | keyword[def] identifier[_get_bravais_lattice] ( identifier[spg_symbol] , identifier[lattice_type] , identifier[a] , identifier[b] , identifier[c] , identifier[unique] ):
literal[string]
keyword[if] identifier[lattice_type] == literal[string] :
keyword[return] ( literal[string] )
keyword[elif] identifier[lattice_type] == literal[string] :
keyword[if] literal[string] keyword[in] identifier[spg_symbol] :
keyword[if] identifier[unique] == literal[int] :
keyword[return] ( literal[string] )
keyword[elif] identifier[unique] == literal[int] :
keyword[return] ( literal[string] )
keyword[elif] identifier[unique] == literal[int] :
keyword[return] ( literal[string] )
keyword[elif] literal[string] keyword[in] identifier[spg_symbol] :
keyword[if] identifier[unique] == literal[int] :
keyword[return] ( literal[string] )
keyword[elif] identifier[unique] == literal[int] :
keyword[return] ( literal[string] )
keyword[elif] identifier[unique] == literal[int] :
keyword[return] ( literal[string] )
keyword[elif] identifier[lattice_type] == literal[string] :
keyword[if] literal[string] keyword[in] identifier[spg_symbol] :
keyword[return] ( literal[string] )
keyword[elif] literal[string] keyword[in] identifier[spg_symbol] keyword[or] literal[string] keyword[in] identifier[spg_symbol] :
keyword[if] identifier[a] > identifier[b] :
keyword[return] ( literal[string] )
keyword[elif] identifier[b] > identifier[a] :
keyword[return] ( literal[string] )
keyword[elif] literal[string] keyword[in] identifier[spg_symbol] :
keyword[if] ( literal[int] / identifier[a] ** literal[int] < literal[int] / identifier[b] ** literal[int] + literal[int] / identifier[c] ** literal[int] keyword[and] literal[int] / identifier[b] ** literal[int] < literal[int] / identifier[c] ** literal[int] + literal[int] / identifier[a] ** literal[int] keyword[and]
literal[int] / identifier[c] ** literal[int] < literal[int] / identifier[a] ** literal[int] + literal[int] / identifier[b] ** literal[int] ):
keyword[return] ( literal[string] )
keyword[elif] literal[int] / identifier[c] ** literal[int] > literal[int] / identifier[a] ** literal[int] + literal[int] / identifier[b] ** literal[int] :
keyword[return] ( literal[string] )
keyword[elif] literal[int] / identifier[b] ** literal[int] > literal[int] / identifier[a] ** literal[int] + literal[int] / identifier[c] ** literal[int] :
keyword[return] ( literal[string] )
keyword[elif] literal[int] / identifier[a] ** literal[int] > literal[int] / identifier[c] ** literal[int] + literal[int] / identifier[b] ** literal[int] :
keyword[return] ( literal[string] )
keyword[elif] literal[string] keyword[in] identifier[spg_symbol] :
keyword[if] identifier[a] > identifier[b] keyword[and] identifier[a] > identifier[c] :
keyword[return] ( literal[string] )
keyword[elif] identifier[b] > identifier[a] keyword[and] identifier[b] > identifier[c] :
keyword[return] ( literal[string] )
keyword[elif] identifier[c] > identifier[a] keyword[and] identifier[c] > identifier[b] :
keyword[return] ( literal[string] )
keyword[elif] identifier[lattice_type] == literal[string] :
keyword[if] literal[string] keyword[in] identifier[spg_symbol] :
keyword[return] ( literal[string] )
keyword[elif] literal[string] keyword[in] identifier[spg_symbol] :
keyword[if] identifier[a] > identifier[c] :
keyword[return] ( literal[string] )
keyword[else] :
keyword[return] ( literal[string] )
keyword[elif] ( identifier[lattice_type] == literal[string] keyword[or] identifier[lattice_type] == literal[string]
keyword[or] identifier[lattice_type] == literal[string] ):
keyword[if] literal[string] keyword[in] identifier[spg_symbol] :
keyword[if] identifier[a] > identifier[np] . identifier[sqrt] ( literal[int] )* identifier[c] :
keyword[return] ( literal[string] )
keyword[else] :
keyword[return] ( literal[string] )
keyword[elif] literal[string] keyword[in] identifier[spg_symbol] :
keyword[if] identifier[unique] == literal[int] :
keyword[return] ( literal[string] )
keyword[elif] identifier[unique] == literal[int] :
keyword[return] ( literal[string] )
keyword[elif] identifier[lattice_type] == literal[string] :
keyword[if] literal[string] keyword[in] identifier[spg_symbol] :
keyword[return] ( literal[string] )
keyword[elif] literal[string] keyword[in] identifier[spg_symbol] :
keyword[return] ( literal[string] )
keyword[elif] literal[string] keyword[in] identifier[spg_symbol] :
keyword[return] ( literal[string] ) | def _get_bravais_lattice(spg_symbol, lattice_type, a, b, c, unique):
"""Get Bravais lattice symbol from symmetry data"""
if lattice_type == 'triclinic':
return 'triclinic' # depends on [control=['if'], data=[]]
elif lattice_type == 'monoclinic':
if 'P' in spg_symbol:
if unique == 0:
return 'mon_p_a' # depends on [control=['if'], data=[]]
elif unique == 1:
return 'mon_p_b' # depends on [control=['if'], data=[]]
elif unique == 2:
return 'mon_p_c' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif 'C' in spg_symbol:
if unique == 0:
return 'mon_c_a' # depends on [control=['if'], data=[]]
elif unique == 1:
return 'mon_c_b' # depends on [control=['if'], data=[]]
elif unique == 2:
return 'mon_c_c' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif lattice_type == 'orthorhombic':
if 'P' in spg_symbol:
return 'orth_p' # depends on [control=['if'], data=[]]
elif 'A' in spg_symbol or 'C' in spg_symbol:
if a > b:
return 'orth_c_a' # depends on [control=['if'], data=[]]
elif b > a:
return 'orth_c_b' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif 'F' in spg_symbol:
if 1 / a ** 2 < 1 / b ** 2 + 1 / c ** 2 and 1 / b ** 2 < 1 / c ** 2 + 1 / a ** 2 and (1 / c ** 2 < 1 / a ** 2 + 1 / b ** 2):
return 'orth_f_1' # depends on [control=['if'], data=[]]
elif 1 / c ** 2 > 1 / a ** 2 + 1 / b ** 2:
return 'orth_f_2' # depends on [control=['if'], data=[]]
elif 1 / b ** 2 > 1 / a ** 2 + 1 / c ** 2:
return 'orth_f_3' # depends on [control=['if'], data=[]]
elif 1 / a ** 2 > 1 / c ** 2 + 1 / b ** 2:
return 'orth_f_4' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif 'I' in spg_symbol:
if a > b and a > c:
return 'orth_i_a' # depends on [control=['if'], data=[]]
elif b > a and b > c:
return 'orth_i_b' # depends on [control=['if'], data=[]]
elif c > a and c > b:
return 'orth_i_c' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif lattice_type == 'tetragonal':
if 'P' in spg_symbol:
return 'tet_p' # depends on [control=['if'], data=[]]
elif 'I' in spg_symbol:
if a > c:
return 'tet_i_a' # depends on [control=['if'], data=[]]
else:
return 'tet_i_c' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif lattice_type == 'trigonal' or lattice_type == 'hexagonal' or lattice_type == 'rhombohedral':
if 'R' in spg_symbol:
if a > np.sqrt(2) * c:
return 'trig_r_a' # depends on [control=['if'], data=[]]
else:
return 'trig_r_c' # depends on [control=['if'], data=[]]
elif 'P' in spg_symbol:
if unique == 0:
return 'trig_p_a' # depends on [control=['if'], data=[]]
elif unique == 2:
return 'trig_p_c' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif lattice_type == 'cubic':
if 'P' in spg_symbol:
return 'cubic_p' # depends on [control=['if'], data=[]]
elif 'I' in spg_symbol:
return 'cubic_i' # depends on [control=['if'], data=[]]
elif 'F' in spg_symbol:
return 'cubic_f' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def error(errors):
    """Build an OAI-PMH response tree containing one <error> per entry.

    ``errors`` is an iterable of ``(code, message)`` pairs; each becomes an
    ``<error code="...">message</error>`` child of the envelope root.
    """
    tree, oaipmh_root = envelope()
    # The qualified tag is the same for every entry, so build it once.
    error_tag = etree.QName(NS_OAIPMH, 'error')
    for error_code, error_message in errors:
        element = SubElement(oaipmh_root, error_tag)
        element.set('code', error_code)
        element.text = error_message
    return tree
constant[Create error element.]
<ast.Tuple object at 0x7da207f99a50> assign[=] call[name[envelope], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18eb57370>, <ast.Name object at 0x7da18eb54130>]]] in starred[name[errors]] begin[:]
variable[e_error] assign[=] call[name[SubElement], parameter[name[e_oaipmh], call[name[etree].QName, parameter[name[NS_OAIPMH], constant[error]]]]]
call[name[e_error].set, parameter[constant[code], name[code]]]
name[e_error].text assign[=] name[message]
return[name[e_tree]] | keyword[def] identifier[error] ( identifier[errors] ):
literal[string]
identifier[e_tree] , identifier[e_oaipmh] = identifier[envelope] ()
keyword[for] identifier[code] , identifier[message] keyword[in] identifier[errors] :
identifier[e_error] = identifier[SubElement] ( identifier[e_oaipmh] , identifier[etree] . identifier[QName] ( identifier[NS_OAIPMH] , literal[string] ))
identifier[e_error] . identifier[set] ( literal[string] , identifier[code] )
identifier[e_error] . identifier[text] = identifier[message]
keyword[return] identifier[e_tree] | def error(errors):
"""Create error element."""
(e_tree, e_oaipmh) = envelope()
for (code, message) in errors:
e_error = SubElement(e_oaipmh, etree.QName(NS_OAIPMH, 'error'))
e_error.set('code', code)
e_error.text = message # depends on [control=['for'], data=[]]
return e_tree |
def registerPlexObject(cls):
    """Register a PlexObject subclass for XML parsing.

    The class is keyed in the PLEXOBJECTS registry by its TAG, combined with
    its STREAMTYPE (or TYPE) attribute when one is set, so that buildItem()
    can map XML elements back to the right class.  Raises on a duplicate key.
    """
    etype = getattr(cls, 'STREAMTYPE', cls.TYPE)
    if etype:
        ehash = '%s.%s' % (cls.TAG, etype)
    else:
        ehash = cls.TAG
    if ehash in PLEXOBJECTS:
        conflict = PLEXOBJECTS[ehash]
        raise Exception(
            'Ambiguous PlexObject definition %s(tag=%s, type=%s) with %s'
            % (cls.__name__, cls.TAG, etype, conflict.__name__))
    PLEXOBJECTS[ehash] = cls
    return cls
constant[ Registry of library types we may come across when parsing XML. This allows us to
define a few helper functions to dynamically convery the XML into objects. See
buildItem() below for an example.
]
variable[etype] assign[=] call[name[getattr], parameter[name[cls], constant[STREAMTYPE], name[cls].TYPE]]
variable[ehash] assign[=] <ast.IfExp object at 0x7da1b0695c90>
if compare[name[ehash] in name[PLEXOBJECTS]] begin[:]
<ast.Raise object at 0x7da1b0695a50>
call[name[PLEXOBJECTS]][name[ehash]] assign[=] name[cls]
return[name[cls]] | keyword[def] identifier[registerPlexObject] ( identifier[cls] ):
literal[string]
identifier[etype] = identifier[getattr] ( identifier[cls] , literal[string] , identifier[cls] . identifier[TYPE] )
identifier[ehash] = literal[string] %( identifier[cls] . identifier[TAG] , identifier[etype] ) keyword[if] identifier[etype] keyword[else] identifier[cls] . identifier[TAG]
keyword[if] identifier[ehash] keyword[in] identifier[PLEXOBJECTS] :
keyword[raise] identifier[Exception] ( literal[string] %
( identifier[cls] . identifier[__name__] , identifier[cls] . identifier[TAG] , identifier[etype] , identifier[PLEXOBJECTS] [ identifier[ehash] ]. identifier[__name__] ))
identifier[PLEXOBJECTS] [ identifier[ehash] ]= identifier[cls]
keyword[return] identifier[cls] | def registerPlexObject(cls):
""" Registry of library types we may come across when parsing XML. This allows us to
define a few helper functions to dynamically convery the XML into objects. See
buildItem() below for an example.
"""
etype = getattr(cls, 'STREAMTYPE', cls.TYPE)
ehash = '%s.%s' % (cls.TAG, etype) if etype else cls.TAG
if ehash in PLEXOBJECTS:
raise Exception('Ambiguous PlexObject definition %s(tag=%s, type=%s) with %s' % (cls.__name__, cls.TAG, etype, PLEXOBJECTS[ehash].__name__)) # depends on [control=['if'], data=['ehash', 'PLEXOBJECTS']]
PLEXOBJECTS[ehash] = cls
return cls |
def delete_gene(self, *gene_ids):
    """Remove every id listed in ``gene_ids`` from ``self.gene_ids``."""
    remaining = []
    for candidate in self.gene_ids:
        # Keep only ids that were not requested for deletion.
        if candidate not in gene_ids:
            remaining.append(candidate)
    self.gene_ids = remaining
constant[Delete one or more gene ids form the list.]
name[self].gene_ids assign[=] <ast.ListComp object at 0x7da20c6e5480> | keyword[def] identifier[delete_gene] ( identifier[self] ,* identifier[gene_ids] ):
literal[string]
identifier[self] . identifier[gene_ids] =[ identifier[gene_id] keyword[for] identifier[gene_id] keyword[in] identifier[self] . identifier[gene_ids]
keyword[if] identifier[gene_id] keyword[not] keyword[in] identifier[gene_ids] ] | def delete_gene(self, *gene_ids):
"""Delete one or more gene ids form the list."""
self.gene_ids = [gene_id for gene_id in self.gene_ids if gene_id not in gene_ids] |
def _get_century_code(year):
"""Returns the century code for a given year"""
if 2000 <= year < 3000:
separator = 'A'
elif 1900 <= year < 2000:
separator = '-'
elif 1800 <= year < 1900:
separator = '+'
else:
raise ValueError('Finnish SSN do not support people born before the year 1800 or after the year 2999')
return separator | def function[_get_century_code, parameter[year]]:
constant[Returns the century code for a given year]
if compare[constant[2000] less_or_equal[<=] name[year]] begin[:]
variable[separator] assign[=] constant[A]
return[name[separator]] | keyword[def] identifier[_get_century_code] ( identifier[year] ):
literal[string]
keyword[if] literal[int] <= identifier[year] < literal[int] :
identifier[separator] = literal[string]
keyword[elif] literal[int] <= identifier[year] < literal[int] :
identifier[separator] = literal[string]
keyword[elif] literal[int] <= identifier[year] < literal[int] :
identifier[separator] = literal[string]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[separator] | def _get_century_code(year):
"""Returns the century code for a given year"""
if 2000 <= year < 3000:
separator = 'A' # depends on [control=['if'], data=[]]
elif 1900 <= year < 2000:
separator = '-' # depends on [control=['if'], data=[]]
elif 1800 <= year < 1900:
separator = '+' # depends on [control=['if'], data=[]]
else:
raise ValueError('Finnish SSN do not support people born before the year 1800 or after the year 2999')
return separator |
def quirks_from_any_parent(
        loc: Union[Labware, Well, str, ModuleGeometry, None]) -> List[str]:
    """ Walk the tree of wells and labwares and extract quirks """
    # Iterative walk: a Well defers to its parent, a Labware contributes its
    # quirks, anything else terminates the climb with an empty result.
    node = loc
    while isinstance(node, Well):
        node = node.parent
    if isinstance(node, Labware):
        return [] + node.quirks
    return []
constant[ Walk the tree of wells and labwares and extract quirks ]
def function[recursive_get_quirks, parameter[obj, found]]:
if call[name[isinstance], parameter[name[obj], name[Labware]]] begin[:]
return[binary_operation[name[found] + name[obj].quirks]]
return[call[name[recursive_get_quirks], parameter[name[loc], list[[]]]]] | keyword[def] identifier[quirks_from_any_parent] (
identifier[loc] : identifier[Union] [ identifier[Labware] , identifier[Well] , identifier[str] , identifier[ModuleGeometry] , keyword[None] ])-> identifier[List] [ identifier[str] ]:
literal[string]
keyword[def] identifier[recursive_get_quirks] ( identifier[obj] , identifier[found] ):
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Labware] ):
keyword[return] identifier[found] + identifier[obj] . identifier[quirks]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[Well] ):
keyword[return] identifier[recursive_get_quirks] ( identifier[obj] . identifier[parent] , identifier[found] )
keyword[else] :
keyword[return] identifier[found]
keyword[return] identifier[recursive_get_quirks] ( identifier[loc] ,[]) | def quirks_from_any_parent(loc: Union[Labware, Well, str, ModuleGeometry, None]) -> List[str]:
""" Walk the tree of wells and labwares and extract quirks """
def recursive_get_quirks(obj, found):
if isinstance(obj, Labware):
return found + obj.quirks # depends on [control=['if'], data=[]]
elif isinstance(obj, Well):
return recursive_get_quirks(obj.parent, found) # depends on [control=['if'], data=[]]
else:
return found
return recursive_get_quirks(loc, []) |
def load_font(prefix, ttf_filename, charmap_filename, directory=None):
    """
    Load a font file and its associated character map into qtawesome.

    Parameters
    ----------
    prefix: str
        Prefix string used later when accessing icons from this font set.
    ttf_filename: str
        Name of the TTF font file.
    charmap_filename: str
        Name of the JSON character-map file.
    directory: str or None, optional
        Directory containing both files; when None they are looked up in
        the bundled ``./fonts/`` directory.

    Example
    -------
    The spyder ide uses qtawesome and uses a custom font for spyder-specific
    icons::

        qta.load_font('spyder', 'spyder.ttf', 'spyder-charmap.json')
    """
    # Delegate to the singleton icon-browser instance.
    manager = _instance()
    return manager.load_font(prefix, ttf_filename, charmap_filename, directory)
constant[
Loads a font file and the associated charmap.
If ``directory`` is None, the files will be looked for in ``./fonts/``.
Parameters
----------
prefix: str
Prefix string to be used when accessing a given font set
ttf_filename: str
Ttf font filename
charmap_filename: str
Character map filename
directory: str or None, optional
Directory for font and charmap files
Example
-------
The spyder ide uses qtawesome and uses a custom font for spyder-specific
icons::
qta.load_font('spyder', 'spyder.ttf', 'spyder-charmap.json')
]
return[call[call[name[_instance], parameter[]].load_font, parameter[name[prefix], name[ttf_filename], name[charmap_filename], name[directory]]]] | keyword[def] identifier[load_font] ( identifier[prefix] , identifier[ttf_filename] , identifier[charmap_filename] , identifier[directory] = keyword[None] ):
literal[string]
keyword[return] identifier[_instance] (). identifier[load_font] ( identifier[prefix] , identifier[ttf_filename] , identifier[charmap_filename] , identifier[directory] ) | def load_font(prefix, ttf_filename, charmap_filename, directory=None):
"""
Loads a font file and the associated charmap.
If ``directory`` is None, the files will be looked for in ``./fonts/``.
Parameters
----------
prefix: str
Prefix string to be used when accessing a given font set
ttf_filename: str
Ttf font filename
charmap_filename: str
Character map filename
directory: str or None, optional
Directory for font and charmap files
Example
-------
The spyder ide uses qtawesome and uses a custom font for spyder-specific
icons::
qta.load_font('spyder', 'spyder.ttf', 'spyder-charmap.json')
"""
return _instance().load_font(prefix, ttf_filename, charmap_filename, directory) |
def save_form_data(self, instance, data):
    """
    The ``KeywordsWidget`` field will return data as a string of
    comma separated IDs for the ``Keyword`` model - convert these
    into actual ``AssignedKeyword`` instances. Also delete
    ``Keyword`` instances if their last related ``AssignedKeyword``
    instance is being removed.
    """
    # Imported locally — presumably to avoid a circular import with
    # yacms.generic.models; TODO confirm.
    from yacms.generic.models import Keyword
    # Manager for this field's related AssignedKeyword rows on ``instance``.
    related_manager = getattr(instance, self.name)
    # Get a list of Keyword IDs being removed.
    old_ids = [str(a.keyword_id) for a in related_manager.all()]
    # NOTE(review): an empty ``data`` string splits into [''], so in that
    # case every previously assigned ID lands in ``removed_ids`` — which
    # matches the "all keywords cleared" intent, but verify for edge input.
    new_ids = data.split(",")
    removed_ids = set(old_ids) - set(new_ids)
    # Remove current AssignedKeyword instances.
    related_manager.all().delete()
    # Convert the data into AssignedKeyword instances.
    if data:
        data = [related_manager.create(keyword_id=i) for i in new_ids]
    # Remove keywords that are no longer assigned to anything.
    Keyword.objects.delete_unused(removed_ids)
super(KeywordsField, self).save_form_data(instance, data) | def function[save_form_data, parameter[self, instance, data]]:
constant[
The ``KeywordsWidget`` field will return data as a string of
comma separated IDs for the ``Keyword`` model - convert these
into actual ``AssignedKeyword`` instances. Also delete
``Keyword`` instances if their last related ``AssignedKeyword``
instance is being removed.
]
from relative_module[yacms.generic.models] import module[Keyword]
variable[related_manager] assign[=] call[name[getattr], parameter[name[instance], name[self].name]]
variable[old_ids] assign[=] <ast.ListComp object at 0x7da1b1367970>
variable[new_ids] assign[=] call[name[data].split, parameter[constant[,]]]
variable[removed_ids] assign[=] binary_operation[call[name[set], parameter[name[old_ids]]] - call[name[set], parameter[name[new_ids]]]]
call[call[name[related_manager].all, parameter[]].delete, parameter[]]
if name[data] begin[:]
variable[data] assign[=] <ast.ListComp object at 0x7da1b1364af0>
call[name[Keyword].objects.delete_unused, parameter[name[removed_ids]]]
call[call[name[super], parameter[name[KeywordsField], name[self]]].save_form_data, parameter[name[instance], name[data]]] | keyword[def] identifier[save_form_data] ( identifier[self] , identifier[instance] , identifier[data] ):
literal[string]
keyword[from] identifier[yacms] . identifier[generic] . identifier[models] keyword[import] identifier[Keyword]
identifier[related_manager] = identifier[getattr] ( identifier[instance] , identifier[self] . identifier[name] )
identifier[old_ids] =[ identifier[str] ( identifier[a] . identifier[keyword_id] ) keyword[for] identifier[a] keyword[in] identifier[related_manager] . identifier[all] ()]
identifier[new_ids] = identifier[data] . identifier[split] ( literal[string] )
identifier[removed_ids] = identifier[set] ( identifier[old_ids] )- identifier[set] ( identifier[new_ids] )
identifier[related_manager] . identifier[all] (). identifier[delete] ()
keyword[if] identifier[data] :
identifier[data] =[ identifier[related_manager] . identifier[create] ( identifier[keyword_id] = identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[new_ids] ]
identifier[Keyword] . identifier[objects] . identifier[delete_unused] ( identifier[removed_ids] )
identifier[super] ( identifier[KeywordsField] , identifier[self] ). identifier[save_form_data] ( identifier[instance] , identifier[data] ) | def save_form_data(self, instance, data):
"""
The ``KeywordsWidget`` field will return data as a string of
comma separated IDs for the ``Keyword`` model - convert these
into actual ``AssignedKeyword`` instances. Also delete
``Keyword`` instances if their last related ``AssignedKeyword``
instance is being removed.
"""
from yacms.generic.models import Keyword
related_manager = getattr(instance, self.name)
# Get a list of Keyword IDs being removed.
old_ids = [str(a.keyword_id) for a in related_manager.all()]
new_ids = data.split(',')
removed_ids = set(old_ids) - set(new_ids)
# Remove current AssignedKeyword instances.
related_manager.all().delete()
# Convert the data into AssignedKeyword instances.
if data:
data = [related_manager.create(keyword_id=i) for i in new_ids] # depends on [control=['if'], data=[]]
# Remove keywords that are no longer assigned to anything.
Keyword.objects.delete_unused(removed_ids)
super(KeywordsField, self).save_form_data(instance, data) |
def pwArcsinh(inputArray, constant):
    '''
    Pointwise inverse hyperbolic sine transform of a cells x genes matrix.

    Each cell (row) is normalized by its total count and scaled by
    ``constant``, then arcsinh-transformed.  The two ``constant`` scalings
    cancel mathematically, so the result equals
    ``arcsinh(counts / rowsum * 1000)``; the intermediate is kept in the same
    operation order as the original row-by-row loop so the floating point
    output is bit-identical.

    :param inputArray: ndarray of gene expression data in raw count or RPKM
        format; rows are cells and columns are genes
    :param constant: some constant to normalize for total read count number
    :return: float ndarray of transformed inputArray (same shape as input)
    '''
    # Total counts per cell.  Rows summing to zero yield NaN/inf downstream,
    # exactly as the original per-row implementation did.
    countsum = np.sum(inputArray, axis=1)
    # Vectorized over all rows at once instead of a Python-level loop with a
    # per-row progress print: identical elementwise math, far faster.
    holder = (inputArray / countsum[:, np.newaxis]) * constant
    transformed = np.arcsinh(holder / constant * 1000)
    return transformed
constant[
Function returns an ndarray by performing a pointwise inverse hyperbolic sine transformation on the input ndarray
:param inputArray: ndarray of gene expression data in raw count or RPKM format
:param contstant: some constant to normalize for total read count number
:return transformed: ndarray of transformed inputArray
]
variable[countsum] assign[=] call[name[np].sum, parameter[name[inputArray]]]
variable[holder] assign[=] call[name[np].zeros_like, parameter[name[inputArray]]]
variable[transformed] assign[=] call[name[np].zeros_like, parameter[name[inputArray]]]
call[name[print], parameter[constant[Completion:]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[inputArray].shape][constant[0]]]]] begin[:]
call[name[print], parameter[binary_operation[binary_operation[name[i] / call[name[inputArray].shape][constant[0]]] * constant[100]]]]
call[name[holder]][tuple[[<ast.Name object at 0x7da2047e8eb0>, <ast.Slice object at 0x7da2047ea8c0>]]] assign[=] binary_operation[binary_operation[call[name[inputArray]][tuple[[<ast.Name object at 0x7da2047e9c60>, <ast.Slice object at 0x7da2047eb3d0>]]] / call[name[countsum]][name[i]]] * name[constant]]
call[name[transformed]][tuple[[<ast.Name object at 0x7da2047e8b80>, <ast.Slice object at 0x7da2047ea770>]]] assign[=] call[name[np].arcsinh, parameter[binary_operation[binary_operation[call[name[holder]][tuple[[<ast.Name object at 0x7da2047eaa70>, <ast.Slice object at 0x7da2044c1de0>]]] / name[constant]] * constant[1000]]]]
return[name[transformed]] | keyword[def] identifier[pwArcsinh] ( identifier[inputArray] , identifier[constant] ):
literal[string]
identifier[countsum] = identifier[np] . identifier[sum] ( identifier[inputArray] , identifier[axis] = literal[int] )
identifier[holder] = identifier[np] . identifier[zeros_like] ( identifier[inputArray] , identifier[dtype] = identifier[float] )
identifier[transformed] = identifier[np] . identifier[zeros_like] ( identifier[inputArray] , identifier[dtype] = identifier[float] )
identifier[print] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[inputArray] . identifier[shape] [ literal[int] ]):
identifier[print] ((( identifier[i] / identifier[inputArray] . identifier[shape] [ literal[int] ])* literal[int] ), identifier[end] = literal[string] )
identifier[holder] [ identifier[i] ,:]=( identifier[inputArray] [ identifier[i] ,:]/ identifier[countsum] [ identifier[i] ])* identifier[constant]
identifier[transformed] [ identifier[i] ,:]= identifier[np] . identifier[arcsinh] (( identifier[holder] [ identifier[i] ,:]/ identifier[constant] * literal[int] ))
keyword[return] identifier[transformed] | def pwArcsinh(inputArray, constant):
"""
Function returns an ndarray by performing a pointwise inverse hyperbolic sine transformation on the input ndarray
:param inputArray: ndarray of gene expression data in raw count or RPKM format
:param contstant: some constant to normalize for total read count number
:return transformed: ndarray of transformed inputArray
"""
#this function assumes rows are cells and columns are genes in the input array
countsum = np.sum(inputArray, axis=1) #calculate the total number of counts per cell
holder = np.zeros_like(inputArray, dtype=float)
transformed = np.zeros_like(inputArray, dtype=float) #initialize the output array
print('Completion:')
for i in range(0, inputArray.shape[0]):
print(i / inputArray.shape[0] * 100, end='\r') #progress meter
holder[i, :] = inputArray[i, :] / countsum[i] * constant #divide each genes counts by total number of counts in cell
transformed[i, :] = np.arcsinh(holder[i, :] / constant * 1000) #do arcsinh transform for each element of matrix # depends on [control=['for'], data=['i']]
return transformed |
def hashstr(data, hashlen=HASH_LEN, alphabet=ALPHABET):
    """
    python -c "import utool as ut; print(ut.hashstr('abcd'))"
    Args:
        data (hashable):
        hashlen (int): (default = 16)
        alphabet (list): list of characters:
    Returns:
        str: hashstr
    CommandLine:
        python -m utool.util_hash --test-hashstr
        python3 -m utool.util_hash --test-hashstr
        python3 -m utool.util_hash --test-hashstr:2
        python -m utool.util_hash hashstr:3
        python3 -m utool.util_hash hashstr:3
    Example0:
        >>> # ENABLE_DOCTEST
        >>> from utool.util_hash import * # NOQA
        >>> data = 'foobar'
        >>> hashlen = 16
        >>> alphabet = ALPHABET_41
        >>> text = hashstr(data, hashlen, alphabet)
        >>> result = ('text = %s' % (str(text),))
        >>> print(result)
        text = mi5yum60mbxhyp+x
    Example1:
        >>> # ENABLE_DOCTEST
        >>> from utool.util_hash import * # NOQA
        >>> data = ''
        >>> hashlen = 16
        >>> alphabet = ALPHABET_41
        >>> text = hashstr(data, hashlen, alphabet)
        >>> result = ('text = %s' % (str(text),))
        >>> print(result)
        text = 0000000000000000
    Example2:
        >>> # ENABLE_DOCTEST
        >>> from utool.util_hash import * # NOQA
        >>> import numpy as np
        >>> data = np.array([1, 2, 3])
        >>> hashlen = 16
        >>> alphabet = ALPHABET_41
        >>> text = hashstr(data, hashlen, alphabet)
        >>> result = ('text = %s' % (str(text),))
        >>> print(result)
        text = z5lqw0bzt4dmb9yy
    Example2:
        >>> # ENABLE_DOCTEST
        >>> from utool.util_hash import * # NOQA
        >>> import numpy as np
        >>> from uuid import UUID
        >>> data = (UUID('7cd0197b-1394-9d16-b1eb-0d8d7a60aedc'), UUID('c76b54a5-adb6-7f16-f0fb-190ab99409f8'))
        >>> hashlen = 16
        >>> alphabet = ALPHABET_41
        >>> text = hashstr_arr(data, 'label')
        >>> result = ('text = %s' % (str(text),))
        >>> print(result)
    Example3:
        >>> # DISABLE_DOCTEST
        >>> # UNSTABLE_DOCTEST
        >>> from utool.util_hash import * # NOQA
        >>> import numpy as np
        >>> data = np.array(['a', 'b'], dtype=object)
        >>> text = hashstr(data, alphabet=ALPHABET_27)
        >>> result = ('text = %s' % (str(text),))
        >>> print(result)
    Ignore:
        data = np.array(['a', 'b'], dtype=object)
        data.tobytes()
        data = np.array(['a', 'b'])
        data = ['a', 'b']
        data = np.array([1, 2, 3])
        import hashlib
        from six.moves import cPickle as pickle
        pickle.dumps(data, protocol=2)
        python2 -c "import hashlib, numpy; print(hashlib.sha1('ab').hexdigest())"
        python3 -c "import hashlib, numpy; print(hashlib.sha1('ab'.encode('utf8')).hexdigest())"
        python2 -c "import hashlib, numpy; print(hashlib.sha1('ab').hexdigest())"
        python3 -c "import hashlib, numpy; print(hashlib.sha1(b'ab').hexdigest())"
        python2 -c "import hashlib, numpy; print(hashlib.sha1(numpy.array([1, 2])).hexdigest())"
        python3 -c "import hashlib, numpy; print(hashlib.sha1(numpy.array([1, 2])).hexdigest())"
        # TODO: numpy arrays of strings must be encoded to bytes first in python3
        python2 -c "import hashlib, numpy; print(hashlib.sha1(numpy.array(['a', 'b'])).hexdigest())"
        python3 -c "import hashlib, numpy; print(hashlib.sha1(numpy.array([b'a', b'b'])).hexdigest())"
        python -c "import hashlib, numpy; print(hashlib.sha1(numpy.array(['a', 'b'], dtype=object)).hexdigest())"
        python -c "import hashlib, numpy; print(hashlib.sha1(numpy.array(['a', 'b'], dtype=object)).hexdigest())"
    """
    if util_type.HAVE_NUMPY and isinstance(data, np.ndarray):
        if data.dtype.kind == 'O':
            # Object arrays pickle differently between python 2 and 3, so
            # their hashes are not stable across interpreter versions.
            msg = '[ut] hashing ndarrays with dtype=object is unstable'
            warnings.warn(msg, RuntimeWarning)
        # Serialize the array to pickle bytes so sha512 can consume it.
        # (tobytes also works, but differs between python 2 and 3 for objects)
        data = data.dumps()
    if isinstance(data, tuple):
        # FIXME: repr() is not a canonical serialization, so hashing tuples
        # this way is fragile; it is kept so existing hashes stay stable.
        data = repr(data)
    # convert unicode into raw bytes
    if isinstance(data, six.text_type):
        data = data.encode('utf-8')
    if isinstance(data, stringlike) and len(data) == 0:
        # Make a special hash for empty data
        text = (alphabet[0] * hashlen)
    else:
        # Get a 128 character hex string from sha512
        text = hashlib.sha512(data).hexdigest()
        # Shorten length of string (by increasing base)
        hashstr2 = convert_hexstr_to_bigbase(text, alphabet, bigbase=len(alphabet))
        # Truncate
        text = hashstr2[:hashlen]
    return text
return text | def function[hashstr, parameter[data, hashlen, alphabet]]:
constant[
python -c "import utool as ut; print(ut.hashstr('abcd'))"
Args:
data (hashable):
hashlen (int): (default = 16)
alphabet (list): list of characters:
Returns:
str: hashstr
CommandLine:
python -m utool.util_hash --test-hashstr
python3 -m utool.util_hash --test-hashstr
python3 -m utool.util_hash --test-hashstr:2
python -m utool.util_hash hashstr:3
python3 -m utool.util_hash hashstr:3
Example0:
>>> # ENABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> data = 'foobar'
>>> hashlen = 16
>>> alphabet = ALPHABET_41
>>> text = hashstr(data, hashlen, alphabet)
>>> result = ('text = %s' % (str(text),))
>>> print(result)
text = mi5yum60mbxhyp+x
Example1:
>>> # ENABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> data = ''
>>> hashlen = 16
>>> alphabet = ALPHABET_41
>>> text = hashstr(data, hashlen, alphabet)
>>> result = ('text = %s' % (str(text),))
>>> print(result)
text = 0000000000000000
Example2:
>>> # ENABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> import numpy as np
>>> data = np.array([1, 2, 3])
>>> hashlen = 16
>>> alphabet = ALPHABET_41
>>> text = hashstr(data, hashlen, alphabet)
>>> result = ('text = %s' % (str(text),))
>>> print(result)
text = z5lqw0bzt4dmb9yy
Example2:
>>> # ENABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> import numpy as np
>>> from uuid import UUID
>>> data = (UUID('7cd0197b-1394-9d16-b1eb-0d8d7a60aedc'), UUID('c76b54a5-adb6-7f16-f0fb-190ab99409f8'))
>>> hashlen = 16
>>> alphabet = ALPHABET_41
>>> text = hashstr_arr(data, 'label')
>>> result = ('text = %s' % (str(text),))
>>> print(result)
Example3:
>>> # DISABLE_DOCTEST
>>> # UNSTABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> import numpy as np
>>> data = np.array(['a', 'b'], dtype=object)
>>> text = hashstr(data, alphabet=ALPHABET_27)
>>> result = ('text = %s' % (str(text),))
>>> print(result)
Ignore:
data = np.array(['a', 'b'], dtype=object)
data.tobytes()
data = np.array(['a', 'b'])
data = ['a', 'b']
data = np.array([1, 2, 3])
import hashlib
from six.moves import cPickle as pickle
pickle.dumps(data, protocol=2)
python2 -c "import hashlib, numpy; print(hashlib.sha1('ab').hexdigest())"
python3 -c "import hashlib, numpy; print(hashlib.sha1('ab'.encode('utf8')).hexdigest())"
python2 -c "import hashlib, numpy; print(hashlib.sha1('ab').hexdigest())"
python3 -c "import hashlib, numpy; print(hashlib.sha1(b'ab').hexdigest())"
python2 -c "import hashlib, numpy; print(hashlib.sha1(numpy.array([1, 2])).hexdigest())"
python3 -c "import hashlib, numpy; print(hashlib.sha1(numpy.array([1, 2])).hexdigest())"
# TODO: numpy arrays of strings must be encoded to bytes first in python3
python2 -c "import hashlib, numpy; print(hashlib.sha1(numpy.array(['a', 'b'])).hexdigest())"
python3 -c "import hashlib, numpy; print(hashlib.sha1(numpy.array([b'a', b'b'])).hexdigest())"
python -c "import hashlib, numpy; print(hashlib.sha1(numpy.array(['a', 'b'], dtype=object)).hexdigest())"
python -c "import hashlib, numpy; print(hashlib.sha1(numpy.array(['a', 'b'], dtype=object)).hexdigest())"
]
if <ast.BoolOp object at 0x7da1b24ca3b0> begin[:]
if compare[name[data].dtype.kind equal[==] constant[O]] begin[:]
variable[msg] assign[=] constant[[ut] hashing ndarrays with dtype=object is unstable]
call[name[warnings].warn, parameter[name[msg], name[RuntimeWarning]]]
variable[data] assign[=] call[name[data].dumps, parameter[]]
if call[name[isinstance], parameter[name[data], name[tuple]]] begin[:]
if constant[False] begin[:]
variable[hasher] assign[=] call[name[hashlib].sha512, parameter[]]
variable[items] assign[=] name[data]
for taget[name[item]] in starred[name[items]] begin[:]
if call[name[isinstance], parameter[name[item], name[uuid].UUID]] begin[:]
call[name[hasher].update, parameter[name[item].bytes]]
variable[text] assign[=] call[name[hasher].hexdigest, parameter[]]
variable[hashstr2] assign[=] call[name[convert_hexstr_to_bigbase], parameter[name[text], name[alphabet]]]
variable[text] assign[=] call[name[hashstr2]][<ast.Slice object at 0x7da1b24c8ac0>]
return[name[text]]
if call[name[isinstance], parameter[name[data], name[six].text_type]] begin[:]
variable[data] assign[=] call[name[data].encode, parameter[constant[utf-8]]]
if <ast.BoolOp object at 0x7da1b24ca110> begin[:]
variable[text] assign[=] binary_operation[call[name[alphabet]][constant[0]] * name[hashlen]]
return[name[text]] | keyword[def] identifier[hashstr] ( identifier[data] , identifier[hashlen] = identifier[HASH_LEN] , identifier[alphabet] = identifier[ALPHABET] ):
literal[string]
keyword[if] identifier[util_type] . identifier[HAVE_NUMPY] keyword[and] identifier[isinstance] ( identifier[data] , identifier[np] . identifier[ndarray] ):
keyword[if] identifier[data] . identifier[dtype] . identifier[kind] == literal[string] :
identifier[msg] = literal[string]
identifier[warnings] . identifier[warn] ( identifier[msg] , identifier[RuntimeWarning] )
identifier[data] = identifier[data] . identifier[dumps] ()
keyword[if] identifier[isinstance] ( identifier[data] , identifier[tuple] ):
keyword[if] keyword[False] :
identifier[hasher] = identifier[hashlib] . identifier[sha512] ()
identifier[items] = identifier[data]
keyword[for] identifier[item] keyword[in] identifier[items] :
keyword[if] identifier[isinstance] ( identifier[item] , identifier[uuid] . identifier[UUID] ):
identifier[hasher] . identifier[update] ( identifier[item] . identifier[bytes] )
keyword[else] :
identifier[hasher] . identifier[update] ( identifier[item] )
identifier[text] = identifier[hasher] . identifier[hexdigest] ()
identifier[hashstr2] = identifier[convert_hexstr_to_bigbase] ( identifier[text] , identifier[alphabet] , identifier[bigbase] = identifier[len] ( identifier[alphabet] ))
identifier[text] = identifier[hashstr2] [: identifier[hashlen] ]
keyword[return] identifier[text]
keyword[else] :
identifier[msg] = literal[string]
identifier[data] = identifier[repr] ( identifier[data] )
keyword[if] identifier[isinstance] ( identifier[data] , identifier[six] . identifier[text_type] ):
identifier[data] = identifier[data] . identifier[encode] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[data] , identifier[stringlike] ) keyword[and] identifier[len] ( identifier[data] )== literal[int] :
identifier[text] =( identifier[alphabet] [ literal[int] ]* identifier[hashlen] )
keyword[else] :
identifier[text] = identifier[hashlib] . identifier[sha512] ( identifier[data] ). identifier[hexdigest] ()
identifier[hashstr2] = identifier[convert_hexstr_to_bigbase] ( identifier[text] , identifier[alphabet] , identifier[bigbase] = identifier[len] ( identifier[alphabet] ))
identifier[text] = identifier[hashstr2] [: identifier[hashlen] ]
keyword[return] identifier[text] | def hashstr(data, hashlen=HASH_LEN, alphabet=ALPHABET):
"""
python -c "import utool as ut; print(ut.hashstr('abcd'))"
Args:
data (hashable):
hashlen (int): (default = 16)
alphabet (list): list of characters:
Returns:
str: hashstr
CommandLine:
python -m utool.util_hash --test-hashstr
python3 -m utool.util_hash --test-hashstr
python3 -m utool.util_hash --test-hashstr:2
python -m utool.util_hash hashstr:3
python3 -m utool.util_hash hashstr:3
Example0:
>>> # ENABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> data = 'foobar'
>>> hashlen = 16
>>> alphabet = ALPHABET_41
>>> text = hashstr(data, hashlen, alphabet)
>>> result = ('text = %s' % (str(text),))
>>> print(result)
text = mi5yum60mbxhyp+x
Example1:
>>> # ENABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> data = ''
>>> hashlen = 16
>>> alphabet = ALPHABET_41
>>> text = hashstr(data, hashlen, alphabet)
>>> result = ('text = %s' % (str(text),))
>>> print(result)
text = 0000000000000000
Example2:
>>> # ENABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> import numpy as np
>>> data = np.array([1, 2, 3])
>>> hashlen = 16
>>> alphabet = ALPHABET_41
>>> text = hashstr(data, hashlen, alphabet)
>>> result = ('text = %s' % (str(text),))
>>> print(result)
text = z5lqw0bzt4dmb9yy
Example2:
>>> # ENABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> import numpy as np
>>> from uuid import UUID
>>> data = (UUID('7cd0197b-1394-9d16-b1eb-0d8d7a60aedc'), UUID('c76b54a5-adb6-7f16-f0fb-190ab99409f8'))
>>> hashlen = 16
>>> alphabet = ALPHABET_41
>>> text = hashstr_arr(data, 'label')
>>> result = ('text = %s' % (str(text),))
>>> print(result)
Example3:
>>> # DISABLE_DOCTEST
>>> # UNSTABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> import numpy as np
>>> data = np.array(['a', 'b'], dtype=object)
>>> text = hashstr(data, alphabet=ALPHABET_27)
>>> result = ('text = %s' % (str(text),))
>>> print(result)
Ignore:
data = np.array(['a', 'b'], dtype=object)
data.tobytes()
data = np.array(['a', 'b'])
data = ['a', 'b']
data = np.array([1, 2, 3])
import hashlib
from six.moves import cPickle as pickle
pickle.dumps(data, protocol=2)
python2 -c "import hashlib, numpy; print(hashlib.sha1('ab').hexdigest())"
python3 -c "import hashlib, numpy; print(hashlib.sha1('ab'.encode('utf8')).hexdigest())"
python2 -c "import hashlib, numpy; print(hashlib.sha1('ab').hexdigest())"
python3 -c "import hashlib, numpy; print(hashlib.sha1(b'ab').hexdigest())"
python2 -c "import hashlib, numpy; print(hashlib.sha1(numpy.array([1, 2])).hexdigest())"
python3 -c "import hashlib, numpy; print(hashlib.sha1(numpy.array([1, 2])).hexdigest())"
# TODO: numpy arrays of strings must be encoded to bytes first in python3
python2 -c "import hashlib, numpy; print(hashlib.sha1(numpy.array(['a', 'b'])).hexdigest())"
python3 -c "import hashlib, numpy; print(hashlib.sha1(numpy.array([b'a', b'b'])).hexdigest())"
python -c "import hashlib, numpy; print(hashlib.sha1(numpy.array(['a', 'b'], dtype=object)).hexdigest())"
python -c "import hashlib, numpy; print(hashlib.sha1(numpy.array(['a', 'b'], dtype=object)).hexdigest())"
"""
if util_type.HAVE_NUMPY and isinstance(data, np.ndarray):
if data.dtype.kind == 'O':
msg = '[ut] hashing ndarrays with dtype=object is unstable'
warnings.warn(msg, RuntimeWarning)
# but tobytes is ok, but differs between python 2 and 3 for objects
data = data.dumps() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# data = data.tobytes()
if isinstance(data, tuple):
# should instead do
if False:
hasher = hashlib.sha512()
items = data
for item in items:
if isinstance(item, uuid.UUID):
hasher.update(item.bytes) # depends on [control=['if'], data=[]]
else:
hasher.update(item) # depends on [control=['for'], data=['item']]
text = hasher.hexdigest()
hashstr2 = convert_hexstr_to_bigbase(text, alphabet, bigbase=len(alphabet))
# Truncate
text = hashstr2[:hashlen]
return text # depends on [control=['if'], data=[]]
else:
msg = '[ut] hashing tuples with repr is not a good idea. FIXME'
# warnings.warn(msg, RuntimeWarning)
data = repr(data) # Hack? # depends on [control=['if'], data=[]]
# convert unicode into raw bytes
if isinstance(data, six.text_type):
data = data.encode('utf-8') # depends on [control=['if'], data=[]]
if isinstance(data, stringlike) and len(data) == 0:
# Make a special hash for empty data
text = alphabet[0] * hashlen # depends on [control=['if'], data=[]]
else:
# Get a 128 character hex string
text = hashlib.sha512(data).hexdigest()
# Shorten length of string (by increasing base)
hashstr2 = convert_hexstr_to_bigbase(text, alphabet, bigbase=len(alphabet))
# Truncate
text = hashstr2[:hashlen]
return text |
def highlight_text(text, lexer_name='python', **kwargs):
    r"""Return ``text`` highlighted with ANSI escape codes via pygments.

    Args:
        text (str): source text to highlight.
        lexer_name (str): a pygments lexer name or a file extension
            ('py', 'h', 'cpp', 'c'); may also be a plain color name
            ('red', 'yellow', 'blue', 'green'), in which case the whole
            text is colored instead of lexed.
        **kwargs: forwarded to ``pygments.lexers.get_lexer_by_name``.

    Returns:
        str: the highlighted text, or ``text`` unchanged when colors are
        disabled or pygments fails / is unavailable.

    SeeAlso:
        color_text
    """
    # Resolve file extensions (with or without a leading dot) to lexer names.
    lexer_name = {
        'py': 'python',
        'h': 'cpp',
        'cpp': 'cpp',
        'c': 'cpp',
    }.get(lexer_name.replace('.', ''), lexer_name)
    if lexer_name in ['red', 'yellow', 'blue', 'green']:
        # hack: a bare color name means "color the whole text", not "lex it"
        return color_text(text, lexer_name)
    if ENABLE_COLORS:
        try:
            import pygments
            import pygments.lexers
            import pygments.formatters
            import pygments.formatters.terminal
            formater = pygments.formatters.terminal.TerminalFormatter(bg='dark')
            lexer = pygments.lexers.get_lexer_by_name(lexer_name, **kwargs)
            return pygments.highlight(text, lexer, formater)
        except Exception:
            # Best-effort: fall back to plain text unless strict mode is on.
            # utool is only needed on this error path, so import it here.
            import utool as ut
            if ut.SUPER_STRICT:
                raise
    return text
constant[
SeeAlso:
color_text
]
variable[lexer_name] assign[=] call[dictionary[[<ast.Constant object at 0x7da1b24c6ec0>, <ast.Constant object at 0x7da1b24c40d0>, <ast.Constant object at 0x7da1b24c7070>, <ast.Constant object at 0x7da1b24c4ac0>], [<ast.Constant object at 0x7da1b24c4ca0>, <ast.Constant object at 0x7da1b24c48b0>, <ast.Constant object at 0x7da1b24c74c0>, <ast.Constant object at 0x7da1b24c5f90>]].get, parameter[call[name[lexer_name].replace, parameter[constant[.], constant[]]], name[lexer_name]]]
if compare[name[lexer_name] in list[[<ast.Constant object at 0x7da1b24c4c70>, <ast.Constant object at 0x7da1b24c5840>, <ast.Constant object at 0x7da1b24c5330>, <ast.Constant object at 0x7da1b24c4d00>]]] begin[:]
return[call[name[color_text], parameter[name[text], name[lexer_name]]]]
import module[utool] as alias[ut]
if name[ENABLE_COLORS] begin[:]
<ast.Try object at 0x7da1b246a440>
return[name[text]] | keyword[def] identifier[highlight_text] ( identifier[text] , identifier[lexer_name] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[lexer_name] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}. identifier[get] ( identifier[lexer_name] . identifier[replace] ( literal[string] , literal[string] ), identifier[lexer_name] )
keyword[if] identifier[lexer_name] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
keyword[return] identifier[color_text] ( identifier[text] , identifier[lexer_name] )
keyword[import] identifier[utool] keyword[as] identifier[ut]
keyword[if] identifier[ENABLE_COLORS] :
keyword[try] :
keyword[import] identifier[pygments]
keyword[import] identifier[pygments] . identifier[lexers]
keyword[import] identifier[pygments] . identifier[formatters]
keyword[import] identifier[pygments] . identifier[formatters] . identifier[terminal]
identifier[formater] = identifier[pygments] . identifier[formatters] . identifier[terminal] . identifier[TerminalFormatter] ( identifier[bg] = literal[string] )
identifier[lexer] = identifier[pygments] . identifier[lexers] . identifier[get_lexer_by_name] ( identifier[lexer_name] ,** identifier[kwargs] )
keyword[return] identifier[pygments] . identifier[highlight] ( identifier[text] , identifier[lexer] , identifier[formater] )
keyword[except] identifier[Exception] :
keyword[if] identifier[ut] . identifier[SUPER_STRICT] :
keyword[raise]
keyword[return] identifier[text]
keyword[return] identifier[text] | def highlight_text(text, lexer_name='python', **kwargs):
"""
SeeAlso:
color_text
"""
# Resolve extensions to languages
lexer_name = {'py': 'python', 'h': 'cpp', 'cpp': 'cpp', 'c': 'cpp'}.get(lexer_name.replace('.', ''), lexer_name)
if lexer_name in ['red', 'yellow', 'blue', 'green']:
# hack for coloring
return color_text(text, lexer_name) # depends on [control=['if'], data=['lexer_name']]
import utool as ut
if ENABLE_COLORS:
try:
import pygments
import pygments.lexers
import pygments.formatters
#from pygments import highlight
#from pygments.lexers import get_lexer_by_name
#from pygments.formatters import TerminalFormatter
#if ut.WIN32:
# assert False
# #formater = pygments.formatters.terminal256.Terminal256Formatter()
# import pygments.formatters.terminal256
# formater = pygments.formatters.terminal256.Terminal256Formatter()
#else:
import pygments.formatters.terminal
formater = pygments.formatters.terminal.TerminalFormatter(bg='dark')
lexer = pygments.lexers.get_lexer_by_name(lexer_name, **kwargs)
return pygments.highlight(text, lexer, formater) # depends on [control=['try'], data=[]]
except Exception:
if ut.SUPER_STRICT:
raise # depends on [control=['if'], data=[]]
return text # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return text |
def _calculate_dispersion(X: Union[pd.DataFrame, np.ndarray], labels: np.ndarray, centroids: np.ndarray) -> float:
    """
    Calculate the dispersion between actual points and their assigned centroids.

    Args:
        X: data points, one instance per row. NOTE(review): iterating a
            DataFrame yields column labels, not rows — callers presumably
            pass an ndarray (or ``DataFrame.values``); confirm.
        labels: centroid index assigned to each instance (aligned with X).
        centroids: centroid coordinates, indexed by label.

    Returns:
        float: sum of squared deviations between each instance and its
        assigned centroid (0.0 for empty input).
    """
    # One np.sum over all per-instance squared deviations suffices; wrapping
    # it in a second np.sum would be redundant. Cast to float so the return
    # value matches the annotation instead of being a numpy scalar.
    disp = float(np.sum([np.abs(inst - centroids[label]) ** 2
                         for inst, label in zip(X, labels)]))
    return disp
constant[
Calculate the dispersion between actual points and their assigned centroids
]
variable[disp] assign[=] call[name[np].sum, parameter[call[name[np].sum, parameter[<ast.ListComp object at 0x7da1b10e4eb0>]]]]
return[name[disp]] | keyword[def] identifier[_calculate_dispersion] ( identifier[X] : identifier[Union] [ identifier[pd] . identifier[DataFrame] , identifier[np] . identifier[ndarray] ], identifier[labels] : identifier[np] . identifier[ndarray] , identifier[centroids] : identifier[np] . identifier[ndarray] )-> identifier[float] :
literal[string]
identifier[disp] = identifier[np] . identifier[sum] ( identifier[np] . identifier[sum] ([ identifier[np] . identifier[abs] ( identifier[inst] - identifier[centroids] [ identifier[label] ])** literal[int] keyword[for] identifier[inst] , identifier[label] keyword[in] identifier[zip] ( identifier[X] , identifier[labels] )]))
keyword[return] identifier[disp] | def _calculate_dispersion(X: Union[pd.DataFrame, np.ndarray], labels: np.ndarray, centroids: np.ndarray) -> float:
"""
Calculate the dispersion between actual points and their assigned centroids
"""
disp = np.sum(np.sum([np.abs(inst - centroids[label]) ** 2 for (inst, label) in zip(X, labels)])) # type: float
return disp |
def unicode_http_header(value):
    r"""
    Decode an RFC 2047 encoded ASCII HTTP header into a unicode string,
    applying the charset declared in each encoded word.
    >>> unicode_http_header('=?iso-8859-1?q?p=F6stal?=') == u'p\xf6stal'
    True
    >>> unicode_http_header(b'=?iso-8859-1?q?p=F6stal?=') == u'p\xf6stal'
    True
    >>> unicode_http_header('p\xf6stal') == u'p\xf6stal'
    True
    """
    if six.PY3:  # pragma: no cover
        # email.header.decode_header expects str, not bytes. Header bytes
        # are almost always ASCII, so a plain .decode() is good enough.
        if isinstance(value, six.binary_type):
            value = value.decode()
    decoded_parts = []
    for chunk, charset in decode_header(value):
        if isinstance(chunk, six.text_type):
            decoded_parts.append(chunk)
        else:
            # Fall back to iso-8859-1 when no charset was declared.
            decoded_parts.append(six.text_type(chunk, charset or 'iso-8859-1'))
    return u''.join(decoded_parts)
constant[
Convert an ASCII HTTP header string into a unicode string with the
appropriate encoding applied. Expects headers to be RFC 2047 compliant.
>>> unicode_http_header('=?iso-8859-1?q?p=F6stal?=') == u'p\xf6stal'
True
>>> unicode_http_header(b'=?iso-8859-1?q?p=F6stal?=') == u'p\xf6stal'
True
>>> unicode_http_header('p\xf6stal') == u'p\xf6stal'
True
]
if name[six].PY3 begin[:]
if call[name[isinstance], parameter[name[value], name[six].binary_type]] begin[:]
variable[value] assign[=] call[name[value].decode, parameter[]]
return[call[constant[].join, parameter[<ast.ListComp object at 0x7da18dc07a60>]]] | keyword[def] identifier[unicode_http_header] ( identifier[value] ):
literal[string]
keyword[if] identifier[six] . identifier[PY3] :
keyword[if] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[binary_type] ):
identifier[value] = identifier[value] . identifier[decode] ()
keyword[return] literal[string] . identifier[join] ([ identifier[six] . identifier[text_type] ( identifier[s] , identifier[e] keyword[or] literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[s] , identifier[six] . identifier[text_type] ) keyword[else] identifier[s]
keyword[for] identifier[s] , identifier[e] keyword[in] identifier[decode_header] ( identifier[value] )]) | def unicode_http_header(value):
"""
Convert an ASCII HTTP header string into a unicode string with the
appropriate encoding applied. Expects headers to be RFC 2047 compliant.
>>> unicode_http_header('=?iso-8859-1?q?p=F6stal?=') == u'p\\xf6stal'
True
>>> unicode_http_header(b'=?iso-8859-1?q?p=F6stal?=') == u'p\\xf6stal'
True
>>> unicode_http_header('p\\xf6stal') == u'p\\xf6stal'
True
"""
if six.PY3: # pragma: no cover
# email.header.decode_header expects strings, not bytes. Your input data may be in bytes.
# Since these bytes are almost always ASCII, calling `.decode()` on it without specifying
# a charset should work fine.
if isinstance(value, six.binary_type):
value = value.decode() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return u''.join([six.text_type(s, e or 'iso-8859-1') if not isinstance(s, six.text_type) else s for (s, e) in decode_header(value)]) |
def _set_char(self, char, type):
    '''
    Sets the currently active character, e.g. ト. We save some information
    about the character as well. active_char_info contains the full
    tuple of rōmaji info, and active_ro_vowel contains e.g. 'o' for ト.
    We also set the character type: either a consonant-vowel pair
    or a vowel. This affects the way the character is flushed later.
    '''
    # Publish the incoming character as lookahead *before* flushing --
    # presumably _flush_char() inspects next_char_info/next_char_type
    # while emitting the currently active character (TODO confirm).
    self.next_char_info = self._char_lookup(char)
    self.next_char_type = type
    # Flush the previously active character now that lookahead is set.
    self._flush_char()
    # Promote the new character to the "active" slot.
    self.active_char = char
    self.active_char_type = type
    self.active_char_info = self._char_lookup(char)
    # Romanized vowel of the active char (e.g. 'o' for ト); extraction
    # depends on whether it is a consonant-vowel pair or a bare vowel.
    self.active_vowel_ro = self._char_ro_vowel(self.active_char_info, type)
constant[
Sets the currently active character, e.g. ト. We save some information
about the character as well. active_char_info contains the full
tuple of rōmaji info, and active_ro_vowel contains e.g. 'o' for ト.
We also set the character type: either a consonant-vowel pair
or a vowel. This affects the way the character is flushed later.
]
name[self].next_char_info assign[=] call[name[self]._char_lookup, parameter[name[char]]]
name[self].next_char_type assign[=] name[type]
call[name[self]._flush_char, parameter[]]
name[self].active_char assign[=] name[char]
name[self].active_char_type assign[=] name[type]
name[self].active_char_info assign[=] call[name[self]._char_lookup, parameter[name[char]]]
name[self].active_vowel_ro assign[=] call[name[self]._char_ro_vowel, parameter[name[self].active_char_info, name[type]]] | keyword[def] identifier[_set_char] ( identifier[self] , identifier[char] , identifier[type] ):
literal[string]
identifier[self] . identifier[next_char_info] = identifier[self] . identifier[_char_lookup] ( identifier[char] )
identifier[self] . identifier[next_char_type] = identifier[type]
identifier[self] . identifier[_flush_char] ()
identifier[self] . identifier[active_char] = identifier[char]
identifier[self] . identifier[active_char_type] = identifier[type]
identifier[self] . identifier[active_char_info] = identifier[self] . identifier[_char_lookup] ( identifier[char] )
identifier[self] . identifier[active_vowel_ro] = identifier[self] . identifier[_char_ro_vowel] ( identifier[self] . identifier[active_char_info] , identifier[type] ) | def _set_char(self, char, type):
"""
Sets the currently active character, e.g. ト. We save some information
about the character as well. active_char_info contains the full
tuple of rōmaji info, and active_ro_vowel contains e.g. 'o' for ト.
We also set the character type: either a consonant-vowel pair
or a vowel. This affects the way the character is flushed later.
"""
self.next_char_info = self._char_lookup(char)
self.next_char_type = type
self._flush_char()
self.active_char = char
self.active_char_type = type
self.active_char_info = self._char_lookup(char)
self.active_vowel_ro = self._char_ro_vowel(self.active_char_info, type) |
def heartbeat_encode(self, type, autopilot, base_mode, custom_mode, system_status, mavlink_version=2):
    '''
    Build (but do not send) a HEARTBEAT message. The heartbeat shows
    that a system is present and responding; the MAV type and autopilot
    fields let the receiving system treat further messages from this
    system appropriately (e.g. by laying out the user interface based
    on the autopilot).
    type             : Type of the MAV (quadrotor, helicopter, etc., up to 15 types, defined in MAV_TYPE ENUM) (uint8_t)
    autopilot        : Autopilot type / class. defined in MAV_AUTOPILOT ENUM (uint8_t)
    base_mode        : System mode bitfield, see MAV_MODE_FLAGS ENUM in mavlink/include/mavlink_types.h (uint8_t)
    custom_mode      : A bitfield for use for autopilot-specific flags. (uint32_t)
    system_status    : System status flag, see MAV_STATE ENUM (uint8_t)
    mavlink_version  : MAVLink version (uint8_t)
    '''
    msg = MAVLink_heartbeat_message(type, autopilot, base_mode,
                                    custom_mode, system_status,
                                    mavlink_version)
    return msg
constant[
The heartbeat message shows that a system is present and responding.
The type of the MAV and Autopilot hardware allow the
receiving system to treat further messages from this
system appropriate (e.g. by laying out the user
interface based on the autopilot).
type : Type of the MAV (quadrotor, helicopter, etc., up to 15 types, defined in MAV_TYPE ENUM) (uint8_t)
autopilot : Autopilot type / class. defined in MAV_AUTOPILOT ENUM (uint8_t)
base_mode : System mode bitfield, see MAV_MODE_FLAGS ENUM in mavlink/include/mavlink_types.h (uint8_t)
custom_mode : A bitfield for use for autopilot-specific flags. (uint32_t)
system_status : System status flag, see MAV_STATE ENUM (uint8_t)
mavlink_version : MAVLink version (uint8_t)
]
return[call[name[MAVLink_heartbeat_message], parameter[name[type], name[autopilot], name[base_mode], name[custom_mode], name[system_status], name[mavlink_version]]]] | keyword[def] identifier[heartbeat_encode] ( identifier[self] , identifier[type] , identifier[autopilot] , identifier[base_mode] , identifier[custom_mode] , identifier[system_status] , identifier[mavlink_version] = literal[int] ):
literal[string]
keyword[return] identifier[MAVLink_heartbeat_message] ( identifier[type] , identifier[autopilot] , identifier[base_mode] , identifier[custom_mode] , identifier[system_status] , identifier[mavlink_version] ) | def heartbeat_encode(self, type, autopilot, base_mode, custom_mode, system_status, mavlink_version=2):
"""
The heartbeat message shows that a system is present and responding.
The type of the MAV and Autopilot hardware allow the
receiving system to treat further messages from this
system appropriate (e.g. by laying out the user
interface based on the autopilot).
type : Type of the MAV (quadrotor, helicopter, etc., up to 15 types, defined in MAV_TYPE ENUM) (uint8_t)
autopilot : Autopilot type / class. defined in MAV_AUTOPILOT ENUM (uint8_t)
base_mode : System mode bitfield, see MAV_MODE_FLAGS ENUM in mavlink/include/mavlink_types.h (uint8_t)
custom_mode : A bitfield for use for autopilot-specific flags. (uint32_t)
system_status : System status flag, see MAV_STATE ENUM (uint8_t)
mavlink_version : MAVLink version (uint8_t)
"""
return MAVLink_heartbeat_message(type, autopilot, base_mode, custom_mode, system_status, mavlink_version) |
def _disconnect_locked(self):
    """Closes the current connection. Assume self._lock is held."""
    # Flip the flag before waking waiters so anyone blocked on
    # _connect_cond observes the disconnected state when it wakes.
    self._connected = False
    self._connect_cond.notify_all()
    # Drop the telnet handle. NOTE(review): the connection is not
    # explicitly closed here -- presumably the peer/GC tears it down;
    # confirm against the caller.
    self._telnet = None
    _LOGGER.warning("Disconnected")
constant[Closes the current connection. Assume self._lock is held.]
name[self]._connected assign[=] constant[False]
call[name[self]._connect_cond.notify_all, parameter[]]
name[self]._telnet assign[=] constant[None]
call[name[_LOGGER].warning, parameter[constant[Disconnected]]] | keyword[def] identifier[_disconnect_locked] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_connected] = keyword[False]
identifier[self] . identifier[_connect_cond] . identifier[notify_all] ()
identifier[self] . identifier[_telnet] = keyword[None]
identifier[_LOGGER] . identifier[warning] ( literal[string] ) | def _disconnect_locked(self):
"""Closes the current connection. Assume self._lock is held."""
self._connected = False
self._connect_cond.notify_all()
self._telnet = None
_LOGGER.warning('Disconnected') |
def decode_list(self, node, cache, as_map_key):
"""Special case decodes map-as-array.
Otherwise lists are treated as Python lists.
Arguments follow the same convention as the top-level 'decode'
function.
"""
if node:
if node[0] == MAP_AS_ARR:
# key must be decoded before value for caching to work.
returned_dict = {}
for k, v in pairs(node[1:]):
key = self._decode(k, cache, True)
val = self._decode(v, cache, as_map_key)
returned_dict[key] = val
return transit_types.frozendict(returned_dict)
decoded = self._decode(node[0], cache, as_map_key)
if isinstance(decoded, Tag):
return self.decode_tag(decoded.tag,
self._decode(node[1], cache, as_map_key))
return tuple(self._decode(x, cache, as_map_key) for x in node) | def function[decode_list, parameter[self, node, cache, as_map_key]]:
constant[Special case decodes map-as-array.
Otherwise lists are treated as Python lists.
Arguments follow the same convention as the top-level 'decode'
function.
]
if name[node] begin[:]
if compare[call[name[node]][constant[0]] equal[==] name[MAP_AS_ARR]] begin[:]
variable[returned_dict] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da20c7cb340>, <ast.Name object at 0x7da20c7c9a20>]]] in starred[call[name[pairs], parameter[call[name[node]][<ast.Slice object at 0x7da20c7c83a0>]]]] begin[:]
variable[key] assign[=] call[name[self]._decode, parameter[name[k], name[cache], constant[True]]]
variable[val] assign[=] call[name[self]._decode, parameter[name[v], name[cache], name[as_map_key]]]
call[name[returned_dict]][name[key]] assign[=] name[val]
return[call[name[transit_types].frozendict, parameter[name[returned_dict]]]]
variable[decoded] assign[=] call[name[self]._decode, parameter[call[name[node]][constant[0]], name[cache], name[as_map_key]]]
if call[name[isinstance], parameter[name[decoded], name[Tag]]] begin[:]
return[call[name[self].decode_tag, parameter[name[decoded].tag, call[name[self]._decode, parameter[call[name[node]][constant[1]], name[cache], name[as_map_key]]]]]]
return[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da20c7c9330>]]] | keyword[def] identifier[decode_list] ( identifier[self] , identifier[node] , identifier[cache] , identifier[as_map_key] ):
literal[string]
keyword[if] identifier[node] :
keyword[if] identifier[node] [ literal[int] ]== identifier[MAP_AS_ARR] :
identifier[returned_dict] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[pairs] ( identifier[node] [ literal[int] :]):
identifier[key] = identifier[self] . identifier[_decode] ( identifier[k] , identifier[cache] , keyword[True] )
identifier[val] = identifier[self] . identifier[_decode] ( identifier[v] , identifier[cache] , identifier[as_map_key] )
identifier[returned_dict] [ identifier[key] ]= identifier[val]
keyword[return] identifier[transit_types] . identifier[frozendict] ( identifier[returned_dict] )
identifier[decoded] = identifier[self] . identifier[_decode] ( identifier[node] [ literal[int] ], identifier[cache] , identifier[as_map_key] )
keyword[if] identifier[isinstance] ( identifier[decoded] , identifier[Tag] ):
keyword[return] identifier[self] . identifier[decode_tag] ( identifier[decoded] . identifier[tag] ,
identifier[self] . identifier[_decode] ( identifier[node] [ literal[int] ], identifier[cache] , identifier[as_map_key] ))
keyword[return] identifier[tuple] ( identifier[self] . identifier[_decode] ( identifier[x] , identifier[cache] , identifier[as_map_key] ) keyword[for] identifier[x] keyword[in] identifier[node] ) | def decode_list(self, node, cache, as_map_key):
"""Special case decodes map-as-array.
Otherwise lists are treated as Python lists.
Arguments follow the same convention as the top-level 'decode'
function.
"""
if node:
if node[0] == MAP_AS_ARR:
# key must be decoded before value for caching to work.
returned_dict = {}
for (k, v) in pairs(node[1:]):
key = self._decode(k, cache, True)
val = self._decode(v, cache, as_map_key)
returned_dict[key] = val # depends on [control=['for'], data=[]]
return transit_types.frozendict(returned_dict) # depends on [control=['if'], data=[]]
decoded = self._decode(node[0], cache, as_map_key)
if isinstance(decoded, Tag):
return self.decode_tag(decoded.tag, self._decode(node[1], cache, as_map_key)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return tuple((self._decode(x, cache, as_map_key) for x in node)) |
def plot_triaxial_descent_ascent(Ax, Az, des, asc):
'''Plot triaxial accelerometer data for whole deployment, descents, and
ascents
Only x and z axes are ploted since these are associated with stroking
Args
----
Ax: ndarray
X-axis acclerometer data array
Az: ndarray
Z-axis acclerometer data array
des: ndarray
boolean mask for slicing descent phases of dives from tag dta
asc: ndarray
boolean mask for slicing asccent phases of dives from tag dta
'''
import numpy
from . import plotutils
fig, ((ax1, ax3), (ax2, ax4)) = plt.subplots(2, 2, sharex=True, sharey=True)
# Convert boolean mask to indices
des_ind = numpy.where(des)[0]
asc_ind = numpy.where(asc)[0]
cols = [('x', Ax, [ax1, ax2]),
('z', Az, [ax3, ax4])]
for label, data, axes in cols:
axes[0].title.set_text('Whole {}'.format(label))
axes[0].plot(range(len(data)), data, color=_colors[0],
linewidth=_linewidth, label='{}'.format(label))
axes[1].title.set_text('Descents & Ascents {}'.format(label))
axes[1] = plotutils.plot_noncontiguous(axes[1], data, des_ind,
color=_colors[1],
label='descents')
axes[1] = plotutils.plot_noncontiguous(axes[1], data, asc_ind,
color=_colors[2],
label='ascents')
axes[1].legend(loc='upper right')
plt.show()
return None | def function[plot_triaxial_descent_ascent, parameter[Ax, Az, des, asc]]:
constant[Plot triaxial accelerometer data for whole deployment, descents, and
ascents
Only x and z axes are ploted since these are associated with stroking
Args
----
Ax: ndarray
X-axis acclerometer data array
Az: ndarray
Z-axis acclerometer data array
des: ndarray
boolean mask for slicing descent phases of dives from tag dta
asc: ndarray
boolean mask for slicing asccent phases of dives from tag dta
]
import module[numpy]
from relative_module[None] import module[plotutils]
<ast.Tuple object at 0x7da1b1365780> assign[=] call[name[plt].subplots, parameter[constant[2], constant[2]]]
variable[des_ind] assign[=] call[call[name[numpy].where, parameter[name[des]]]][constant[0]]
variable[asc_ind] assign[=] call[call[name[numpy].where, parameter[name[asc]]]][constant[0]]
variable[cols] assign[=] list[[<ast.Tuple object at 0x7da1b1364190>, <ast.Tuple object at 0x7da1b1367130>]]
for taget[tuple[[<ast.Name object at 0x7da1b1365870>, <ast.Name object at 0x7da1b1366680>, <ast.Name object at 0x7da1b1365c00>]]] in starred[name[cols]] begin[:]
call[call[name[axes]][constant[0]].title.set_text, parameter[call[constant[Whole {}].format, parameter[name[label]]]]]
call[call[name[axes]][constant[0]].plot, parameter[call[name[range], parameter[call[name[len], parameter[name[data]]]]], name[data]]]
call[call[name[axes]][constant[1]].title.set_text, parameter[call[constant[Descents & Ascents {}].format, parameter[name[label]]]]]
call[name[axes]][constant[1]] assign[=] call[name[plotutils].plot_noncontiguous, parameter[call[name[axes]][constant[1]], name[data], name[des_ind]]]
call[name[axes]][constant[1]] assign[=] call[name[plotutils].plot_noncontiguous, parameter[call[name[axes]][constant[1]], name[data], name[asc_ind]]]
call[call[name[axes]][constant[1]].legend, parameter[]]
call[name[plt].show, parameter[]]
return[constant[None]] | keyword[def] identifier[plot_triaxial_descent_ascent] ( identifier[Ax] , identifier[Az] , identifier[des] , identifier[asc] ):
literal[string]
keyword[import] identifier[numpy]
keyword[from] . keyword[import] identifier[plotutils]
identifier[fig] ,(( identifier[ax1] , identifier[ax3] ),( identifier[ax2] , identifier[ax4] ))= identifier[plt] . identifier[subplots] ( literal[int] , literal[int] , identifier[sharex] = keyword[True] , identifier[sharey] = keyword[True] )
identifier[des_ind] = identifier[numpy] . identifier[where] ( identifier[des] )[ literal[int] ]
identifier[asc_ind] = identifier[numpy] . identifier[where] ( identifier[asc] )[ literal[int] ]
identifier[cols] =[( literal[string] , identifier[Ax] ,[ identifier[ax1] , identifier[ax2] ]),
( literal[string] , identifier[Az] ,[ identifier[ax3] , identifier[ax4] ])]
keyword[for] identifier[label] , identifier[data] , identifier[axes] keyword[in] identifier[cols] :
identifier[axes] [ literal[int] ]. identifier[title] . identifier[set_text] ( literal[string] . identifier[format] ( identifier[label] ))
identifier[axes] [ literal[int] ]. identifier[plot] ( identifier[range] ( identifier[len] ( identifier[data] )), identifier[data] , identifier[color] = identifier[_colors] [ literal[int] ],
identifier[linewidth] = identifier[_linewidth] , identifier[label] = literal[string] . identifier[format] ( identifier[label] ))
identifier[axes] [ literal[int] ]. identifier[title] . identifier[set_text] ( literal[string] . identifier[format] ( identifier[label] ))
identifier[axes] [ literal[int] ]= identifier[plotutils] . identifier[plot_noncontiguous] ( identifier[axes] [ literal[int] ], identifier[data] , identifier[des_ind] ,
identifier[color] = identifier[_colors] [ literal[int] ],
identifier[label] = literal[string] )
identifier[axes] [ literal[int] ]= identifier[plotutils] . identifier[plot_noncontiguous] ( identifier[axes] [ literal[int] ], identifier[data] , identifier[asc_ind] ,
identifier[color] = identifier[_colors] [ literal[int] ],
identifier[label] = literal[string] )
identifier[axes] [ literal[int] ]. identifier[legend] ( identifier[loc] = literal[string] )
identifier[plt] . identifier[show] ()
keyword[return] keyword[None] | def plot_triaxial_descent_ascent(Ax, Az, des, asc):
"""Plot triaxial accelerometer data for whole deployment, descents, and
ascents
Only x and z axes are ploted since these are associated with stroking
Args
----
Ax: ndarray
X-axis acclerometer data array
Az: ndarray
Z-axis acclerometer data array
des: ndarray
boolean mask for slicing descent phases of dives from tag dta
asc: ndarray
boolean mask for slicing asccent phases of dives from tag dta
"""
import numpy
from . import plotutils
(fig, ((ax1, ax3), (ax2, ax4))) = plt.subplots(2, 2, sharex=True, sharey=True)
# Convert boolean mask to indices
des_ind = numpy.where(des)[0]
asc_ind = numpy.where(asc)[0]
cols = [('x', Ax, [ax1, ax2]), ('z', Az, [ax3, ax4])]
for (label, data, axes) in cols:
axes[0].title.set_text('Whole {}'.format(label))
axes[0].plot(range(len(data)), data, color=_colors[0], linewidth=_linewidth, label='{}'.format(label))
axes[1].title.set_text('Descents & Ascents {}'.format(label))
axes[1] = plotutils.plot_noncontiguous(axes[1], data, des_ind, color=_colors[1], label='descents')
axes[1] = plotutils.plot_noncontiguous(axes[1], data, asc_ind, color=_colors[2], label='ascents')
axes[1].legend(loc='upper right') # depends on [control=['for'], data=[]]
plt.show()
return None |
def debugger():
"""Return the current debugger instance (if any),
or creates a new one."""
dbg = _current[0]
if dbg is None or not dbg.active:
dbg = _current[0] = RemoteCeleryTrepan()
return dbg | def function[debugger, parameter[]]:
constant[Return the current debugger instance (if any),
or creates a new one.]
variable[dbg] assign[=] call[name[_current]][constant[0]]
if <ast.BoolOp object at 0x7da1b05bf970> begin[:]
variable[dbg] assign[=] call[name[RemoteCeleryTrepan], parameter[]]
return[name[dbg]] | keyword[def] identifier[debugger] ():
literal[string]
identifier[dbg] = identifier[_current] [ literal[int] ]
keyword[if] identifier[dbg] keyword[is] keyword[None] keyword[or] keyword[not] identifier[dbg] . identifier[active] :
identifier[dbg] = identifier[_current] [ literal[int] ]= identifier[RemoteCeleryTrepan] ()
keyword[return] identifier[dbg] | def debugger():
"""Return the current debugger instance (if any),
or creates a new one."""
dbg = _current[0]
if dbg is None or not dbg.active:
dbg = _current[0] = RemoteCeleryTrepan() # depends on [control=['if'], data=[]]
return dbg |
def got_response(self):
"""
:returns: Whether or not the :class:`~.MessageThread`. has received a
response.
"""
return any(message.sender != self.initiator
for message in self.messages) | def function[got_response, parameter[self]]:
constant[
:returns: Whether or not the :class:`~.MessageThread`. has received a
response.
]
return[call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b261d5d0>]]] | keyword[def] identifier[got_response] ( identifier[self] ):
literal[string]
keyword[return] identifier[any] ( identifier[message] . identifier[sender] != identifier[self] . identifier[initiator]
keyword[for] identifier[message] keyword[in] identifier[self] . identifier[messages] ) | def got_response(self):
"""
:returns: Whether or not the :class:`~.MessageThread`. has received a
response.
"""
return any((message.sender != self.initiator for message in self.messages)) |
def pitching_stats_range(start_dt=None, end_dt=None):
"""
Get all pitching stats for a set time range. This can be the past week, the
month of August, anything. Just supply the start and end date in YYYY-MM-DD
format.
"""
# ensure valid date strings, perform necessary processing for query
start_dt, end_dt = sanitize_input(start_dt, end_dt)
if datetime.datetime.strptime(start_dt, "%Y-%m-%d").year < 2008:
raise ValueError("Year must be 2008 or later")
if datetime.datetime.strptime(end_dt, "%Y-%m-%d").year < 2008:
raise ValueError("Year must be 2008 or later")
# retrieve html from baseball reference
soup = get_soup(start_dt, end_dt)
table = get_table(soup)
table = table.dropna(how='all') # drop if all columns are NA
#fix some strange formatting for percentage columns
table = table.replace('---%', np.nan)
#make sure these are all numeric
for column in ['Age', '#days', 'G', 'GS', 'W', 'L', 'SV', 'IP', 'H',
'R', 'ER', 'BB', 'SO', 'HR', 'HBP', 'ERA', 'AB', '2B',
'3B', 'IBB', 'GDP', 'SF', 'SB', 'CS', 'PO', 'BF', 'Pit',
'WHIP', 'BAbip', 'SO9', 'SO/W']:
table[column] = pd.to_numeric(table[column])
#convert str(xx%) values to float(0.XX) decimal values
for column in ['Str', 'StL', 'StS', 'GB/FB', 'LD', 'PU']:
table[column] = table[column].replace('%','',regex=True).astype('float')/100
table = table.drop('',1)
return table | def function[pitching_stats_range, parameter[start_dt, end_dt]]:
constant[
Get all pitching stats for a set time range. This can be the past week, the
month of August, anything. Just supply the start and end date in YYYY-MM-DD
format.
]
<ast.Tuple object at 0x7da1b1b3ece0> assign[=] call[name[sanitize_input], parameter[name[start_dt], name[end_dt]]]
if compare[call[name[datetime].datetime.strptime, parameter[name[start_dt], constant[%Y-%m-%d]]].year less[<] constant[2008]] begin[:]
<ast.Raise object at 0x7da1b1b3e920>
if compare[call[name[datetime].datetime.strptime, parameter[name[end_dt], constant[%Y-%m-%d]]].year less[<] constant[2008]] begin[:]
<ast.Raise object at 0x7da1b1b3e740>
variable[soup] assign[=] call[name[get_soup], parameter[name[start_dt], name[end_dt]]]
variable[table] assign[=] call[name[get_table], parameter[name[soup]]]
variable[table] assign[=] call[name[table].dropna, parameter[]]
variable[table] assign[=] call[name[table].replace, parameter[constant[---%], name[np].nan]]
for taget[name[column]] in starred[list[[<ast.Constant object at 0x7da1b1b3d420>, <ast.Constant object at 0x7da1b1b3d450>, <ast.Constant object at 0x7da1b1b3da80>, <ast.Constant object at 0x7da1b1b3f040>, <ast.Constant object at 0x7da1b1b3ceb0>, <ast.Constant object at 0x7da1b1b3ec80>, <ast.Constant object at 0x7da1b1b3f4f0>, <ast.Constant object at 0x7da1b1b3e440>, <ast.Constant object at 0x7da1b1b3ed70>, <ast.Constant object at 0x7da1b1b3cf10>, <ast.Constant object at 0x7da1b1b3f070>, <ast.Constant object at 0x7da1b1b3d300>, <ast.Constant object at 0x7da1b1b3ea40>, <ast.Constant object at 0x7da1b1b3f730>, <ast.Constant object at 0x7da1b1b3dc90>, <ast.Constant object at 0x7da1b1b3f2e0>, <ast.Constant object at 0x7da1b1b3c970>, <ast.Constant object at 0x7da1b1b3c0a0>, <ast.Constant object at 0x7da1b1b3d510>, <ast.Constant object at 0x7da1b1b3d990>, <ast.Constant object at 0x7da1b1b3e380>, <ast.Constant object at 0x7da1b1b3ce50>, <ast.Constant object at 0x7da1b1b3d9f0>, <ast.Constant object at 0x7da1b1b3c310>, <ast.Constant object at 0x7da1b1b3ea10>, <ast.Constant object at 0x7da1b1b3d1e0>, <ast.Constant object at 0x7da1b1b3c0d0>, <ast.Constant object at 0x7da1b1b3d240>, <ast.Constant object at 0x7da1b1b3cb20>, <ast.Constant object at 0x7da1b1b3dea0>, <ast.Constant object at 0x7da1b1b3cf40>]]] begin[:]
call[name[table]][name[column]] assign[=] call[name[pd].to_numeric, parameter[call[name[table]][name[column]]]]
for taget[name[column]] in starred[list[[<ast.Constant object at 0x7da1b1b3d3c0>, <ast.Constant object at 0x7da1b1b3e530>, <ast.Constant object at 0x7da1b1b3c280>, <ast.Constant object at 0x7da1b1b3d9c0>, <ast.Constant object at 0x7da1b1b3e080>, <ast.Constant object at 0x7da1b1b3c340>]]] begin[:]
call[name[table]][name[column]] assign[=] binary_operation[call[call[call[name[table]][name[column]].replace, parameter[constant[%], constant[]]].astype, parameter[constant[float]]] / constant[100]]
variable[table] assign[=] call[name[table].drop, parameter[constant[], constant[1]]]
return[name[table]] | keyword[def] identifier[pitching_stats_range] ( identifier[start_dt] = keyword[None] , identifier[end_dt] = keyword[None] ):
literal[string]
identifier[start_dt] , identifier[end_dt] = identifier[sanitize_input] ( identifier[start_dt] , identifier[end_dt] )
keyword[if] identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[start_dt] , literal[string] ). identifier[year] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[end_dt] , literal[string] ). identifier[year] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[soup] = identifier[get_soup] ( identifier[start_dt] , identifier[end_dt] )
identifier[table] = identifier[get_table] ( identifier[soup] )
identifier[table] = identifier[table] . identifier[dropna] ( identifier[how] = literal[string] )
identifier[table] = identifier[table] . identifier[replace] ( literal[string] , identifier[np] . identifier[nan] )
keyword[for] identifier[column] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[table] [ identifier[column] ]= identifier[pd] . identifier[to_numeric] ( identifier[table] [ identifier[column] ])
keyword[for] identifier[column] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[table] [ identifier[column] ]= identifier[table] [ identifier[column] ]. identifier[replace] ( literal[string] , literal[string] , identifier[regex] = keyword[True] ). identifier[astype] ( literal[string] )/ literal[int]
identifier[table] = identifier[table] . identifier[drop] ( literal[string] , literal[int] )
keyword[return] identifier[table] | def pitching_stats_range(start_dt=None, end_dt=None):
"""
Get all pitching stats for a set time range. This can be the past week, the
month of August, anything. Just supply the start and end date in YYYY-MM-DD
format.
"""
# ensure valid date strings, perform necessary processing for query
(start_dt, end_dt) = sanitize_input(start_dt, end_dt)
if datetime.datetime.strptime(start_dt, '%Y-%m-%d').year < 2008:
raise ValueError('Year must be 2008 or later') # depends on [control=['if'], data=[]]
if datetime.datetime.strptime(end_dt, '%Y-%m-%d').year < 2008:
raise ValueError('Year must be 2008 or later') # depends on [control=['if'], data=[]]
# retrieve html from baseball reference
soup = get_soup(start_dt, end_dt)
table = get_table(soup)
table = table.dropna(how='all') # drop if all columns are NA
#fix some strange formatting for percentage columns
table = table.replace('---%', np.nan)
#make sure these are all numeric
for column in ['Age', '#days', 'G', 'GS', 'W', 'L', 'SV', 'IP', 'H', 'R', 'ER', 'BB', 'SO', 'HR', 'HBP', 'ERA', 'AB', '2B', '3B', 'IBB', 'GDP', 'SF', 'SB', 'CS', 'PO', 'BF', 'Pit', 'WHIP', 'BAbip', 'SO9', 'SO/W']:
table[column] = pd.to_numeric(table[column]) # depends on [control=['for'], data=['column']]
#convert str(xx%) values to float(0.XX) decimal values
for column in ['Str', 'StL', 'StS', 'GB/FB', 'LD', 'PU']:
table[column] = table[column].replace('%', '', regex=True).astype('float') / 100 # depends on [control=['for'], data=['column']]
table = table.drop('', 1)
return table |
def _transform_variable_to_expression(expression, node, context):
"""Transform a Variable compiler expression into its SQLAlchemy expression representation.
Args:
expression: expression, Variable compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression.
"""
variable_name = expression.variable_name
if not variable_name.startswith(u'$'):
raise AssertionError(u'Unexpectedly received variable name {} that is not '
u'prefixed with "$"'.format(variable_name))
return bindparam(variable_name[1:]) | def function[_transform_variable_to_expression, parameter[expression, node, context]]:
constant[Transform a Variable compiler expression into its SQLAlchemy expression representation.
Args:
expression: expression, Variable compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression.
]
variable[variable_name] assign[=] name[expression].variable_name
if <ast.UnaryOp object at 0x7da1b17cf310> begin[:]
<ast.Raise object at 0x7da1b17cd7e0>
return[call[name[bindparam], parameter[call[name[variable_name]][<ast.Slice object at 0x7da1b16b5a80>]]]] | keyword[def] identifier[_transform_variable_to_expression] ( identifier[expression] , identifier[node] , identifier[context] ):
literal[string]
identifier[variable_name] = identifier[expression] . identifier[variable_name]
keyword[if] keyword[not] identifier[variable_name] . identifier[startswith] ( literal[string] ):
keyword[raise] identifier[AssertionError] ( literal[string]
literal[string] . identifier[format] ( identifier[variable_name] ))
keyword[return] identifier[bindparam] ( identifier[variable_name] [ literal[int] :]) | def _transform_variable_to_expression(expression, node, context):
"""Transform a Variable compiler expression into its SQLAlchemy expression representation.
Args:
expression: expression, Variable compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression.
"""
variable_name = expression.variable_name
if not variable_name.startswith(u'$'):
raise AssertionError(u'Unexpectedly received variable name {} that is not prefixed with "$"'.format(variable_name)) # depends on [control=['if'], data=[]]
return bindparam(variable_name[1:]) |
def button_press(self, terminal, event):
"""Handles the button press event in the terminal widget. If
any match string is caught, another application is open to
handle the matched resource uri.
"""
self.matched_value = ''
if (Vte.MAJOR_VERSION, Vte.MINOR_VERSION) >= (0, 46):
matched_string = self.match_check_event(event)
else:
matched_string = self.match_check(
int(event.x / self.get_char_width()), int(event.y / self.get_char_height())
)
self.found_link = None
if event.button == 1 and (event.get_state() & Gdk.ModifierType.CONTROL_MASK):
if (Vte.MAJOR_VERSION, Vte.MINOR_VERSION) > (0, 50):
s = self.hyperlink_check_event(event)
else:
s = None
if s is not None:
self._on_ctrl_click_matcher((s, None))
elif self.get_has_selection():
self.quick_open()
elif matched_string and matched_string[0]:
self._on_ctrl_click_matcher(matched_string)
elif event.button == 3 and matched_string:
self.found_link = self.handleTerminalMatch(matched_string)
self.matched_value = matched_string[0] | def function[button_press, parameter[self, terminal, event]]:
constant[Handles the button press event in the terminal widget. If
any match string is caught, another application is open to
handle the matched resource uri.
]
name[self].matched_value assign[=] constant[]
if compare[tuple[[<ast.Attribute object at 0x7da1b2346b30>, <ast.Attribute object at 0x7da1b2346110>]] greater_or_equal[>=] tuple[[<ast.Constant object at 0x7da1b23478b0>, <ast.Constant object at 0x7da1b2347ca0>]]] begin[:]
variable[matched_string] assign[=] call[name[self].match_check_event, parameter[name[event]]]
name[self].found_link assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b2346590> begin[:]
if compare[tuple[[<ast.Attribute object at 0x7da1b2344f70>, <ast.Attribute object at 0x7da1b170f7c0>]] greater[>] tuple[[<ast.Constant object at 0x7da1b170d120>, <ast.Constant object at 0x7da1b170cc70>]]] begin[:]
variable[s] assign[=] call[name[self].hyperlink_check_event, parameter[name[event]]]
if compare[name[s] is_not constant[None]] begin[:]
call[name[self]._on_ctrl_click_matcher, parameter[tuple[[<ast.Name object at 0x7da20c990ca0>, <ast.Constant object at 0x7da20c992a40>]]]] | keyword[def] identifier[button_press] ( identifier[self] , identifier[terminal] , identifier[event] ):
literal[string]
identifier[self] . identifier[matched_value] = literal[string]
keyword[if] ( identifier[Vte] . identifier[MAJOR_VERSION] , identifier[Vte] . identifier[MINOR_VERSION] )>=( literal[int] , literal[int] ):
identifier[matched_string] = identifier[self] . identifier[match_check_event] ( identifier[event] )
keyword[else] :
identifier[matched_string] = identifier[self] . identifier[match_check] (
identifier[int] ( identifier[event] . identifier[x] / identifier[self] . identifier[get_char_width] ()), identifier[int] ( identifier[event] . identifier[y] / identifier[self] . identifier[get_char_height] ())
)
identifier[self] . identifier[found_link] = keyword[None]
keyword[if] identifier[event] . identifier[button] == literal[int] keyword[and] ( identifier[event] . identifier[get_state] ()& identifier[Gdk] . identifier[ModifierType] . identifier[CONTROL_MASK] ):
keyword[if] ( identifier[Vte] . identifier[MAJOR_VERSION] , identifier[Vte] . identifier[MINOR_VERSION] )>( literal[int] , literal[int] ):
identifier[s] = identifier[self] . identifier[hyperlink_check_event] ( identifier[event] )
keyword[else] :
identifier[s] = keyword[None]
keyword[if] identifier[s] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_on_ctrl_click_matcher] (( identifier[s] , keyword[None] ))
keyword[elif] identifier[self] . identifier[get_has_selection] ():
identifier[self] . identifier[quick_open] ()
keyword[elif] identifier[matched_string] keyword[and] identifier[matched_string] [ literal[int] ]:
identifier[self] . identifier[_on_ctrl_click_matcher] ( identifier[matched_string] )
keyword[elif] identifier[event] . identifier[button] == literal[int] keyword[and] identifier[matched_string] :
identifier[self] . identifier[found_link] = identifier[self] . identifier[handleTerminalMatch] ( identifier[matched_string] )
identifier[self] . identifier[matched_value] = identifier[matched_string] [ literal[int] ] | def button_press(self, terminal, event):
"""Handles the button press event in the terminal widget. If
any match string is caught, another application is open to
handle the matched resource uri.
"""
self.matched_value = ''
if (Vte.MAJOR_VERSION, Vte.MINOR_VERSION) >= (0, 46):
matched_string = self.match_check_event(event) # depends on [control=['if'], data=[]]
else:
matched_string = self.match_check(int(event.x / self.get_char_width()), int(event.y / self.get_char_height()))
self.found_link = None
if event.button == 1 and event.get_state() & Gdk.ModifierType.CONTROL_MASK:
if (Vte.MAJOR_VERSION, Vte.MINOR_VERSION) > (0, 50):
s = self.hyperlink_check_event(event) # depends on [control=['if'], data=[]]
else:
s = None
if s is not None:
self._on_ctrl_click_matcher((s, None)) # depends on [control=['if'], data=['s']]
elif self.get_has_selection():
self.quick_open() # depends on [control=['if'], data=[]]
elif matched_string and matched_string[0]:
self._on_ctrl_click_matcher(matched_string) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif event.button == 3 and matched_string:
self.found_link = self.handleTerminalMatch(matched_string)
self.matched_value = matched_string[0] # depends on [control=['if'], data=[]] |
def create_relation(self, event, content_object, distinction=''):
"""
Creates a relation between event and content_object.
See EventRelation for help on distinction.
"""
return EventRelation.objects.create(
event=event,
distinction=distinction,
content_object=content_object) | def function[create_relation, parameter[self, event, content_object, distinction]]:
constant[
Creates a relation between event and content_object.
See EventRelation for help on distinction.
]
return[call[name[EventRelation].objects.create, parameter[]]] | keyword[def] identifier[create_relation] ( identifier[self] , identifier[event] , identifier[content_object] , identifier[distinction] = literal[string] ):
literal[string]
keyword[return] identifier[EventRelation] . identifier[objects] . identifier[create] (
identifier[event] = identifier[event] ,
identifier[distinction] = identifier[distinction] ,
identifier[content_object] = identifier[content_object] ) | def create_relation(self, event, content_object, distinction=''):
"""
Creates a relation between event and content_object.
See EventRelation for help on distinction.
"""
return EventRelation.objects.create(event=event, distinction=distinction, content_object=content_object) |
def archs(self, as_list=False):
"""Return all of the architectures for this target.
Args:
as_list (bool): Return a list instead of the default set object.
Returns:
set or list: All of the architectures used in this TargetSettings object.
"""
archs = self.arch_list().split('/')
if as_list:
return archs
return set(archs) | def function[archs, parameter[self, as_list]]:
constant[Return all of the architectures for this target.
Args:
as_list (bool): Return a list instead of the default set object.
Returns:
set or list: All of the architectures used in this TargetSettings object.
]
variable[archs] assign[=] call[call[name[self].arch_list, parameter[]].split, parameter[constant[/]]]
if name[as_list] begin[:]
return[name[archs]]
return[call[name[set], parameter[name[archs]]]] | keyword[def] identifier[archs] ( identifier[self] , identifier[as_list] = keyword[False] ):
literal[string]
identifier[archs] = identifier[self] . identifier[arch_list] (). identifier[split] ( literal[string] )
keyword[if] identifier[as_list] :
keyword[return] identifier[archs]
keyword[return] identifier[set] ( identifier[archs] ) | def archs(self, as_list=False):
"""Return all of the architectures for this target.
Args:
as_list (bool): Return a list instead of the default set object.
Returns:
set or list: All of the architectures used in this TargetSettings object.
"""
archs = self.arch_list().split('/')
if as_list:
return archs # depends on [control=['if'], data=[]]
return set(archs) |
def ptb_raw_data(data_path):
    """Load PTB raw data from data directory "data_path".

    Reads PTB text files, converts strings to integer ids,
    and performs mini-batching of the inputs.

    The PTB dataset comes from Tomas Mikolov's webpage:
    http://www.fit.vutbr.cz/~imikolov/rnnlm/simple-examples.tgz

    Args:
      data_path: string path to the directory where simple-examples.tgz has
        been extracted.

    Returns:
      tuple (train_data, valid_data, test_data, vocabulary)
      where each of the data objects can be passed to PTBIterator.
    """
    # Resolve the three split files relative to the data directory.
    train_path, valid_path, test_path = (
        os.path.join(data_path, name)
        for name in ("ptb.train.txt", "ptb.valid.txt", "ptb.test.txt"))
    # The vocabulary is always built from the training split so that the
    # validation/test splits share the same word -> id mapping.
    word_to_id = _build_vocab(train_path)
    converted = [_file_to_word_ids(path, word_to_id)
                 for path in (train_path, valid_path, test_path)]
    return converted[0], converted[1], converted[2], word_to_id
constant[Load PTB raw data from data directory "data_path".
Reads PTB text files, converts strings to integer ids,
and performs mini-batching of the inputs.
The PTB dataset comes from Tomas Mikolov's webpage:
http://www.fit.vutbr.cz/~imikolov/rnnlm/simple-examples.tgz
Args:
data_path: string path to the directory where simple-examples.tgz has
been extracted.
Returns:
tuple (train_data, valid_data, test_data, vocabulary)
where each of the data objects can be passed to PTBIterator.
]
variable[train_path] assign[=] call[name[os].path.join, parameter[name[data_path], constant[ptb.train.txt]]]
variable[valid_path] assign[=] call[name[os].path.join, parameter[name[data_path], constant[ptb.valid.txt]]]
variable[test_path] assign[=] call[name[os].path.join, parameter[name[data_path], constant[ptb.test.txt]]]
variable[word_to_id] assign[=] call[name[_build_vocab], parameter[name[train_path]]]
variable[train_data] assign[=] call[name[_file_to_word_ids], parameter[name[train_path], name[word_to_id]]]
variable[valid_data] assign[=] call[name[_file_to_word_ids], parameter[name[valid_path], name[word_to_id]]]
variable[test_data] assign[=] call[name[_file_to_word_ids], parameter[name[test_path], name[word_to_id]]]
return[tuple[[<ast.Name object at 0x7da1b1cc8f10>, <ast.Name object at 0x7da1b1cc8f40>, <ast.Name object at 0x7da1b1cc8bb0>, <ast.Name object at 0x7da1b1cc8be0>]]] | keyword[def] identifier[ptb_raw_data] ( identifier[data_path] ):
literal[string]
identifier[train_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[data_path] , literal[string] )
identifier[valid_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[data_path] , literal[string] )
identifier[test_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[data_path] , literal[string] )
identifier[word_to_id] = identifier[_build_vocab] ( identifier[train_path] )
identifier[train_data] = identifier[_file_to_word_ids] ( identifier[train_path] , identifier[word_to_id] )
identifier[valid_data] = identifier[_file_to_word_ids] ( identifier[valid_path] , identifier[word_to_id] )
identifier[test_data] = identifier[_file_to_word_ids] ( identifier[test_path] , identifier[word_to_id] )
keyword[return] identifier[train_data] , identifier[valid_data] , identifier[test_data] , identifier[word_to_id] | def ptb_raw_data(data_path):
"""Load PTB raw data from data directory "data_path".
Reads PTB text files, converts strings to integer ids,
and performs mini-batching of the inputs.
The PTB dataset comes from Tomas Mikolov's webpage:
http://www.fit.vutbr.cz/~imikolov/rnnlm/simple-examples.tgz
Args:
data_path: string path to the directory where simple-examples.tgz has
been extracted.
Returns:
tuple (train_data, valid_data, test_data, vocabulary)
where each of the data objects can be passed to PTBIterator.
"""
train_path = os.path.join(data_path, 'ptb.train.txt')
valid_path = os.path.join(data_path, 'ptb.valid.txt')
test_path = os.path.join(data_path, 'ptb.test.txt')
word_to_id = _build_vocab(train_path)
train_data = _file_to_word_ids(train_path, word_to_id)
valid_data = _file_to_word_ids(valid_path, word_to_id)
test_data = _file_to_word_ids(test_path, word_to_id)
return (train_data, valid_data, test_data, word_to_id) |
def p_function_expr_2(self, p):
    """
    function_expr \
        : FUNCTION identifier LPAREN RPAREN LBRACE function_body RBRACE
        | FUNCTION identifier LPAREN formal_parameter_list RPAREN \
            LBRACE function_body RBRACE
    """
    # NOTE: the docstring above is the PLY grammar rule - it is
    # behavior, not documentation, and must not be edited casually.
    # Eight symbols means the parameter-less alternative matched.
    if len(p) == 8:
        params, body = None, p[6]
    else:
        params, body = p[4], p[7]
    node = self.asttypes.FuncExpr(
        identifier=p[2], parameters=params, elements=body)
    node.setpos(p)
    p[0] = node
constant[
function_expr : FUNCTION identifier LPAREN RPAREN LBRACE function_body RBRACE
| FUNCTION identifier LPAREN formal_parameter_list RPAREN LBRACE function_body RBRACE
]
if compare[call[name[len], parameter[name[p]]] equal[==] constant[8]] begin[:]
call[name[p]][constant[0]] assign[=] call[name[self].asttypes.FuncExpr, parameter[]]
call[call[name[p]][constant[0]].setpos, parameter[name[p]]] | keyword[def] identifier[p_function_expr_2] ( identifier[self] , identifier[p] ):
literal[string]
keyword[if] identifier[len] ( identifier[p] )== literal[int] :
identifier[p] [ literal[int] ]= identifier[self] . identifier[asttypes] . identifier[FuncExpr] (
identifier[identifier] = identifier[p] [ literal[int] ], identifier[parameters] = keyword[None] , identifier[elements] = identifier[p] [ literal[int] ])
keyword[else] :
identifier[p] [ literal[int] ]= identifier[self] . identifier[asttypes] . identifier[FuncExpr] (
identifier[identifier] = identifier[p] [ literal[int] ], identifier[parameters] = identifier[p] [ literal[int] ], identifier[elements] = identifier[p] [ literal[int] ])
identifier[p] [ literal[int] ]. identifier[setpos] ( identifier[p] ) | def p_function_expr_2(self, p):
"""
function_expr : FUNCTION identifier LPAREN RPAREN LBRACE function_body RBRACE
| FUNCTION identifier LPAREN formal_parameter_list RPAREN LBRACE function_body RBRACE
"""
if len(p) == 8:
p[0] = self.asttypes.FuncExpr(identifier=p[2], parameters=None, elements=p[6]) # depends on [control=['if'], data=[]]
else:
p[0] = self.asttypes.FuncExpr(identifier=p[2], parameters=p[4], elements=p[7])
p[0].setpos(p) |
def register_variant(cls):
    """
    Registers the RADIUS attributes defined in this module.
    """
    # An explicit ``val`` override wins; otherwise key the registry on the
    # default value of the attribute's ``type`` field.
    key = cls.val if hasattr(cls, "val") else cls.type.default
    cls.registered_attributes[key] = cls
constant[
Registers the RADIUS attributes defined in this module.
]
if call[name[hasattr], parameter[name[cls], constant[val]]] begin[:]
call[name[cls].registered_attributes][name[cls].val] assign[=] name[cls] | keyword[def] identifier[register_variant] ( identifier[cls] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[cls] , literal[string] ):
identifier[cls] . identifier[registered_attributes] [ identifier[cls] . identifier[val] ]= identifier[cls]
keyword[else] :
identifier[cls] . identifier[registered_attributes] [ identifier[cls] . identifier[type] . identifier[default] ]= identifier[cls] | def register_variant(cls):
"""
Registers the RADIUS attributes defined in this module.
"""
if hasattr(cls, 'val'):
cls.registered_attributes[cls.val] = cls # depends on [control=['if'], data=[]]
else:
cls.registered_attributes[cls.type.default] = cls |
def persist(self: T, **kwargs) -> T:
    """Trigger computation while keeping the data as dask arrays.

    Like ``.compute()`` this forces evaluation of the underlying dask
    arrays, but the results are retained as dask arrays rather than being
    loaded into numpy. This is particularly useful with the
    dask.distributed scheduler, where it loads a large amount of data into
    distributed memory.

    Parameters
    ----------
    **kwargs : dict
        Additional keyword arguments passed on to ``dask.persist``.

    See Also
    --------
    dask.persist
    """
    # Shallow copy: variables are shared, so persisting mutates only the
    # new object's dask graphs, not the caller's.
    shallow = self.copy(deep=False)
    return shallow._persist_inplace(**kwargs)
constant[ Trigger computation, keeping data as dask arrays
This operation can be used to trigger computation on underlying dask
arrays, similar to ``.compute()``. However this operation keeps the
data as dask arrays. This is particularly useful when using the
dask.distributed scheduler and you want to load a large amount of data
into distributed memory.
Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to ``dask.persist``.
See Also
--------
dask.persist
]
variable[new] assign[=] call[name[self].copy, parameter[]]
return[call[name[new]._persist_inplace, parameter[]]] | keyword[def] identifier[persist] ( identifier[self] : identifier[T] ,** identifier[kwargs] )-> identifier[T] :
literal[string]
identifier[new] = identifier[self] . identifier[copy] ( identifier[deep] = keyword[False] )
keyword[return] identifier[new] . identifier[_persist_inplace] (** identifier[kwargs] ) | def persist(self: T, **kwargs) -> T:
""" Trigger computation, keeping data as dask arrays
This operation can be used to trigger computation on underlying dask
arrays, similar to ``.compute()``. However this operation keeps the
data as dask arrays. This is particularly useful when using the
dask.distributed scheduler and you want to load a large amount of data
into distributed memory.
Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to ``dask.persist``.
See Also
--------
dask.persist
"""
new = self.copy(deep=False)
return new._persist_inplace(**kwargs) |
def _run_job(self, job_n, job):
    """
    Runs a single job.

    Args:
        job_n: job number (1 index)
        job: Custodian job

    Raises:
        ValidationError: if a job fails validation
        ReturnCodeError: if the process has a return code different from 0
        NonRecoverableError: if an unrecoverable occurs
        MaxCorrectionsPerJobError: if max_errors_per_job is reached
        MaxCorrectionsError: if max_errors is reached
        MaxCorrectionsPerHandlerError: if max_errors_per_handler is reached
    """
    # Open a fresh run-log entry for this job; the boolean flags record
    # which (if any) terminal condition ended the job.
    self.run_log.append({"job": job.as_dict(), "corrections": [],
                         "handler": None, "validator": None,
                         "max_errors": False, "max_errors_per_job": False,
                         "max_errors_per_handler": False,
                         "nonzero_return_code": False})
    self.errors_current_job = 0
    # reset the counters of the number of times a correction has been
    # applied for each handler
    for h in self.handlers:
        h.n_applied_corrections = 0

    job.setup()

    # Each pass through this loop is one attempt at running the job; the
    # loop exits normally only via `return` after a clean run, or by
    # exhausting one of the error budgets (handled after the loop).
    attempt = 0
    while (self.total_errors < self.max_errors and
           self.errors_current_job < self.max_errors_per_job):
        attempt += 1
        logger.info(
            "Starting job no. {} ({}) attempt no. {}. Total errors and "
            "errors in job thus far = {}, {}.".format(
                job_n, job.name, attempt, self.total_errors,
                self.errors_current_job))
        p = job.run()
        # Check for errors using the error handlers and perform
        # corrections.
        has_error = False
        zero_return_code = True

        # While the job is running, we use the handlers that are
        # monitors to monitor the job.
        if isinstance(p, subprocess.Popen):
            if self.monitors:
                n = 0
                while True:
                    n += 1
                    time.sleep(self.polling_time_step)
                    if p.poll() is not None:
                        break
                    terminate = self.terminate_func or p.terminate
                    # Only run the (potentially expensive) monitor checks
                    # every monitor_freq polling intervals.
                    if n % self.monitor_freq == 0:
                        has_error = self._do_check(self.monitors,
                                                   terminate)
                    if terminate is not None and terminate != p.terminate:
                        time.sleep(self.polling_time_step)
            else:
                p.wait()
                if self.terminate_func is not None and \
                        self.terminate_func != p.terminate:
                    self.terminate_func()
                    time.sleep(self.polling_time_step)

            zero_return_code = p.returncode == 0

        logger.info("{}.run has completed. "
                    "Checking remaining handlers".format(job.name))
        # Check for errors again, since in some cases non-monitor
        # handlers fix the problems detected by monitors
        # if an error has been found, not all handlers need to run
        if has_error:
            self._do_check([h for h in self.handlers
                            if not h.is_monitor])
        else:
            has_error = self._do_check(self.handlers)

        if has_error:
            # This makes sure the job is killed cleanly for certain systems.
            job.terminate()

        # If there are no errors detected, perform
        # postprocessing and exit.
        if not has_error:
            for v in self.validators:
                if v.check():
                    self.run_log[-1]["validator"] = v
                    s = "Validation failed: {}".format(v)
                    raise ValidationError(s, True, v)
            if not zero_return_code:
                if self.terminate_on_nonzero_returncode:
                    self.run_log[-1]["nonzero_return_code"] = True
                    s = "Job return code is %d. Terminating..." % \
                        p.returncode
                    logger.info(s)
                    raise ReturnCodeError(s, True)
                else:
                    warnings.warn("subprocess returned a non-zero return "
                                  "code. Check outputs carefully...")
            job.postprocess()
            return

        # Check that all errors could be handled
        # First pass: handlers that explicitly raise on an empty
        # correction set...
        for x in self.run_log[-1]["corrections"]:
            if not x["actions"] and x["handler"].raises_runtime_error:
                self.run_log[-1]["handler"] = x["handler"]
                s = "Unrecoverable error for handler: {}".format(x["handler"])
                raise NonRecoverableError(s, True, x["handler"])
        # ...second pass: any remaining correction without actions is
        # still unrecoverable (but flagged as non-raising).
        for x in self.run_log[-1]["corrections"]:
            if not x["actions"]:
                self.run_log[-1]["handler"] = x["handler"]
                s = "Unrecoverable error for handler: %s" % x["handler"]
                raise NonRecoverableError(s, False, x["handler"])

    # Loop exited without a clean return: one of the error budgets was
    # exhausted; record which one and raise the matching exception.
    if self.errors_current_job >= self.max_errors_per_job:
        self.run_log[-1]["max_errors_per_job"] = True
        msg = "Max errors per job reached: {}.".format(self.max_errors_per_job)
        logger.info(msg)
        raise MaxCorrectionsPerJobError(msg, True, self.max_errors_per_job, job)
    else:
        self.run_log[-1]["max_errors"] = True
        msg = "Max errors reached: {}.".format(self.max_errors)
        logger.info(msg)
        raise MaxCorrectionsError(msg, True, self.max_errors)
constant[
Runs a single job.
Args:
job_n: job number (1 index)
job: Custodian job
Raises:
ValidationError: if a job fails validation
ReturnCodeError: if the process has a return code different from 0
NonRecoverableError: if an unrecoverable occurs
MaxCorrectionsPerJobError: if max_errors_per_job is reached
MaxCorrectionsError: if max_errors is reached
MaxCorrectionsPerHandlerError: if max_errors_per_handler is reached
]
call[name[self].run_log.append, parameter[dictionary[[<ast.Constant object at 0x7da18eb562c0>, <ast.Constant object at 0x7da18eb54850>, <ast.Constant object at 0x7da18eb57fa0>, <ast.Constant object at 0x7da18eb545e0>, <ast.Constant object at 0x7da18eb54070>, <ast.Constant object at 0x7da18eb543a0>, <ast.Constant object at 0x7da18eb54d00>, <ast.Constant object at 0x7da18eb579d0>], [<ast.Call object at 0x7da18eb55510>, <ast.List object at 0x7da18eb547c0>, <ast.Constant object at 0x7da18eb56830>, <ast.Constant object at 0x7da18eb56ec0>, <ast.Constant object at 0x7da18eb57670>, <ast.Constant object at 0x7da18eb57d60>, <ast.Constant object at 0x7da18eb54af0>, <ast.Constant object at 0x7da18eb56740>]]]]
name[self].errors_current_job assign[=] constant[0]
for taget[name[h]] in starred[name[self].handlers] begin[:]
name[h].n_applied_corrections assign[=] constant[0]
call[name[job].setup, parameter[]]
variable[attempt] assign[=] constant[0]
while <ast.BoolOp object at 0x7da18eb546d0> begin[:]
<ast.AugAssign object at 0x7da18eb54fa0>
call[name[logger].info, parameter[call[constant[Starting job no. {} ({}) attempt no. {}. Total errors and errors in job thus far = {}, {}.].format, parameter[name[job_n], name[job].name, name[attempt], name[self].total_errors, name[self].errors_current_job]]]]
variable[p] assign[=] call[name[job].run, parameter[]]
variable[has_error] assign[=] constant[False]
variable[zero_return_code] assign[=] constant[True]
if call[name[isinstance], parameter[name[p], name[subprocess].Popen]] begin[:]
if name[self].monitors begin[:]
variable[n] assign[=] constant[0]
while constant[True] begin[:]
<ast.AugAssign object at 0x7da18eb56f50>
call[name[time].sleep, parameter[name[self].polling_time_step]]
if compare[call[name[p].poll, parameter[]] is_not constant[None]] begin[:]
break
variable[terminate] assign[=] <ast.BoolOp object at 0x7da18eb54940>
if compare[binary_operation[name[n] <ast.Mod object at 0x7da2590d6920> name[self].monitor_freq] equal[==] constant[0]] begin[:]
variable[has_error] assign[=] call[name[self]._do_check, parameter[name[self].monitors, name[terminate]]]
if <ast.BoolOp object at 0x7da18eb54460> begin[:]
call[name[time].sleep, parameter[name[self].polling_time_step]]
variable[zero_return_code] assign[=] compare[name[p].returncode equal[==] constant[0]]
call[name[logger].info, parameter[call[constant[{}.run has completed. Checking remaining handlers].format, parameter[name[job].name]]]]
if name[has_error] begin[:]
call[name[self]._do_check, parameter[<ast.ListComp object at 0x7da18eb54a00>]]
if name[has_error] begin[:]
call[name[job].terminate, parameter[]]
if <ast.UnaryOp object at 0x7da204961d20> begin[:]
for taget[name[v]] in starred[name[self].validators] begin[:]
if call[name[v].check, parameter[]] begin[:]
call[call[name[self].run_log][<ast.UnaryOp object at 0x7da204962e60>]][constant[validator]] assign[=] name[v]
variable[s] assign[=] call[constant[Validation failed: {}].format, parameter[name[v]]]
<ast.Raise object at 0x7da2049604f0>
if <ast.UnaryOp object at 0x7da204961450> begin[:]
if name[self].terminate_on_nonzero_returncode begin[:]
call[call[name[self].run_log][<ast.UnaryOp object at 0x7da1b0578f10>]][constant[nonzero_return_code]] assign[=] constant[True]
variable[s] assign[=] binary_operation[constant[Job return code is %d. Terminating...] <ast.Mod object at 0x7da2590d6920> name[p].returncode]
call[name[logger].info, parameter[name[s]]]
<ast.Raise object at 0x7da1b057ab90>
call[name[job].postprocess, parameter[]]
return[None]
for taget[name[x]] in starred[call[call[name[self].run_log][<ast.UnaryOp object at 0x7da1b057aec0>]][constant[corrections]]] begin[:]
if <ast.BoolOp object at 0x7da1b057ab60> begin[:]
call[call[name[self].run_log][<ast.UnaryOp object at 0x7da18bc730a0>]][constant[handler]] assign[=] call[name[x]][constant[handler]]
variable[s] assign[=] call[constant[Unrecoverable error for handler: {}].format, parameter[call[name[x]][constant[handler]]]]
<ast.Raise object at 0x7da18bc71e70>
for taget[name[x]] in starred[call[call[name[self].run_log][<ast.UnaryOp object at 0x7da18bc73940>]][constant[corrections]]] begin[:]
if <ast.UnaryOp object at 0x7da18bc72710> begin[:]
call[call[name[self].run_log][<ast.UnaryOp object at 0x7da18bc737f0>]][constant[handler]] assign[=] call[name[x]][constant[handler]]
variable[s] assign[=] binary_operation[constant[Unrecoverable error for handler: %s] <ast.Mod object at 0x7da2590d6920> call[name[x]][constant[handler]]]
<ast.Raise object at 0x7da1b056c430>
if compare[name[self].errors_current_job greater_or_equal[>=] name[self].max_errors_per_job] begin[:]
call[call[name[self].run_log][<ast.UnaryOp object at 0x7da1b056d1e0>]][constant[max_errors_per_job]] assign[=] constant[True]
variable[msg] assign[=] call[constant[Max errors per job reached: {}.].format, parameter[name[self].max_errors_per_job]]
call[name[logger].info, parameter[name[msg]]]
<ast.Raise object at 0x7da1b056c2e0> | keyword[def] identifier[_run_job] ( identifier[self] , identifier[job_n] , identifier[job] ):
literal[string]
identifier[self] . identifier[run_log] . identifier[append] ({ literal[string] : identifier[job] . identifier[as_dict] (), literal[string] :[],
literal[string] : keyword[None] , literal[string] : keyword[None] ,
literal[string] : keyword[False] , literal[string] : keyword[False] ,
literal[string] : keyword[False] ,
literal[string] : keyword[False] })
identifier[self] . identifier[errors_current_job] = literal[int]
keyword[for] identifier[h] keyword[in] identifier[self] . identifier[handlers] :
identifier[h] . identifier[n_applied_corrections] = literal[int]
identifier[job] . identifier[setup] ()
identifier[attempt] = literal[int]
keyword[while] ( identifier[self] . identifier[total_errors] < identifier[self] . identifier[max_errors] keyword[and]
identifier[self] . identifier[errors_current_job] < identifier[self] . identifier[max_errors_per_job] ):
identifier[attempt] += literal[int]
identifier[logger] . identifier[info] (
literal[string]
literal[string] . identifier[format] (
identifier[job_n] , identifier[job] . identifier[name] , identifier[attempt] , identifier[self] . identifier[total_errors] ,
identifier[self] . identifier[errors_current_job] ))
identifier[p] = identifier[job] . identifier[run] ()
identifier[has_error] = keyword[False]
identifier[zero_return_code] = keyword[True]
keyword[if] identifier[isinstance] ( identifier[p] , identifier[subprocess] . identifier[Popen] ):
keyword[if] identifier[self] . identifier[monitors] :
identifier[n] = literal[int]
keyword[while] keyword[True] :
identifier[n] += literal[int]
identifier[time] . identifier[sleep] ( identifier[self] . identifier[polling_time_step] )
keyword[if] identifier[p] . identifier[poll] () keyword[is] keyword[not] keyword[None] :
keyword[break]
identifier[terminate] = identifier[self] . identifier[terminate_func] keyword[or] identifier[p] . identifier[terminate]
keyword[if] identifier[n] % identifier[self] . identifier[monitor_freq] == literal[int] :
identifier[has_error] = identifier[self] . identifier[_do_check] ( identifier[self] . identifier[monitors] ,
identifier[terminate] )
keyword[if] identifier[terminate] keyword[is] keyword[not] keyword[None] keyword[and] identifier[terminate] != identifier[p] . identifier[terminate] :
identifier[time] . identifier[sleep] ( identifier[self] . identifier[polling_time_step] )
keyword[else] :
identifier[p] . identifier[wait] ()
keyword[if] identifier[self] . identifier[terminate_func] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[terminate_func] != identifier[p] . identifier[terminate] :
identifier[self] . identifier[terminate_func] ()
identifier[time] . identifier[sleep] ( identifier[self] . identifier[polling_time_step] )
identifier[zero_return_code] = identifier[p] . identifier[returncode] == literal[int]
identifier[logger] . identifier[info] ( literal[string]
literal[string] . identifier[format] ( identifier[job] . identifier[name] ))
keyword[if] identifier[has_error] :
identifier[self] . identifier[_do_check] ([ identifier[h] keyword[for] identifier[h] keyword[in] identifier[self] . identifier[handlers]
keyword[if] keyword[not] identifier[h] . identifier[is_monitor] ])
keyword[else] :
identifier[has_error] = identifier[self] . identifier[_do_check] ( identifier[self] . identifier[handlers] )
keyword[if] identifier[has_error] :
identifier[job] . identifier[terminate] ()
keyword[if] keyword[not] identifier[has_error] :
keyword[for] identifier[v] keyword[in] identifier[self] . identifier[validators] :
keyword[if] identifier[v] . identifier[check] ():
identifier[self] . identifier[run_log] [- literal[int] ][ literal[string] ]= identifier[v]
identifier[s] = literal[string] . identifier[format] ( identifier[v] )
keyword[raise] identifier[ValidationError] ( identifier[s] , keyword[True] , identifier[v] )
keyword[if] keyword[not] identifier[zero_return_code] :
keyword[if] identifier[self] . identifier[terminate_on_nonzero_returncode] :
identifier[self] . identifier[run_log] [- literal[int] ][ literal[string] ]= keyword[True]
identifier[s] = literal[string] % identifier[p] . identifier[returncode]
identifier[logger] . identifier[info] ( identifier[s] )
keyword[raise] identifier[ReturnCodeError] ( identifier[s] , keyword[True] )
keyword[else] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] )
identifier[job] . identifier[postprocess] ()
keyword[return]
keyword[for] identifier[x] keyword[in] identifier[self] . identifier[run_log] [- literal[int] ][ literal[string] ]:
keyword[if] keyword[not] identifier[x] [ literal[string] ] keyword[and] identifier[x] [ literal[string] ]. identifier[raises_runtime_error] :
identifier[self] . identifier[run_log] [- literal[int] ][ literal[string] ]= identifier[x] [ literal[string] ]
identifier[s] = literal[string] . identifier[format] ( identifier[x] [ literal[string] ])
keyword[raise] identifier[NonRecoverableError] ( identifier[s] , keyword[True] , identifier[x] [ literal[string] ])
keyword[for] identifier[x] keyword[in] identifier[self] . identifier[run_log] [- literal[int] ][ literal[string] ]:
keyword[if] keyword[not] identifier[x] [ literal[string] ]:
identifier[self] . identifier[run_log] [- literal[int] ][ literal[string] ]= identifier[x] [ literal[string] ]
identifier[s] = literal[string] % identifier[x] [ literal[string] ]
keyword[raise] identifier[NonRecoverableError] ( identifier[s] , keyword[False] , identifier[x] [ literal[string] ])
keyword[if] identifier[self] . identifier[errors_current_job] >= identifier[self] . identifier[max_errors_per_job] :
identifier[self] . identifier[run_log] [- literal[int] ][ literal[string] ]= keyword[True]
identifier[msg] = literal[string] . identifier[format] ( identifier[self] . identifier[max_errors_per_job] )
identifier[logger] . identifier[info] ( identifier[msg] )
keyword[raise] identifier[MaxCorrectionsPerJobError] ( identifier[msg] , keyword[True] , identifier[self] . identifier[max_errors_per_job] , identifier[job] )
keyword[else] :
identifier[self] . identifier[run_log] [- literal[int] ][ literal[string] ]= keyword[True]
identifier[msg] = literal[string] . identifier[format] ( identifier[self] . identifier[max_errors] )
identifier[logger] . identifier[info] ( identifier[msg] )
keyword[raise] identifier[MaxCorrectionsError] ( identifier[msg] , keyword[True] , identifier[self] . identifier[max_errors] ) | def _run_job(self, job_n, job):
"""
Runs a single job.
Args:
job_n: job number (1 index)
job: Custodian job
Raises:
ValidationError: if a job fails validation
ReturnCodeError: if the process has a return code different from 0
NonRecoverableError: if an unrecoverable occurs
MaxCorrectionsPerJobError: if max_errors_per_job is reached
MaxCorrectionsError: if max_errors is reached
MaxCorrectionsPerHandlerError: if max_errors_per_handler is reached
"""
self.run_log.append({'job': job.as_dict(), 'corrections': [], 'handler': None, 'validator': None, 'max_errors': False, 'max_errors_per_job': False, 'max_errors_per_handler': False, 'nonzero_return_code': False})
self.errors_current_job = 0
# reset the counters of the number of times a correction has been
# applied for each handler
for h in self.handlers:
h.n_applied_corrections = 0 # depends on [control=['for'], data=['h']]
job.setup()
attempt = 0
while self.total_errors < self.max_errors and self.errors_current_job < self.max_errors_per_job:
attempt += 1
logger.info('Starting job no. {} ({}) attempt no. {}. Total errors and errors in job thus far = {}, {}.'.format(job_n, job.name, attempt, self.total_errors, self.errors_current_job))
p = job.run()
# Check for errors using the error handlers and perform
# corrections.
has_error = False
zero_return_code = True
# While the job is running, we use the handlers that are
# monitors to monitor the job.
if isinstance(p, subprocess.Popen):
if self.monitors:
n = 0
while True:
n += 1
time.sleep(self.polling_time_step)
if p.poll() is not None:
break # depends on [control=['if'], data=[]]
terminate = self.terminate_func or p.terminate
if n % self.monitor_freq == 0:
has_error = self._do_check(self.monitors, terminate) # depends on [control=['if'], data=[]]
if terminate is not None and terminate != p.terminate:
time.sleep(self.polling_time_step) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
else:
p.wait()
if self.terminate_func is not None and self.terminate_func != p.terminate:
self.terminate_func()
time.sleep(self.polling_time_step) # depends on [control=['if'], data=[]]
zero_return_code = p.returncode == 0 # depends on [control=['if'], data=[]]
logger.info('{}.run has completed. Checking remaining handlers'.format(job.name))
# Check for errors again, since in some cases non-monitor
# handlers fix the problems detected by monitors
# if an error has been found, not all handlers need to run
if has_error:
self._do_check([h for h in self.handlers if not h.is_monitor]) # depends on [control=['if'], data=[]]
else:
has_error = self._do_check(self.handlers)
if has_error:
# This makes sure the job is killed cleanly for certain systems.
job.terminate() # depends on [control=['if'], data=[]]
# If there are no errors detected, perform
# postprocessing and exit.
if not has_error:
for v in self.validators:
if v.check():
self.run_log[-1]['validator'] = v
s = 'Validation failed: {}'.format(v)
raise ValidationError(s, True, v) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['v']]
if not zero_return_code:
if self.terminate_on_nonzero_returncode:
self.run_log[-1]['nonzero_return_code'] = True
s = 'Job return code is %d. Terminating...' % p.returncode
logger.info(s)
raise ReturnCodeError(s, True) # depends on [control=['if'], data=[]]
else:
warnings.warn('subprocess returned a non-zero return code. Check outputs carefully...') # depends on [control=['if'], data=[]]
job.postprocess()
return # depends on [control=['if'], data=[]]
# Check that all errors could be handled
for x in self.run_log[-1]['corrections']:
if not x['actions'] and x['handler'].raises_runtime_error:
self.run_log[-1]['handler'] = x['handler']
s = 'Unrecoverable error for handler: {}'.format(x['handler'])
raise NonRecoverableError(s, True, x['handler']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']]
for x in self.run_log[-1]['corrections']:
if not x['actions']:
self.run_log[-1]['handler'] = x['handler']
s = 'Unrecoverable error for handler: %s' % x['handler']
raise NonRecoverableError(s, False, x['handler']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] # depends on [control=['while'], data=[]]
if self.errors_current_job >= self.max_errors_per_job:
self.run_log[-1]['max_errors_per_job'] = True
msg = 'Max errors per job reached: {}.'.format(self.max_errors_per_job)
logger.info(msg)
raise MaxCorrectionsPerJobError(msg, True, self.max_errors_per_job, job) # depends on [control=['if'], data=[]]
else:
self.run_log[-1]['max_errors'] = True
msg = 'Max errors reached: {}.'.format(self.max_errors)
logger.info(msg)
raise MaxCorrectionsError(msg, True, self.max_errors) |
def check_for_blob(self, container_name, blob_name, **kwargs):
    """
    Check if a blob exists on Azure Blob Storage.

    :param container_name: Name of the container.
    :type container_name: str
    :param blob_name: Name of the blob.
    :type blob_name: str
    :param kwargs: Optional keyword arguments that
        `BlockBlobService.exists()` takes.
    :type kwargs: object
    :return: True if the blob exists, False otherwise.
    :rtype: bool
    """
    # Delegate directly to the underlying service client held by this hook.
    blob_exists = self.connection.exists(container_name, blob_name, **kwargs)
    return blob_exists
constant[
Check if a blob exists on Azure Blob Storage.
:param container_name: Name of the container.
:type container_name: str
:param blob_name: Name of the blob.
:type blob_name: str
:param kwargs: Optional keyword arguments that
`BlockBlobService.exists()` takes.
:type kwargs: object
:return: True if the blob exists, False otherwise.
:rtype: bool
]
return[call[name[self].connection.exists, parameter[name[container_name], name[blob_name]]]] | keyword[def] identifier[check_for_blob] ( identifier[self] , identifier[container_name] , identifier[blob_name] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[connection] . identifier[exists] ( identifier[container_name] , identifier[blob_name] ,** identifier[kwargs] ) | def check_for_blob(self, container_name, blob_name, **kwargs):
"""
Check if a blob exists on Azure Blob Storage.
:param container_name: Name of the container.
:type container_name: str
:param blob_name: Name of the blob.
:type blob_name: str
:param kwargs: Optional keyword arguments that
`BlockBlobService.exists()` takes.
:type kwargs: object
:return: True if the blob exists, False otherwise.
:rtype: bool
"""
return self.connection.exists(container_name, blob_name, **kwargs) |
def GradientFilters(ndim, axes, axshp, dtype=None):
r"""
Construct a set of filters for computing gradients in the frequency
domain.
Parameters
----------
ndim : integer
Total number of dimensions in array in which gradients are to be
computed
axes : tuple of integers
Axes on which gradients are to be computed
axshp : tuple of integers
Shape of axes on which gradients are to be computed
dtype : dtype
Data type of output arrays
Returns
-------
Gf : ndarray
Frequency domain gradient operators :math:`\hat{G}_i`
GHGf : ndarray
Sum of products :math:`\sum_i \hat{G}_i^H \hat{G}_i`
"""
if dtype is None:
dtype = np.float32
g = np.zeros([2 if k in axes else 1 for k in range(ndim)] +
[len(axes),], dtype)
for k in axes:
g[(0,) * k + (slice(None),) + (0,) * (g.ndim - 2 - k) + (k,)] = \
np.array([1, -1])
Gf = rfftn(g, axshp, axes=axes)
GHGf = np.sum(np.conj(Gf) * Gf, axis=-1).real
return Gf, GHGf | def function[GradientFilters, parameter[ndim, axes, axshp, dtype]]:
constant[
Construct a set of filters for computing gradients in the frequency
domain.
Parameters
----------
ndim : integer
Total number of dimensions in array in which gradients are to be
computed
axes : tuple of integers
Axes on which gradients are to be computed
axshp : tuple of integers
Shape of axes on which gradients are to be computed
dtype : dtype
Data type of output arrays
Returns
-------
Gf : ndarray
Frequency domain gradient operators :math:`\hat{G}_i`
GHGf : ndarray
Sum of products :math:`\sum_i \hat{G}_i^H \hat{G}_i`
]
if compare[name[dtype] is constant[None]] begin[:]
variable[dtype] assign[=] name[np].float32
variable[g] assign[=] call[name[np].zeros, parameter[binary_operation[<ast.ListComp object at 0x7da1b074bc70> + list[[<ast.Call object at 0x7da1b0749a80>]]], name[dtype]]]
for taget[name[k]] in starred[name[axes]] begin[:]
call[name[g]][binary_operation[binary_operation[binary_operation[binary_operation[tuple[[<ast.Constant object at 0x7da1b074b5b0>]] * name[k]] + tuple[[<ast.Call object at 0x7da1b074ad10>]]] + binary_operation[tuple[[<ast.Constant object at 0x7da1b074abf0>]] * binary_operation[binary_operation[name[g].ndim - constant[2]] - name[k]]]] + tuple[[<ast.Name object at 0x7da1b074bf40>]]]] assign[=] call[name[np].array, parameter[list[[<ast.Constant object at 0x7da1b074aec0>, <ast.UnaryOp object at 0x7da1b07481f0>]]]]
variable[Gf] assign[=] call[name[rfftn], parameter[name[g], name[axshp]]]
variable[GHGf] assign[=] call[name[np].sum, parameter[binary_operation[call[name[np].conj, parameter[name[Gf]]] * name[Gf]]]].real
return[tuple[[<ast.Name object at 0x7da1b07498d0>, <ast.Name object at 0x7da1b074a890>]]] | keyword[def] identifier[GradientFilters] ( identifier[ndim] , identifier[axes] , identifier[axshp] , identifier[dtype] = keyword[None] ):
literal[string]
keyword[if] identifier[dtype] keyword[is] keyword[None] :
identifier[dtype] = identifier[np] . identifier[float32]
identifier[g] = identifier[np] . identifier[zeros] ([ literal[int] keyword[if] identifier[k] keyword[in] identifier[axes] keyword[else] literal[int] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[ndim] )]+
[ identifier[len] ( identifier[axes] ),], identifier[dtype] )
keyword[for] identifier[k] keyword[in] identifier[axes] :
identifier[g] [( literal[int] ,)* identifier[k] +( identifier[slice] ( keyword[None] ),)+( literal[int] ,)*( identifier[g] . identifier[ndim] - literal[int] - identifier[k] )+( identifier[k] ,)]= identifier[np] . identifier[array] ([ literal[int] ,- literal[int] ])
identifier[Gf] = identifier[rfftn] ( identifier[g] , identifier[axshp] , identifier[axes] = identifier[axes] )
identifier[GHGf] = identifier[np] . identifier[sum] ( identifier[np] . identifier[conj] ( identifier[Gf] )* identifier[Gf] , identifier[axis] =- literal[int] ). identifier[real]
keyword[return] identifier[Gf] , identifier[GHGf] | def GradientFilters(ndim, axes, axshp, dtype=None):
"""
Construct a set of filters for computing gradients in the frequency
domain.
Parameters
----------
ndim : integer
Total number of dimensions in array in which gradients are to be
computed
axes : tuple of integers
Axes on which gradients are to be computed
axshp : tuple of integers
Shape of axes on which gradients are to be computed
dtype : dtype
Data type of output arrays
Returns
-------
Gf : ndarray
Frequency domain gradient operators :math:`\\hat{G}_i`
GHGf : ndarray
Sum of products :math:`\\sum_i \\hat{G}_i^H \\hat{G}_i`
"""
if dtype is None:
dtype = np.float32 # depends on [control=['if'], data=['dtype']]
g = np.zeros([2 if k in axes else 1 for k in range(ndim)] + [len(axes)], dtype)
for k in axes:
g[(0,) * k + (slice(None),) + (0,) * (g.ndim - 2 - k) + (k,)] = np.array([1, -1]) # depends on [control=['for'], data=['k']]
Gf = rfftn(g, axshp, axes=axes)
GHGf = np.sum(np.conj(Gf) * Gf, axis=-1).real
return (Gf, GHGf) |
def ticket_metric_show(self, ticket_metric_id, **kwargs):
"https://developer.zendesk.com/rest_api/docs/core/ticket_metrics#show-ticket-metrics"
api_path = "/api/v2/ticket_metrics/{ticket_metric_id}.json"
api_path = api_path.format(ticket_metric_id=ticket_metric_id)
return self.call(api_path, **kwargs) | def function[ticket_metric_show, parameter[self, ticket_metric_id]]:
constant[https://developer.zendesk.com/rest_api/docs/core/ticket_metrics#show-ticket-metrics]
variable[api_path] assign[=] constant[/api/v2/ticket_metrics/{ticket_metric_id}.json]
variable[api_path] assign[=] call[name[api_path].format, parameter[]]
return[call[name[self].call, parameter[name[api_path]]]] | keyword[def] identifier[ticket_metric_show] ( identifier[self] , identifier[ticket_metric_id] ,** identifier[kwargs] ):
literal[string]
identifier[api_path] = literal[string]
identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[ticket_metric_id] = identifier[ticket_metric_id] )
keyword[return] identifier[self] . identifier[call] ( identifier[api_path] ,** identifier[kwargs] ) | def ticket_metric_show(self, ticket_metric_id, **kwargs):
"""https://developer.zendesk.com/rest_api/docs/core/ticket_metrics#show-ticket-metrics"""
api_path = '/api/v2/ticket_metrics/{ticket_metric_id}.json'
api_path = api_path.format(ticket_metric_id=ticket_metric_id)
return self.call(api_path, **kwargs) |
def parse_insertion(insertion, gff):
"""
parse insertion to gff format
"""
offset = insertion['offset']
for ins in parse_fasta(insertion['insertion sequence'].split('|')):
strand = insertion['strand']
ID = ins[0].split('>')[1].split()[0]
Start, End = [int(i) for i in ins[0].split('gene-pos=', 1)[1].split()[0].split('-')]
Start, End = abs(Start + offset), abs(End + offset)
if strand == '-':
Start, End = End, Start
gff['#seqname'].append(insertion['ID'])
gff['source'].append(insertion['source'])
gff['feature'].append('IVS')
gff['start'].append(Start)
gff['end'].append(End)
gff['score'].append('.')
gff['strand'].append(strand) # same as rRNA
gff['frame'].append('.')
gff['attribute'].append('ID=%s' % (ID))
return gff | def function[parse_insertion, parameter[insertion, gff]]:
constant[
parse insertion to gff format
]
variable[offset] assign[=] call[name[insertion]][constant[offset]]
for taget[name[ins]] in starred[call[name[parse_fasta], parameter[call[call[name[insertion]][constant[insertion sequence]].split, parameter[constant[|]]]]]] begin[:]
variable[strand] assign[=] call[name[insertion]][constant[strand]]
variable[ID] assign[=] call[call[call[call[call[name[ins]][constant[0]].split, parameter[constant[>]]]][constant[1]].split, parameter[]]][constant[0]]
<ast.Tuple object at 0x7da20c990cd0> assign[=] <ast.ListComp object at 0x7da20c991060>
<ast.Tuple object at 0x7da20c990ee0> assign[=] tuple[[<ast.Call object at 0x7da20c991840>, <ast.Call object at 0x7da20c993130>]]
if compare[name[strand] equal[==] constant[-]] begin[:]
<ast.Tuple object at 0x7da20c992500> assign[=] tuple[[<ast.Name object at 0x7da20c992c80>, <ast.Name object at 0x7da20c990820>]]
call[call[name[gff]][constant[#seqname]].append, parameter[call[name[insertion]][constant[ID]]]]
call[call[name[gff]][constant[source]].append, parameter[call[name[insertion]][constant[source]]]]
call[call[name[gff]][constant[feature]].append, parameter[constant[IVS]]]
call[call[name[gff]][constant[start]].append, parameter[name[Start]]]
call[call[name[gff]][constant[end]].append, parameter[name[End]]]
call[call[name[gff]][constant[score]].append, parameter[constant[.]]]
call[call[name[gff]][constant[strand]].append, parameter[name[strand]]]
call[call[name[gff]][constant[frame]].append, parameter[constant[.]]]
call[call[name[gff]][constant[attribute]].append, parameter[binary_operation[constant[ID=%s] <ast.Mod object at 0x7da2590d6920> name[ID]]]]
return[name[gff]] | keyword[def] identifier[parse_insertion] ( identifier[insertion] , identifier[gff] ):
literal[string]
identifier[offset] = identifier[insertion] [ literal[string] ]
keyword[for] identifier[ins] keyword[in] identifier[parse_fasta] ( identifier[insertion] [ literal[string] ]. identifier[split] ( literal[string] )):
identifier[strand] = identifier[insertion] [ literal[string] ]
identifier[ID] = identifier[ins] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ()[ literal[int] ]
identifier[Start] , identifier[End] =[ identifier[int] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[ins] [ literal[int] ]. identifier[split] ( literal[string] , literal[int] )[ literal[int] ]. identifier[split] ()[ literal[int] ]. identifier[split] ( literal[string] )]
identifier[Start] , identifier[End] = identifier[abs] ( identifier[Start] + identifier[offset] ), identifier[abs] ( identifier[End] + identifier[offset] )
keyword[if] identifier[strand] == literal[string] :
identifier[Start] , identifier[End] = identifier[End] , identifier[Start]
identifier[gff] [ literal[string] ]. identifier[append] ( identifier[insertion] [ literal[string] ])
identifier[gff] [ literal[string] ]. identifier[append] ( identifier[insertion] [ literal[string] ])
identifier[gff] [ literal[string] ]. identifier[append] ( literal[string] )
identifier[gff] [ literal[string] ]. identifier[append] ( identifier[Start] )
identifier[gff] [ literal[string] ]. identifier[append] ( identifier[End] )
identifier[gff] [ literal[string] ]. identifier[append] ( literal[string] )
identifier[gff] [ literal[string] ]. identifier[append] ( identifier[strand] )
identifier[gff] [ literal[string] ]. identifier[append] ( literal[string] )
identifier[gff] [ literal[string] ]. identifier[append] ( literal[string] %( identifier[ID] ))
keyword[return] identifier[gff] | def parse_insertion(insertion, gff):
"""
parse insertion to gff format
"""
offset = insertion['offset']
for ins in parse_fasta(insertion['insertion sequence'].split('|')):
strand = insertion['strand']
ID = ins[0].split('>')[1].split()[0]
(Start, End) = [int(i) for i in ins[0].split('gene-pos=', 1)[1].split()[0].split('-')]
(Start, End) = (abs(Start + offset), abs(End + offset))
if strand == '-':
(Start, End) = (End, Start) # depends on [control=['if'], data=[]]
gff['#seqname'].append(insertion['ID'])
gff['source'].append(insertion['source'])
gff['feature'].append('IVS')
gff['start'].append(Start)
gff['end'].append(End)
gff['score'].append('.')
gff['strand'].append(strand) # same as rRNA
gff['frame'].append('.')
gff['attribute'].append('ID=%s' % ID) # depends on [control=['for'], data=['ins']]
return gff |
def encode(self, x, layer=None, sample=False, **kwargs):
'''Encode a dataset using the hidden layer activations of our network.
Parameters
----------
x : ndarray
A dataset to encode. Rows of this dataset capture individual data
points, while columns represent the variables in each data point.
layer : str, optional
The name of the hidden layer output to use. By default, we use
the "middle" hidden layer---for example, for a 4,2,4 or 4,3,2,3,4
autoencoder, we use the layer with size 2.
sample : bool, optional
If True, then draw a sample using the hidden activations as
independent Bernoulli probabilities for the encoded data. This
assumes the hidden layer has a logistic sigmoid activation function.
Returns
-------
ndarray :
The given dataset, encoded by the appropriate hidden layer
activation.
'''
enc = self.feed_forward(x, **kwargs)[self._find_output(layer)]
if sample:
return np.random.binomial(n=1, p=enc).astype(np.uint8)
return enc | def function[encode, parameter[self, x, layer, sample]]:
constant[Encode a dataset using the hidden layer activations of our network.
Parameters
----------
x : ndarray
A dataset to encode. Rows of this dataset capture individual data
points, while columns represent the variables in each data point.
layer : str, optional
The name of the hidden layer output to use. By default, we use
the "middle" hidden layer---for example, for a 4,2,4 or 4,3,2,3,4
autoencoder, we use the layer with size 2.
sample : bool, optional
If True, then draw a sample using the hidden activations as
independent Bernoulli probabilities for the encoded data. This
assumes the hidden layer has a logistic sigmoid activation function.
Returns
-------
ndarray :
The given dataset, encoded by the appropriate hidden layer
activation.
]
variable[enc] assign[=] call[call[name[self].feed_forward, parameter[name[x]]]][call[name[self]._find_output, parameter[name[layer]]]]
if name[sample] begin[:]
return[call[call[name[np].random.binomial, parameter[]].astype, parameter[name[np].uint8]]]
return[name[enc]] | keyword[def] identifier[encode] ( identifier[self] , identifier[x] , identifier[layer] = keyword[None] , identifier[sample] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[enc] = identifier[self] . identifier[feed_forward] ( identifier[x] ,** identifier[kwargs] )[ identifier[self] . identifier[_find_output] ( identifier[layer] )]
keyword[if] identifier[sample] :
keyword[return] identifier[np] . identifier[random] . identifier[binomial] ( identifier[n] = literal[int] , identifier[p] = identifier[enc] ). identifier[astype] ( identifier[np] . identifier[uint8] )
keyword[return] identifier[enc] | def encode(self, x, layer=None, sample=False, **kwargs):
"""Encode a dataset using the hidden layer activations of our network.
Parameters
----------
x : ndarray
A dataset to encode. Rows of this dataset capture individual data
points, while columns represent the variables in each data point.
layer : str, optional
The name of the hidden layer output to use. By default, we use
the "middle" hidden layer---for example, for a 4,2,4 or 4,3,2,3,4
autoencoder, we use the layer with size 2.
sample : bool, optional
If True, then draw a sample using the hidden activations as
independent Bernoulli probabilities for the encoded data. This
assumes the hidden layer has a logistic sigmoid activation function.
Returns
-------
ndarray :
The given dataset, encoded by the appropriate hidden layer
activation.
"""
enc = self.feed_forward(x, **kwargs)[self._find_output(layer)]
if sample:
return np.random.binomial(n=1, p=enc).astype(np.uint8) # depends on [control=['if'], data=[]]
return enc |
def run_step0(self):
"""
For the 0th step, store the data and stream data
Returns
-------
None
"""
dae = self.system.dae
system = self.system
self.inc = zeros(dae.m + dae.n, 1)
system.varout.store(self.t, self.step)
self.streaming_step() | def function[run_step0, parameter[self]]:
constant[
For the 0th step, store the data and stream data
Returns
-------
None
]
variable[dae] assign[=] name[self].system.dae
variable[system] assign[=] name[self].system
name[self].inc assign[=] call[name[zeros], parameter[binary_operation[name[dae].m + name[dae].n], constant[1]]]
call[name[system].varout.store, parameter[name[self].t, name[self].step]]
call[name[self].streaming_step, parameter[]] | keyword[def] identifier[run_step0] ( identifier[self] ):
literal[string]
identifier[dae] = identifier[self] . identifier[system] . identifier[dae]
identifier[system] = identifier[self] . identifier[system]
identifier[self] . identifier[inc] = identifier[zeros] ( identifier[dae] . identifier[m] + identifier[dae] . identifier[n] , literal[int] )
identifier[system] . identifier[varout] . identifier[store] ( identifier[self] . identifier[t] , identifier[self] . identifier[step] )
identifier[self] . identifier[streaming_step] () | def run_step0(self):
"""
For the 0th step, store the data and stream data
Returns
-------
None
"""
dae = self.system.dae
system = self.system
self.inc = zeros(dae.m + dae.n, 1)
system.varout.store(self.t, self.step)
self.streaming_step() |
def get_image(image_id, profile, **libcloud_kwargs):
'''
Get an image of a node
:param image_id: Image to fetch
:type image_id: ``str``
:param profile: The profile key
:type profile: ``str``
:param libcloud_kwargs: Extra arguments for the driver's delete_image method
:type libcloud_kwargs: ``dict``
CLI Example:
.. code-block:: bash
salt myminion libcloud_compute.get_image image1 profile1
'''
conn = _get_driver(profile=profile)
libcloud_kwargs = salt.utils.args.clean_kwargs(**libcloud_kwargs)
image = conn.get_image(image_id, **libcloud_kwargs)
return _simple_image(image) | def function[get_image, parameter[image_id, profile]]:
constant[
Get an image of a node
:param image_id: Image to fetch
:type image_id: ``str``
:param profile: The profile key
:type profile: ``str``
:param libcloud_kwargs: Extra arguments for the driver's delete_image method
:type libcloud_kwargs: ``dict``
CLI Example:
.. code-block:: bash
salt myminion libcloud_compute.get_image image1 profile1
]
variable[conn] assign[=] call[name[_get_driver], parameter[]]
variable[libcloud_kwargs] assign[=] call[name[salt].utils.args.clean_kwargs, parameter[]]
variable[image] assign[=] call[name[conn].get_image, parameter[name[image_id]]]
return[call[name[_simple_image], parameter[name[image]]]] | keyword[def] identifier[get_image] ( identifier[image_id] , identifier[profile] ,** identifier[libcloud_kwargs] ):
literal[string]
identifier[conn] = identifier[_get_driver] ( identifier[profile] = identifier[profile] )
identifier[libcloud_kwargs] = identifier[salt] . identifier[utils] . identifier[args] . identifier[clean_kwargs] (** identifier[libcloud_kwargs] )
identifier[image] = identifier[conn] . identifier[get_image] ( identifier[image_id] ,** identifier[libcloud_kwargs] )
keyword[return] identifier[_simple_image] ( identifier[image] ) | def get_image(image_id, profile, **libcloud_kwargs):
"""
Get an image of a node
:param image_id: Image to fetch
:type image_id: ``str``
:param profile: The profile key
:type profile: ``str``
:param libcloud_kwargs: Extra arguments for the driver's delete_image method
:type libcloud_kwargs: ``dict``
CLI Example:
.. code-block:: bash
salt myminion libcloud_compute.get_image image1 profile1
"""
conn = _get_driver(profile=profile)
libcloud_kwargs = salt.utils.args.clean_kwargs(**libcloud_kwargs)
image = conn.get_image(image_id, **libcloud_kwargs)
return _simple_image(image) |
def group_attrib(self):
'''
return a namedtuple containing all attributes attached
to groups of which the given series is a member
for each group of which the series is a member
'''
group_attributes = [g.attrib for g in self.dataset.groups if self in g]
if group_attributes:
return concat_namedtuples(*group_attributes) | def function[group_attrib, parameter[self]]:
constant[
return a namedtuple containing all attributes attached
to groups of which the given series is a member
for each group of which the series is a member
]
variable[group_attributes] assign[=] <ast.ListComp object at 0x7da1b12fdab0>
if name[group_attributes] begin[:]
return[call[name[concat_namedtuples], parameter[<ast.Starred object at 0x7da1b12fee30>]]] | keyword[def] identifier[group_attrib] ( identifier[self] ):
literal[string]
identifier[group_attributes] =[ identifier[g] . identifier[attrib] keyword[for] identifier[g] keyword[in] identifier[self] . identifier[dataset] . identifier[groups] keyword[if] identifier[self] keyword[in] identifier[g] ]
keyword[if] identifier[group_attributes] :
keyword[return] identifier[concat_namedtuples] (* identifier[group_attributes] ) | def group_attrib(self):
"""
return a namedtuple containing all attributes attached
to groups of which the given series is a member
for each group of which the series is a member
"""
group_attributes = [g.attrib for g in self.dataset.groups if self in g]
if group_attributes:
return concat_namedtuples(*group_attributes) # depends on [control=['if'], data=[]] |
def attach_attachment(self, analysis, attachment):
"""
Attach a file or a given set of files to an analysis
:param analysis: analysis where the files are to be attached
:param attachment: files to be attached. This can be either a
single file or a list of files
:return: None
"""
if not attachment:
return
if isinstance(attachment, list):
for attach in attachment:
self.attach_attachment(analysis, attach)
return
# current attachments
an_atts = analysis.getAttachment()
atts_filenames = [att.getAttachmentFile().filename for att in an_atts]
if attachment.getAttachmentFile().filename not in atts_filenames:
an_atts.append(attachment)
logger.info(
"Attaching %s to %s" % (attachment.UID(), analysis))
analysis.setAttachment([att.UID() for att in an_atts])
analysis.reindexObject()
else:
self.warn("Attachment %s was not linked to analysis %s" %
(attachment.UID(), analysis)) | def function[attach_attachment, parameter[self, analysis, attachment]]:
constant[
Attach a file or a given set of files to an analysis
:param analysis: analysis where the files are to be attached
:param attachment: files to be attached. This can be either a
single file or a list of files
:return: None
]
if <ast.UnaryOp object at 0x7da1b2312620> begin[:]
return[None]
if call[name[isinstance], parameter[name[attachment], name[list]]] begin[:]
for taget[name[attach]] in starred[name[attachment]] begin[:]
call[name[self].attach_attachment, parameter[name[analysis], name[attach]]]
return[None]
variable[an_atts] assign[=] call[name[analysis].getAttachment, parameter[]]
variable[atts_filenames] assign[=] <ast.ListComp object at 0x7da1b2312860>
if compare[call[name[attachment].getAttachmentFile, parameter[]].filename <ast.NotIn object at 0x7da2590d7190> name[atts_filenames]] begin[:]
call[name[an_atts].append, parameter[name[attachment]]]
call[name[logger].info, parameter[binary_operation[constant[Attaching %s to %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b2310cd0>, <ast.Name object at 0x7da1b2313670>]]]]]
call[name[analysis].setAttachment, parameter[<ast.ListComp object at 0x7da1b2312740>]]
call[name[analysis].reindexObject, parameter[]] | keyword[def] identifier[attach_attachment] ( identifier[self] , identifier[analysis] , identifier[attachment] ):
literal[string]
keyword[if] keyword[not] identifier[attachment] :
keyword[return]
keyword[if] identifier[isinstance] ( identifier[attachment] , identifier[list] ):
keyword[for] identifier[attach] keyword[in] identifier[attachment] :
identifier[self] . identifier[attach_attachment] ( identifier[analysis] , identifier[attach] )
keyword[return]
identifier[an_atts] = identifier[analysis] . identifier[getAttachment] ()
identifier[atts_filenames] =[ identifier[att] . identifier[getAttachmentFile] (). identifier[filename] keyword[for] identifier[att] keyword[in] identifier[an_atts] ]
keyword[if] identifier[attachment] . identifier[getAttachmentFile] (). identifier[filename] keyword[not] keyword[in] identifier[atts_filenames] :
identifier[an_atts] . identifier[append] ( identifier[attachment] )
identifier[logger] . identifier[info] (
literal[string] %( identifier[attachment] . identifier[UID] (), identifier[analysis] ))
identifier[analysis] . identifier[setAttachment] ([ identifier[att] . identifier[UID] () keyword[for] identifier[att] keyword[in] identifier[an_atts] ])
identifier[analysis] . identifier[reindexObject] ()
keyword[else] :
identifier[self] . identifier[warn] ( literal[string] %
( identifier[attachment] . identifier[UID] (), identifier[analysis] )) | def attach_attachment(self, analysis, attachment):
"""
Attach a file or a given set of files to an analysis
:param analysis: analysis where the files are to be attached
:param attachment: files to be attached. This can be either a
single file or a list of files
:return: None
"""
if not attachment:
return # depends on [control=['if'], data=[]]
if isinstance(attachment, list):
for attach in attachment:
self.attach_attachment(analysis, attach) # depends on [control=['for'], data=['attach']]
return # depends on [control=['if'], data=[]]
# current attachments
an_atts = analysis.getAttachment()
atts_filenames = [att.getAttachmentFile().filename for att in an_atts]
if attachment.getAttachmentFile().filename not in atts_filenames:
an_atts.append(attachment)
logger.info('Attaching %s to %s' % (attachment.UID(), analysis))
analysis.setAttachment([att.UID() for att in an_atts])
analysis.reindexObject() # depends on [control=['if'], data=[]]
else:
self.warn('Attachment %s was not linked to analysis %s' % (attachment.UID(), analysis)) |
def rotate(angle, iaxis):
"""
Calculate the 3x3 rotation matrix generated by a rotation
of a specified angle about a specified axis. This rotation
is thought of as rotating the coordinate system.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/rotate_c.html
:param angle: Angle of rotation (radians).
:type angle: float
:param iaxis: Axis of rotation X=1, Y=2, Z=3.
:type iaxis: int
:return: Resulting rotation matrix
:rtype: 3x3-Element Array of floats
"""
angle = ctypes.c_double(angle)
iaxis = ctypes.c_int(iaxis)
mout = stypes.emptyDoubleMatrix()
libspice.rotate_c(angle, iaxis, mout)
return stypes.cMatrixToNumpy(mout) | def function[rotate, parameter[angle, iaxis]]:
constant[
Calculate the 3x3 rotation matrix generated by a rotation
of a specified angle about a specified axis. This rotation
is thought of as rotating the coordinate system.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/rotate_c.html
:param angle: Angle of rotation (radians).
:type angle: float
:param iaxis: Axis of rotation X=1, Y=2, Z=3.
:type iaxis: int
:return: Resulting rotation matrix
:rtype: 3x3-Element Array of floats
]
variable[angle] assign[=] call[name[ctypes].c_double, parameter[name[angle]]]
variable[iaxis] assign[=] call[name[ctypes].c_int, parameter[name[iaxis]]]
variable[mout] assign[=] call[name[stypes].emptyDoubleMatrix, parameter[]]
call[name[libspice].rotate_c, parameter[name[angle], name[iaxis], name[mout]]]
return[call[name[stypes].cMatrixToNumpy, parameter[name[mout]]]] | keyword[def] identifier[rotate] ( identifier[angle] , identifier[iaxis] ):
literal[string]
identifier[angle] = identifier[ctypes] . identifier[c_double] ( identifier[angle] )
identifier[iaxis] = identifier[ctypes] . identifier[c_int] ( identifier[iaxis] )
identifier[mout] = identifier[stypes] . identifier[emptyDoubleMatrix] ()
identifier[libspice] . identifier[rotate_c] ( identifier[angle] , identifier[iaxis] , identifier[mout] )
keyword[return] identifier[stypes] . identifier[cMatrixToNumpy] ( identifier[mout] ) | def rotate(angle, iaxis):
"""
Calculate the 3x3 rotation matrix generated by a rotation
of a specified angle about a specified axis. This rotation
is thought of as rotating the coordinate system.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/rotate_c.html
:param angle: Angle of rotation (radians).
:type angle: float
:param iaxis: Axis of rotation X=1, Y=2, Z=3.
:type iaxis: int
:return: Resulting rotation matrix
:rtype: 3x3-Element Array of floats
"""
angle = ctypes.c_double(angle)
iaxis = ctypes.c_int(iaxis)
mout = stypes.emptyDoubleMatrix()
libspice.rotate_c(angle, iaxis, mout)
return stypes.cMatrixToNumpy(mout) |
def set_bios_settings(self, data=None, only_allowed_settings=True):
"""Sets current BIOS settings to the provided data.
:param: only_allowed_settings: True when only allowed BIOS settings
are to be set. If False, all the BIOS settings supported by
iLO and present in the 'data' are set.
:param: data: a dictionary of BIOS settings to be applied. Depending
on the 'only_allowed_settings', either only the allowed
settings are set or all the supported settings that are in the
'data' are set.
:raises: IloError, on an error from iLO.
:raises: IloCommandNotSupportedError, if the command is not supported
on the server.
"""
if not data:
raise exception.IloError("Could not apply settings with"
" empty data")
sushy_system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
if only_allowed_settings:
unsupported_settings = [key for key in data if key not in (
ilo_cons.SUPPORTED_REDFISH_BIOS_PROPERTIES)]
if unsupported_settings:
msg = ("Could not apply settings as one or more settings are"
" not supported. Unsupported settings are %s."
" Supported settings are %s." % (
unsupported_settings,
ilo_cons.SUPPORTED_REDFISH_BIOS_PROPERTIES))
raise exception.IloError(msg)
try:
settings_required = sushy_system.bios_settings.pending_settings
settings_required.update_bios_data_by_patch(data)
except sushy.exceptions.SushyError as e:
msg = (self._('The pending BIOS Settings resource not found.'
' Error %(error)s') %
{'error': str(e)})
LOG.debug(msg)
raise exception.IloError(msg) | def function[set_bios_settings, parameter[self, data, only_allowed_settings]]:
constant[Sets current BIOS settings to the provided data.
:param: only_allowed_settings: True when only allowed BIOS settings
are to be set. If False, all the BIOS settings supported by
iLO and present in the 'data' are set.
:param: data: a dictionary of BIOS settings to be applied. Depending
on the 'only_allowed_settings', either only the allowed
settings are set or all the supported settings that are in the
'data' are set.
:raises: IloError, on an error from iLO.
:raises: IloCommandNotSupportedError, if the command is not supported
on the server.
]
if <ast.UnaryOp object at 0x7da1b193f7f0> begin[:]
<ast.Raise object at 0x7da1b193c9d0>
variable[sushy_system] assign[=] call[name[self]._get_sushy_system, parameter[name[PROLIANT_SYSTEM_ID]]]
if name[only_allowed_settings] begin[:]
variable[unsupported_settings] assign[=] <ast.ListComp object at 0x7da1b193c220>
if name[unsupported_settings] begin[:]
variable[msg] assign[=] binary_operation[constant[Could not apply settings as one or more settings are not supported. Unsupported settings are %s. Supported settings are %s.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b193c1f0>, <ast.Attribute object at 0x7da1b193ee00>]]]
<ast.Raise object at 0x7da1b193c5b0>
<ast.Try object at 0x7da1b193c2e0> | keyword[def] identifier[set_bios_settings] ( identifier[self] , identifier[data] = keyword[None] , identifier[only_allowed_settings] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[data] :
keyword[raise] identifier[exception] . identifier[IloError] ( literal[string]
literal[string] )
identifier[sushy_system] = identifier[self] . identifier[_get_sushy_system] ( identifier[PROLIANT_SYSTEM_ID] )
keyword[if] identifier[only_allowed_settings] :
identifier[unsupported_settings] =[ identifier[key] keyword[for] identifier[key] keyword[in] identifier[data] keyword[if] identifier[key] keyword[not] keyword[in] (
identifier[ilo_cons] . identifier[SUPPORTED_REDFISH_BIOS_PROPERTIES] )]
keyword[if] identifier[unsupported_settings] :
identifier[msg] =( literal[string]
literal[string]
literal[string] %(
identifier[unsupported_settings] ,
identifier[ilo_cons] . identifier[SUPPORTED_REDFISH_BIOS_PROPERTIES] ))
keyword[raise] identifier[exception] . identifier[IloError] ( identifier[msg] )
keyword[try] :
identifier[settings_required] = identifier[sushy_system] . identifier[bios_settings] . identifier[pending_settings]
identifier[settings_required] . identifier[update_bios_data_by_patch] ( identifier[data] )
keyword[except] identifier[sushy] . identifier[exceptions] . identifier[SushyError] keyword[as] identifier[e] :
identifier[msg] =( identifier[self] . identifier[_] ( literal[string]
literal[string] )%
{ literal[string] : identifier[str] ( identifier[e] )})
identifier[LOG] . identifier[debug] ( identifier[msg] )
keyword[raise] identifier[exception] . identifier[IloError] ( identifier[msg] ) | def set_bios_settings(self, data=None, only_allowed_settings=True):
"""Sets current BIOS settings to the provided data.
:param: only_allowed_settings: True when only allowed BIOS settings
are to be set. If False, all the BIOS settings supported by
iLO and present in the 'data' are set.
:param: data: a dictionary of BIOS settings to be applied. Depending
on the 'only_allowed_settings', either only the allowed
settings are set or all the supported settings that are in the
'data' are set.
:raises: IloError, on an error from iLO.
:raises: IloCommandNotSupportedError, if the command is not supported
on the server.
"""
if not data:
raise exception.IloError('Could not apply settings with empty data') # depends on [control=['if'], data=[]]
sushy_system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
if only_allowed_settings:
unsupported_settings = [key for key in data if key not in ilo_cons.SUPPORTED_REDFISH_BIOS_PROPERTIES]
if unsupported_settings:
msg = 'Could not apply settings as one or more settings are not supported. Unsupported settings are %s. Supported settings are %s.' % (unsupported_settings, ilo_cons.SUPPORTED_REDFISH_BIOS_PROPERTIES)
raise exception.IloError(msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
try:
settings_required = sushy_system.bios_settings.pending_settings
settings_required.update_bios_data_by_patch(data) # depends on [control=['try'], data=[]]
except sushy.exceptions.SushyError as e:
msg = self._('The pending BIOS Settings resource not found. Error %(error)s') % {'error': str(e)}
LOG.debug(msg)
raise exception.IloError(msg) # depends on [control=['except'], data=['e']] |
def glob_handler(extension, folder=None, identifier=None):
"""Return a list of all files matching specified inputs.
Parameters
----------
extension : string
File extension.
folder : string (optional)
Folder to search within. Default is None (current working
directory).
identifier : string
Unique identifier. Default is None.
Returns
-------
list of strings
Full path of matching files.
"""
filepaths = []
if folder:
# comment out [ and ]...
folder = folder.replace("[", "?")
folder = folder.replace("]", "*")
folder = folder.replace("?", "[[]")
folder = folder.replace("*", "[]]")
glob_str = os.path.join(folder, "*" + extension)
else:
glob_str = "*" + extension + "*"
for filepath in glob.glob(glob_str):
if identifier:
if identifier in filepath:
filepaths.append(filepath)
else:
filepaths.append(filepath)
return filepaths | def function[glob_handler, parameter[extension, folder, identifier]]:
constant[Return a list of all files matching specified inputs.
Parameters
----------
extension : string
File extension.
folder : string (optional)
Folder to search within. Default is None (current working
directory).
identifier : string
Unique identifier. Default is None.
Returns
-------
list of strings
Full path of matching files.
]
variable[filepaths] assign[=] list[[]]
if name[folder] begin[:]
variable[folder] assign[=] call[name[folder].replace, parameter[constant[[], constant[?]]]
variable[folder] assign[=] call[name[folder].replace, parameter[constant[]], constant[*]]]
variable[folder] assign[=] call[name[folder].replace, parameter[constant[?], constant[[[]]]]
variable[folder] assign[=] call[name[folder].replace, parameter[constant[*], constant[[]]]]]
variable[glob_str] assign[=] call[name[os].path.join, parameter[name[folder], binary_operation[constant[*] + name[extension]]]]
for taget[name[filepath]] in starred[call[name[glob].glob, parameter[name[glob_str]]]] begin[:]
if name[identifier] begin[:]
if compare[name[identifier] in name[filepath]] begin[:]
call[name[filepaths].append, parameter[name[filepath]]]
return[name[filepaths]] | keyword[def] identifier[glob_handler] ( identifier[extension] , identifier[folder] = keyword[None] , identifier[identifier] = keyword[None] ):
literal[string]
identifier[filepaths] =[]
keyword[if] identifier[folder] :
identifier[folder] = identifier[folder] . identifier[replace] ( literal[string] , literal[string] )
identifier[folder] = identifier[folder] . identifier[replace] ( literal[string] , literal[string] )
identifier[folder] = identifier[folder] . identifier[replace] ( literal[string] , literal[string] )
identifier[folder] = identifier[folder] . identifier[replace] ( literal[string] , literal[string] )
identifier[glob_str] = identifier[os] . identifier[path] . identifier[join] ( identifier[folder] , literal[string] + identifier[extension] )
keyword[else] :
identifier[glob_str] = literal[string] + identifier[extension] + literal[string]
keyword[for] identifier[filepath] keyword[in] identifier[glob] . identifier[glob] ( identifier[glob_str] ):
keyword[if] identifier[identifier] :
keyword[if] identifier[identifier] keyword[in] identifier[filepath] :
identifier[filepaths] . identifier[append] ( identifier[filepath] )
keyword[else] :
identifier[filepaths] . identifier[append] ( identifier[filepath] )
keyword[return] identifier[filepaths] | def glob_handler(extension, folder=None, identifier=None):
"""Return a list of all files matching specified inputs.
Parameters
----------
extension : string
File extension.
folder : string (optional)
Folder to search within. Default is None (current working
directory).
identifier : string
Unique identifier. Default is None.
Returns
-------
list of strings
Full path of matching files.
"""
filepaths = []
if folder:
# comment out [ and ]...
folder = folder.replace('[', '?')
folder = folder.replace(']', '*')
folder = folder.replace('?', '[[]')
folder = folder.replace('*', '[]]')
glob_str = os.path.join(folder, '*' + extension) # depends on [control=['if'], data=[]]
else:
glob_str = '*' + extension + '*'
for filepath in glob.glob(glob_str):
if identifier:
if identifier in filepath:
filepaths.append(filepath) # depends on [control=['if'], data=['filepath']] # depends on [control=['if'], data=[]]
else:
filepaths.append(filepath) # depends on [control=['for'], data=['filepath']]
return filepaths |
def flow_pipe(Diam, HeadLoss, Length, Nu, PipeRough, KMinor):
"""Return the the flow in a straight pipe.
This function works for both major and minor losses and
works whether the flow is laminar or turbulent.
"""
#Inputs do not need to be checked here because they are checked by
#functions this function calls.
if KMinor == 0:
FlowRate = flow_pipemajor(Diam, HeadLoss, Length, Nu,
PipeRough).magnitude
else:
FlowRatePrev = 0
err = 1.0
FlowRate = min(flow_pipemajor(Diam, HeadLoss, Length,
Nu, PipeRough).magnitude,
flow_pipeminor(Diam, HeadLoss, KMinor).magnitude
)
while err > 0.01:
FlowRatePrev = FlowRate
HLFricNew = (HeadLoss * headloss_fric(FlowRate, Diam, Length,
Nu, PipeRough).magnitude
/ (headloss_fric(FlowRate, Diam, Length,
Nu, PipeRough).magnitude
+ headloss_exp(FlowRate, Diam, KMinor).magnitude
)
)
FlowRate = flow_pipemajor(Diam, HLFricNew, Length,
Nu, PipeRough).magnitude
if FlowRate == 0:
err = 0.0
else:
err = (abs(FlowRate - FlowRatePrev)
/ ((FlowRate + FlowRatePrev) / 2)
)
return FlowRate | def function[flow_pipe, parameter[Diam, HeadLoss, Length, Nu, PipeRough, KMinor]]:
constant[Return the the flow in a straight pipe.
This function works for both major and minor losses and
works whether the flow is laminar or turbulent.
]
if compare[name[KMinor] equal[==] constant[0]] begin[:]
variable[FlowRate] assign[=] call[name[flow_pipemajor], parameter[name[Diam], name[HeadLoss], name[Length], name[Nu], name[PipeRough]]].magnitude
return[name[FlowRate]] | keyword[def] identifier[flow_pipe] ( identifier[Diam] , identifier[HeadLoss] , identifier[Length] , identifier[Nu] , identifier[PipeRough] , identifier[KMinor] ):
literal[string]
keyword[if] identifier[KMinor] == literal[int] :
identifier[FlowRate] = identifier[flow_pipemajor] ( identifier[Diam] , identifier[HeadLoss] , identifier[Length] , identifier[Nu] ,
identifier[PipeRough] ). identifier[magnitude]
keyword[else] :
identifier[FlowRatePrev] = literal[int]
identifier[err] = literal[int]
identifier[FlowRate] = identifier[min] ( identifier[flow_pipemajor] ( identifier[Diam] , identifier[HeadLoss] , identifier[Length] ,
identifier[Nu] , identifier[PipeRough] ). identifier[magnitude] ,
identifier[flow_pipeminor] ( identifier[Diam] , identifier[HeadLoss] , identifier[KMinor] ). identifier[magnitude]
)
keyword[while] identifier[err] > literal[int] :
identifier[FlowRatePrev] = identifier[FlowRate]
identifier[HLFricNew] =( identifier[HeadLoss] * identifier[headloss_fric] ( identifier[FlowRate] , identifier[Diam] , identifier[Length] ,
identifier[Nu] , identifier[PipeRough] ). identifier[magnitude]
/( identifier[headloss_fric] ( identifier[FlowRate] , identifier[Diam] , identifier[Length] ,
identifier[Nu] , identifier[PipeRough] ). identifier[magnitude]
+ identifier[headloss_exp] ( identifier[FlowRate] , identifier[Diam] , identifier[KMinor] ). identifier[magnitude]
)
)
identifier[FlowRate] = identifier[flow_pipemajor] ( identifier[Diam] , identifier[HLFricNew] , identifier[Length] ,
identifier[Nu] , identifier[PipeRough] ). identifier[magnitude]
keyword[if] identifier[FlowRate] == literal[int] :
identifier[err] = literal[int]
keyword[else] :
identifier[err] =( identifier[abs] ( identifier[FlowRate] - identifier[FlowRatePrev] )
/(( identifier[FlowRate] + identifier[FlowRatePrev] )/ literal[int] )
)
keyword[return] identifier[FlowRate] | def flow_pipe(Diam, HeadLoss, Length, Nu, PipeRough, KMinor):
"""Return the the flow in a straight pipe.
This function works for both major and minor losses and
works whether the flow is laminar or turbulent.
"""
#Inputs do not need to be checked here because they are checked by
#functions this function calls.
if KMinor == 0:
FlowRate = flow_pipemajor(Diam, HeadLoss, Length, Nu, PipeRough).magnitude # depends on [control=['if'], data=[]]
else:
FlowRatePrev = 0
err = 1.0
FlowRate = min(flow_pipemajor(Diam, HeadLoss, Length, Nu, PipeRough).magnitude, flow_pipeminor(Diam, HeadLoss, KMinor).magnitude)
while err > 0.01:
FlowRatePrev = FlowRate
HLFricNew = HeadLoss * headloss_fric(FlowRate, Diam, Length, Nu, PipeRough).magnitude / (headloss_fric(FlowRate, Diam, Length, Nu, PipeRough).magnitude + headloss_exp(FlowRate, Diam, KMinor).magnitude)
FlowRate = flow_pipemajor(Diam, HLFricNew, Length, Nu, PipeRough).magnitude
if FlowRate == 0:
err = 0.0 # depends on [control=['if'], data=[]]
else:
err = abs(FlowRate - FlowRatePrev) / ((FlowRate + FlowRatePrev) / 2) # depends on [control=['while'], data=['err']]
return FlowRate |
def show_system_monitor_output_switch_status_report_time(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_system_monitor = ET.Element("show_system_monitor")
config = show_system_monitor
output = ET.SubElement(show_system_monitor, "output")
switch_status = ET.SubElement(output, "switch-status")
report_time = ET.SubElement(switch_status, "report-time")
report_time.text = kwargs.pop('report_time')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[show_system_monitor_output_switch_status_report_time, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[show_system_monitor] assign[=] call[name[ET].Element, parameter[constant[show_system_monitor]]]
variable[config] assign[=] name[show_system_monitor]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[show_system_monitor], constant[output]]]
variable[switch_status] assign[=] call[name[ET].SubElement, parameter[name[output], constant[switch-status]]]
variable[report_time] assign[=] call[name[ET].SubElement, parameter[name[switch_status], constant[report-time]]]
name[report_time].text assign[=] call[name[kwargs].pop, parameter[constant[report_time]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[show_system_monitor_output_switch_status_report_time] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[show_system_monitor] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[show_system_monitor]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[show_system_monitor] , literal[string] )
identifier[switch_status] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[report_time] = identifier[ET] . identifier[SubElement] ( identifier[switch_status] , literal[string] )
identifier[report_time] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def show_system_monitor_output_switch_status_report_time(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
show_system_monitor = ET.Element('show_system_monitor')
config = show_system_monitor
output = ET.SubElement(show_system_monitor, 'output')
switch_status = ET.SubElement(output, 'switch-status')
report_time = ET.SubElement(switch_status, 'report-time')
report_time.text = kwargs.pop('report_time')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def apply_files_filter(self, frame, filename, force_check_project_scope):
'''
Should only be called if `self.is_files_filter_enabled == True`.
Note that it covers both the filter by specific paths includes/excludes as well
as the check which filters out libraries if not in the project scope.
:param force_check_project_scope:
Check that the file is in the project scope even if the global setting
is off.
:return bool:
True if it should be excluded when stepping and False if it should be
included.
'''
cache_key = (frame.f_code.co_firstlineno, frame.f_code.co_name, filename, force_check_project_scope)
try:
return self._apply_filter_cache[cache_key]
except KeyError:
if self.plugin is not None and (self.has_plugin_line_breaks or self.has_plugin_exception_breaks):
# If it's explicitly needed by some plugin, we can't skip it.
if not self.plugin.can_skip(self, frame):
# print('include (include by plugins): %s' % filename)
self._apply_filter_cache[cache_key] = False
return False
if self._exclude_filters_enabled:
exclude_by_filter = self._exclude_by_filter(frame, filename)
if exclude_by_filter is not None:
if exclude_by_filter:
# ignore files matching stepping filters
# print('exclude (filtered out): %s' % filename)
self._apply_filter_cache[cache_key] = True
return True
else:
# print('include (explicitly included): %s' % filename)
self._apply_filter_cache[cache_key] = False
return False
if (self._is_libraries_filter_enabled or force_check_project_scope) and not self.in_project_scope(filename):
# print('exclude (not on project): %s' % filename)
# ignore library files while stepping
self._apply_filter_cache[cache_key] = True
return True
# print('include (on project): %s' % filename)
self._apply_filter_cache[cache_key] = False
return False | def function[apply_files_filter, parameter[self, frame, filename, force_check_project_scope]]:
constant[
Should only be called if `self.is_files_filter_enabled == True`.
Note that it covers both the filter by specific paths includes/excludes as well
as the check which filters out libraries if not in the project scope.
:param force_check_project_scope:
Check that the file is in the project scope even if the global setting
is off.
:return bool:
True if it should be excluded when stepping and False if it should be
included.
]
variable[cache_key] assign[=] tuple[[<ast.Attribute object at 0x7da18dc07ee0>, <ast.Attribute object at 0x7da18dc05e40>, <ast.Name object at 0x7da18dc05510>, <ast.Name object at 0x7da18dc05a80>]]
<ast.Try object at 0x7da18dc073a0> | keyword[def] identifier[apply_files_filter] ( identifier[self] , identifier[frame] , identifier[filename] , identifier[force_check_project_scope] ):
literal[string]
identifier[cache_key] =( identifier[frame] . identifier[f_code] . identifier[co_firstlineno] , identifier[frame] . identifier[f_code] . identifier[co_name] , identifier[filename] , identifier[force_check_project_scope] )
keyword[try] :
keyword[return] identifier[self] . identifier[_apply_filter_cache] [ identifier[cache_key] ]
keyword[except] identifier[KeyError] :
keyword[if] identifier[self] . identifier[plugin] keyword[is] keyword[not] keyword[None] keyword[and] ( identifier[self] . identifier[has_plugin_line_breaks] keyword[or] identifier[self] . identifier[has_plugin_exception_breaks] ):
keyword[if] keyword[not] identifier[self] . identifier[plugin] . identifier[can_skip] ( identifier[self] , identifier[frame] ):
identifier[self] . identifier[_apply_filter_cache] [ identifier[cache_key] ]= keyword[False]
keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[_exclude_filters_enabled] :
identifier[exclude_by_filter] = identifier[self] . identifier[_exclude_by_filter] ( identifier[frame] , identifier[filename] )
keyword[if] identifier[exclude_by_filter] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[exclude_by_filter] :
identifier[self] . identifier[_apply_filter_cache] [ identifier[cache_key] ]= keyword[True]
keyword[return] keyword[True]
keyword[else] :
identifier[self] . identifier[_apply_filter_cache] [ identifier[cache_key] ]= keyword[False]
keyword[return] keyword[False]
keyword[if] ( identifier[self] . identifier[_is_libraries_filter_enabled] keyword[or] identifier[force_check_project_scope] ) keyword[and] keyword[not] identifier[self] . identifier[in_project_scope] ( identifier[filename] ):
identifier[self] . identifier[_apply_filter_cache] [ identifier[cache_key] ]= keyword[True]
keyword[return] keyword[True]
identifier[self] . identifier[_apply_filter_cache] [ identifier[cache_key] ]= keyword[False]
keyword[return] keyword[False] | def apply_files_filter(self, frame, filename, force_check_project_scope):
"""
Should only be called if `self.is_files_filter_enabled == True`.
Note that it covers both the filter by specific paths includes/excludes as well
as the check which filters out libraries if not in the project scope.
:param force_check_project_scope:
Check that the file is in the project scope even if the global setting
is off.
:return bool:
True if it should be excluded when stepping and False if it should be
included.
"""
cache_key = (frame.f_code.co_firstlineno, frame.f_code.co_name, filename, force_check_project_scope)
try:
return self._apply_filter_cache[cache_key] # depends on [control=['try'], data=[]]
except KeyError:
if self.plugin is not None and (self.has_plugin_line_breaks or self.has_plugin_exception_breaks):
# If it's explicitly needed by some plugin, we can't skip it.
if not self.plugin.can_skip(self, frame):
# print('include (include by plugins): %s' % filename)
self._apply_filter_cache[cache_key] = False
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if self._exclude_filters_enabled:
exclude_by_filter = self._exclude_by_filter(frame, filename)
if exclude_by_filter is not None:
if exclude_by_filter:
# ignore files matching stepping filters
# print('exclude (filtered out): %s' % filename)
self._apply_filter_cache[cache_key] = True
return True # depends on [control=['if'], data=[]]
else:
# print('include (explicitly included): %s' % filename)
self._apply_filter_cache[cache_key] = False
return False # depends on [control=['if'], data=['exclude_by_filter']] # depends on [control=['if'], data=[]]
if (self._is_libraries_filter_enabled or force_check_project_scope) and (not self.in_project_scope(filename)):
# print('exclude (not on project): %s' % filename)
# ignore library files while stepping
self._apply_filter_cache[cache_key] = True
return True # depends on [control=['if'], data=[]]
# print('include (on project): %s' % filename)
self._apply_filter_cache[cache_key] = False
return False # depends on [control=['except'], data=[]] |
def make_table(data, col_names):
"""Code for this RST-formatted table generator comes from
http://stackoverflow.com/a/11350643
"""
n_cols = len(data[0])
assert n_cols == len(col_names)
col_sizes = [max(len(r[i]) for r in data) for i in range(n_cols)]
for i, cname in enumerate(col_names):
if col_sizes[i] < len(cname):
col_sizes[i] = len(cname)
formatter = ' '.join('{:<%d}' % c for c in col_sizes)
rows = '\n'.join([formatter.format(*row) for row in data])
header = formatter.format(*col_names)
divider = formatter.format(*['=' * c for c in col_sizes])
output = '\n'.join((divider, header, divider, rows, divider))
return output | def function[make_table, parameter[data, col_names]]:
constant[Code for this RST-formatted table generator comes from
http://stackoverflow.com/a/11350643
]
variable[n_cols] assign[=] call[name[len], parameter[call[name[data]][constant[0]]]]
assert[compare[name[n_cols] equal[==] call[name[len], parameter[name[col_names]]]]]
variable[col_sizes] assign[=] <ast.ListComp object at 0x7da1b0329420>
for taget[tuple[[<ast.Name object at 0x7da1b032b640>, <ast.Name object at 0x7da1b0328370>]]] in starred[call[name[enumerate], parameter[name[col_names]]]] begin[:]
if compare[call[name[col_sizes]][name[i]] less[<] call[name[len], parameter[name[cname]]]] begin[:]
call[name[col_sizes]][name[i]] assign[=] call[name[len], parameter[name[cname]]]
variable[formatter] assign[=] call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da1b032a3e0>]]
variable[rows] assign[=] call[constant[
].join, parameter[<ast.ListComp object at 0x7da1b032a2c0>]]
variable[header] assign[=] call[name[formatter].format, parameter[<ast.Starred object at 0x7da1b032b550>]]
variable[divider] assign[=] call[name[formatter].format, parameter[<ast.Starred object at 0x7da1b0329fc0>]]
variable[output] assign[=] call[constant[
].join, parameter[tuple[[<ast.Name object at 0x7da1b032a230>, <ast.Name object at 0x7da1b032a290>, <ast.Name object at 0x7da1b0328dc0>, <ast.Name object at 0x7da1b032a710>, <ast.Name object at 0x7da1b032abc0>]]]]
return[name[output]] | keyword[def] identifier[make_table] ( identifier[data] , identifier[col_names] ):
literal[string]
identifier[n_cols] = identifier[len] ( identifier[data] [ literal[int] ])
keyword[assert] identifier[n_cols] == identifier[len] ( identifier[col_names] )
identifier[col_sizes] =[ identifier[max] ( identifier[len] ( identifier[r] [ identifier[i] ]) keyword[for] identifier[r] keyword[in] identifier[data] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_cols] )]
keyword[for] identifier[i] , identifier[cname] keyword[in] identifier[enumerate] ( identifier[col_names] ):
keyword[if] identifier[col_sizes] [ identifier[i] ]< identifier[len] ( identifier[cname] ):
identifier[col_sizes] [ identifier[i] ]= identifier[len] ( identifier[cname] )
identifier[formatter] = literal[string] . identifier[join] ( literal[string] % identifier[c] keyword[for] identifier[c] keyword[in] identifier[col_sizes] )
identifier[rows] = literal[string] . identifier[join] ([ identifier[formatter] . identifier[format] (* identifier[row] ) keyword[for] identifier[row] keyword[in] identifier[data] ])
identifier[header] = identifier[formatter] . identifier[format] (* identifier[col_names] )
identifier[divider] = identifier[formatter] . identifier[format] (*[ literal[string] * identifier[c] keyword[for] identifier[c] keyword[in] identifier[col_sizes] ])
identifier[output] = literal[string] . identifier[join] (( identifier[divider] , identifier[header] , identifier[divider] , identifier[rows] , identifier[divider] ))
keyword[return] identifier[output] | def make_table(data, col_names):
"""Code for this RST-formatted table generator comes from
http://stackoverflow.com/a/11350643
"""
n_cols = len(data[0])
assert n_cols == len(col_names)
col_sizes = [max((len(r[i]) for r in data)) for i in range(n_cols)]
for (i, cname) in enumerate(col_names):
if col_sizes[i] < len(cname):
col_sizes[i] = len(cname) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
formatter = ' '.join(('{:<%d}' % c for c in col_sizes))
rows = '\n'.join([formatter.format(*row) for row in data])
header = formatter.format(*col_names)
divider = formatter.format(*['=' * c for c in col_sizes])
output = '\n'.join((divider, header, divider, rows, divider))
return output |
def output_to_dict(self, output):
"""
Convert the ROUGE output into python dictionary for further
processing.
"""
#0 ROUGE-1 Average_R: 0.02632 (95%-conf.int. 0.02632 - 0.02632)
pattern = re.compile(
r"(\d+) (ROUGE-\S+) (Average_\w): (\d.\d+) "
r"\(95%-conf.int. (\d.\d+) - (\d.\d+)\)")
results = {}
for line in output.split("\n"):
match = pattern.match(line)
if match:
sys_id, rouge_type, measure, result, conf_begin, conf_end = \
match.groups()
measure = {
'Average_R': 'recall',
'Average_P': 'precision',
'Average_F': 'f_score'
}[measure]
rouge_type = rouge_type.lower().replace("-", '_')
key = "{}_{}".format(rouge_type, measure)
results[key] = float(result)
results["{}_cb".format(key)] = float(conf_begin)
results["{}_ce".format(key)] = float(conf_end)
return results | def function[output_to_dict, parameter[self, output]]:
constant[
Convert the ROUGE output into python dictionary for further
processing.
]
variable[pattern] assign[=] call[name[re].compile, parameter[constant[(\d+) (ROUGE-\S+) (Average_\w): (\d.\d+) \(95%-conf.int. (\d.\d+) - (\d.\d+)\)]]]
variable[results] assign[=] dictionary[[], []]
for taget[name[line]] in starred[call[name[output].split, parameter[constant[
]]]] begin[:]
variable[match] assign[=] call[name[pattern].match, parameter[name[line]]]
if name[match] begin[:]
<ast.Tuple object at 0x7da1b0745bd0> assign[=] call[name[match].groups, parameter[]]
variable[measure] assign[=] call[dictionary[[<ast.Constant object at 0x7da1b07443d0>, <ast.Constant object at 0x7da1b07440a0>, <ast.Constant object at 0x7da1b07452d0>], [<ast.Constant object at 0x7da1b0745750>, <ast.Constant object at 0x7da1b0746590>, <ast.Constant object at 0x7da1b0745f00>]]][name[measure]]
variable[rouge_type] assign[=] call[call[name[rouge_type].lower, parameter[]].replace, parameter[constant[-], constant[_]]]
variable[key] assign[=] call[constant[{}_{}].format, parameter[name[rouge_type], name[measure]]]
call[name[results]][name[key]] assign[=] call[name[float], parameter[name[result]]]
call[name[results]][call[constant[{}_cb].format, parameter[name[key]]]] assign[=] call[name[float], parameter[name[conf_begin]]]
call[name[results]][call[constant[{}_ce].format, parameter[name[key]]]] assign[=] call[name[float], parameter[name[conf_end]]]
return[name[results]] | keyword[def] identifier[output_to_dict] ( identifier[self] , identifier[output] ):
literal[string]
identifier[pattern] = identifier[re] . identifier[compile] (
literal[string]
literal[string] )
identifier[results] ={}
keyword[for] identifier[line] keyword[in] identifier[output] . identifier[split] ( literal[string] ):
identifier[match] = identifier[pattern] . identifier[match] ( identifier[line] )
keyword[if] identifier[match] :
identifier[sys_id] , identifier[rouge_type] , identifier[measure] , identifier[result] , identifier[conf_begin] , identifier[conf_end] = identifier[match] . identifier[groups] ()
identifier[measure] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}[ identifier[measure] ]
identifier[rouge_type] = identifier[rouge_type] . identifier[lower] (). identifier[replace] ( literal[string] , literal[string] )
identifier[key] = literal[string] . identifier[format] ( identifier[rouge_type] , identifier[measure] )
identifier[results] [ identifier[key] ]= identifier[float] ( identifier[result] )
identifier[results] [ literal[string] . identifier[format] ( identifier[key] )]= identifier[float] ( identifier[conf_begin] )
identifier[results] [ literal[string] . identifier[format] ( identifier[key] )]= identifier[float] ( identifier[conf_end] )
keyword[return] identifier[results] | def output_to_dict(self, output):
"""
Convert the ROUGE output into python dictionary for further
processing.
"""
#0 ROUGE-1 Average_R: 0.02632 (95%-conf.int. 0.02632 - 0.02632)
pattern = re.compile('(\\d+) (ROUGE-\\S+) (Average_\\w): (\\d.\\d+) \\(95%-conf.int. (\\d.\\d+) - (\\d.\\d+)\\)')
results = {}
for line in output.split('\n'):
match = pattern.match(line)
if match:
(sys_id, rouge_type, measure, result, conf_begin, conf_end) = match.groups()
measure = {'Average_R': 'recall', 'Average_P': 'precision', 'Average_F': 'f_score'}[measure]
rouge_type = rouge_type.lower().replace('-', '_')
key = '{}_{}'.format(rouge_type, measure)
results[key] = float(result)
results['{}_cb'.format(key)] = float(conf_begin)
results['{}_ce'.format(key)] = float(conf_end) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
return results |
def _state_invalid(self):
"""
If the state is invalid for the transition, return details on what didn't match
:return: Tuple of (state manager, current state, label for current state)
"""
for statemanager, conditions in self.statetransition.transitions.items():
current_state = getattr(self.obj, statemanager.propname)
if conditions['from'] is None:
state_valid = True
else:
mstate = conditions['from'].get(current_state)
state_valid = mstate and mstate(self.obj)
if state_valid and conditions['if']:
state_valid = all(v(self.obj) for v in conditions['if'])
if not state_valid:
return statemanager, current_state, statemanager.lenum.get(current_state) | def function[_state_invalid, parameter[self]]:
constant[
If the state is invalid for the transition, return details on what didn't match
:return: Tuple of (state manager, current state, label for current state)
]
for taget[tuple[[<ast.Name object at 0x7da18f00d4e0>, <ast.Name object at 0x7da18f00f250>]]] in starred[call[name[self].statetransition.transitions.items, parameter[]]] begin[:]
variable[current_state] assign[=] call[name[getattr], parameter[name[self].obj, name[statemanager].propname]]
if compare[call[name[conditions]][constant[from]] is constant[None]] begin[:]
variable[state_valid] assign[=] constant[True]
if <ast.BoolOp object at 0x7da18f00dcf0> begin[:]
variable[state_valid] assign[=] call[name[all], parameter[<ast.GeneratorExp object at 0x7da18f00f550>]]
if <ast.UnaryOp object at 0x7da18f00c100> begin[:]
return[tuple[[<ast.Name object at 0x7da18f00e020>, <ast.Name object at 0x7da18f00ebc0>, <ast.Call object at 0x7da18f00fd00>]]] | keyword[def] identifier[_state_invalid] ( identifier[self] ):
literal[string]
keyword[for] identifier[statemanager] , identifier[conditions] keyword[in] identifier[self] . identifier[statetransition] . identifier[transitions] . identifier[items] ():
identifier[current_state] = identifier[getattr] ( identifier[self] . identifier[obj] , identifier[statemanager] . identifier[propname] )
keyword[if] identifier[conditions] [ literal[string] ] keyword[is] keyword[None] :
identifier[state_valid] = keyword[True]
keyword[else] :
identifier[mstate] = identifier[conditions] [ literal[string] ]. identifier[get] ( identifier[current_state] )
identifier[state_valid] = identifier[mstate] keyword[and] identifier[mstate] ( identifier[self] . identifier[obj] )
keyword[if] identifier[state_valid] keyword[and] identifier[conditions] [ literal[string] ]:
identifier[state_valid] = identifier[all] ( identifier[v] ( identifier[self] . identifier[obj] ) keyword[for] identifier[v] keyword[in] identifier[conditions] [ literal[string] ])
keyword[if] keyword[not] identifier[state_valid] :
keyword[return] identifier[statemanager] , identifier[current_state] , identifier[statemanager] . identifier[lenum] . identifier[get] ( identifier[current_state] ) | def _state_invalid(self):
"""
If the state is invalid for the transition, return details on what didn't match
:return: Tuple of (state manager, current state, label for current state)
"""
for (statemanager, conditions) in self.statetransition.transitions.items():
current_state = getattr(self.obj, statemanager.propname)
if conditions['from'] is None:
state_valid = True # depends on [control=['if'], data=[]]
else:
mstate = conditions['from'].get(current_state)
state_valid = mstate and mstate(self.obj)
if state_valid and conditions['if']:
state_valid = all((v(self.obj) for v in conditions['if'])) # depends on [control=['if'], data=[]]
if not state_valid:
return (statemanager, current_state, statemanager.lenum.get(current_state)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def unregister(self, model_or_iterable):
"""
Remove a model or a list of models from the list of models
whose comments will be moderated.
Raise ``NotModerated`` if any of the models are not currently
registered for moderation.
"""
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model not in self._registry:
raise NotModerated("The model '%s' is not currently being moderated" % model._meta.module_name)
del self._registry[model] | def function[unregister, parameter[self, model_or_iterable]]:
constant[
Remove a model or a list of models from the list of models
whose comments will be moderated.
Raise ``NotModerated`` if any of the models are not currently
registered for moderation.
]
if call[name[isinstance], parameter[name[model_or_iterable], name[ModelBase]]] begin[:]
variable[model_or_iterable] assign[=] list[[<ast.Name object at 0x7da1b13bbaf0>]]
for taget[name[model]] in starred[name[model_or_iterable]] begin[:]
if compare[name[model] <ast.NotIn object at 0x7da2590d7190> name[self]._registry] begin[:]
<ast.Raise object at 0x7da1b13b9660>
<ast.Delete object at 0x7da1b13bbee0> | keyword[def] identifier[unregister] ( identifier[self] , identifier[model_or_iterable] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[model_or_iterable] , identifier[ModelBase] ):
identifier[model_or_iterable] =[ identifier[model_or_iterable] ]
keyword[for] identifier[model] keyword[in] identifier[model_or_iterable] :
keyword[if] identifier[model] keyword[not] keyword[in] identifier[self] . identifier[_registry] :
keyword[raise] identifier[NotModerated] ( literal[string] % identifier[model] . identifier[_meta] . identifier[module_name] )
keyword[del] identifier[self] . identifier[_registry] [ identifier[model] ] | def unregister(self, model_or_iterable):
"""
Remove a model or a list of models from the list of models
whose comments will be moderated.
Raise ``NotModerated`` if any of the models are not currently
registered for moderation.
"""
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable] # depends on [control=['if'], data=[]]
for model in model_or_iterable:
if model not in self._registry:
raise NotModerated("The model '%s' is not currently being moderated" % model._meta.module_name) # depends on [control=['if'], data=['model']]
del self._registry[model] # depends on [control=['for'], data=['model']] |
def getNextSample(self, V):
        """
        Generate the next sample for the Condorcet model using the MCMC
        scheme described in "Computing Optimal Bayesian Decisions for Rank
        Aggregation via MCMC Sampling" (adapted from code by Lirong Xia).
        :ivar list<list<int>> V: A two-dimensional list where, for every pair
            of candidates cand1 and cand2, V[cand1][cand2] is 1 if cand1 is
            ranked above cand2 more often than the reverse, and 0 otherwise.
        """
        numCands = len(self.wmg)
        proposal = copy.deepcopy(V)
        # Orient every pairwise edge independently; the flip probability is
        # driven by phi and the weighted majority graph (self.wmg is keyed
        # by 1-based candidate indices).
        for a, b in itertools.combinations(range(numCands), 2):
            threshold = 1.0 / (1.0 + pow(self.phi, self.wmg[a + 1][b + 1]))
            if random.random() < threshold:
                proposal[a][b], proposal[b][a] = 1, 0
            else:
                proposal[a][b], proposal[b][a] = 0, 1
        # NOTE(review): prMW/prMV are placeholders, so the acceptance
        # probability is always 1 and the proposal is always taken.
        prMW = 1
        prMV = 1
        acceptProb = min(1.0, prMW / prMV)
        if random.random() <= acceptProb:
            V = proposal
        return V
constant[
Generate the next sample for the condorcet model. This algorithm is described in "Computing
Optimal Bayesian Decisions for Rank Aggregation via MCMC Sampling," and is adapted from
code written by Lirong Xia.
:ivar list<list<int> V: A two-dimensional list that for every pair of candidates cand1 and
cand2, V[cand1][cand2] contains 1 if cand1 is ranked above cand2 more times than cand2
is ranked above cand1 and 0 otherwise.
]
variable[cands] assign[=] call[name[range], parameter[call[name[len], parameter[name[self].wmg]]]]
variable[W] assign[=] call[name[copy].deepcopy, parameter[name[V]]]
variable[allPairs] assign[=] call[name[itertools].combinations, parameter[name[cands], constant[2]]]
for taget[name[pair]] in starred[name[allPairs]] begin[:]
variable[a] assign[=] call[name[pair]][constant[0]]
variable[b] assign[=] call[name[pair]][constant[1]]
if compare[call[name[random].random, parameter[]] less[<] binary_operation[constant[1.0] / binary_operation[constant[1.0] + call[name[pow], parameter[name[self].phi, call[call[name[self].wmg][binary_operation[name[a] + constant[1]]]][binary_operation[name[b] + constant[1]]]]]]]] begin[:]
call[call[name[W]][name[a]]][name[b]] assign[=] constant[1]
call[call[name[W]][name[b]]][name[a]] assign[=] constant[0]
variable[prMW] assign[=] constant[1]
variable[prMV] assign[=] constant[1]
variable[prob] assign[=] call[name[min], parameter[constant[1.0], binary_operation[name[prMW] / name[prMV]]]]
if compare[call[name[random].random, parameter[]] less_or_equal[<=] name[prob]] begin[:]
variable[V] assign[=] name[W]
return[name[V]] | keyword[def] identifier[getNextSample] ( identifier[self] , identifier[V] ):
literal[string]
identifier[cands] = identifier[range] ( identifier[len] ( identifier[self] . identifier[wmg] ))
identifier[W] = identifier[copy] . identifier[deepcopy] ( identifier[V] )
identifier[allPairs] = identifier[itertools] . identifier[combinations] ( identifier[cands] , literal[int] )
keyword[for] identifier[pair] keyword[in] identifier[allPairs] :
identifier[a] = identifier[pair] [ literal[int] ]
identifier[b] = identifier[pair] [ literal[int] ]
keyword[if] identifier[random] . identifier[random] ()< literal[int] /( literal[int] + identifier[pow] ( identifier[self] . identifier[phi] , identifier[self] . identifier[wmg] [ identifier[a] + literal[int] ][ identifier[b] + literal[int] ])):
identifier[W] [ identifier[a] ][ identifier[b] ]= literal[int]
identifier[W] [ identifier[b] ][ identifier[a] ]= literal[int]
keyword[else] :
identifier[W] [ identifier[a] ][ identifier[b] ]= literal[int]
identifier[W] [ identifier[b] ][ identifier[a] ]= literal[int]
identifier[prMW] = literal[int]
identifier[prMV] = literal[int]
identifier[prob] = identifier[min] ( literal[int] , identifier[prMW] / identifier[prMV] )
keyword[if] identifier[random] . identifier[random] ()<= identifier[prob] :
identifier[V] = identifier[W]
keyword[return] identifier[V] | def getNextSample(self, V):
"""
Generate the next sample for the condorcet model. This algorithm is described in "Computing
Optimal Bayesian Decisions for Rank Aggregation via MCMC Sampling," and is adapted from
code written by Lirong Xia.
:ivar list<list<int> V: A two-dimensional list that for every pair of candidates cand1 and
cand2, V[cand1][cand2] contains 1 if cand1 is ranked above cand2 more times than cand2
is ranked above cand1 and 0 otherwise.
"""
cands = range(len(self.wmg))
W = copy.deepcopy(V)
allPairs = itertools.combinations(cands, 2)
for pair in allPairs:
a = pair[0]
b = pair[1]
if random.random() < 1.0 / (1.0 + pow(self.phi, self.wmg[a + 1][b + 1])):
W[a][b] = 1
W[b][a] = 0 # depends on [control=['if'], data=[]]
else:
W[a][b] = 0
W[b][a] = 1 # depends on [control=['for'], data=['pair']]
prMW = 1
prMV = 1
prob = min(1.0, prMW / prMV)
if random.random() <= prob:
V = W # depends on [control=['if'], data=[]]
return V |
def patched_fax_init(self, twilio):
    """
    Initialize the Fax Domain.

    :param twilio: Parent Twilio client the domain is bound to.
    :returns: Domain for Fax
    :rtype: twilio.rest.fax.Fax
    """
    super(TwilioFax, self).__init__(twilio)
    self.account_sid = twilio.account_sid
    # NOTE(review): the base URL is deliberately blank here — full endpoint
    # paths presumably come from the version domain; confirm against callers.
    self.base_url = ''
    # Version domains are created lazily on first access.
    self._v1 = None
constant[
Initialize the Fax Domain
:returns: Domain for Fax
:rtype: twilio.rest.fax.Fax
]
call[call[name[super], parameter[name[TwilioFax], name[self]]].__init__, parameter[name[twilio]]]
name[self].base_url assign[=] constant[]
name[self].account_sid assign[=] name[twilio].account_sid
name[self]._v1 assign[=] constant[None] | keyword[def] identifier[patched_fax_init] ( identifier[self] , identifier[twilio] ):
literal[string]
identifier[super] ( identifier[TwilioFax] , identifier[self] ). identifier[__init__] ( identifier[twilio] )
identifier[self] . identifier[base_url] = literal[string]
identifier[self] . identifier[account_sid] = identifier[twilio] . identifier[account_sid]
identifier[self] . identifier[_v1] = keyword[None] | def patched_fax_init(self, twilio):
"""
Initialize the Fax Domain
:returns: Domain for Fax
:rtype: twilio.rest.fax.Fax
"""
super(TwilioFax, self).__init__(twilio)
self.base_url = ''
self.account_sid = twilio.account_sid
# Versions
self._v1 = None |
def terminal(self, out=None, border=None):
        """\
        Serializes the matrix as ANSI escape code.
        :param out: Filename or a file-like object supporting to write text.
                If ``None`` (default), the matrix is written to ``sys.stdout``.
        :param int border: Integer indicating the size of the quiet zone.
                If set to ``None`` (default), the recommended border size
                will be used (``4`` for QR Codes, ``2`` for a Micro QR Codes).
        """
        if out is not None or sys.platform != 'win32':
            writers.write_terminal(self.matrix, self._version, out or sys.stdout,
                                   border)
            return
        # Windows < 10 does not render ANSI escape sequences, so prefer the
        # Windows-API based writer when printing to stdout.  # pragma: no cover
        try:
            writers.write_terminal_win(self.matrix, self._version, border)
        except OSError:
            # Fall back to the standard writer even though it may print garbage.
            writers.write_terminal(self.matrix, self._version, sys.stdout,
                                   border)
constant[ Serializes the matrix as ANSI escape code.
:param out: Filename or a file-like object supporting to write text.
If ``None`` (default), the matrix is written to ``sys.stdout``.
:param int border: Integer indicating the size of the quiet zone.
If set to ``None`` (default), the recommended border size
will be used (``4`` for QR Codes, ``2`` for a Micro QR Codes).
]
if <ast.BoolOp object at 0x7da1b0c500d0> begin[:]
<ast.Try object at 0x7da1b0c53790> | keyword[def] identifier[terminal] ( identifier[self] , identifier[out] = keyword[None] , identifier[border] = keyword[None] ):
literal[string]
keyword[if] identifier[out] keyword[is] keyword[None] keyword[and] identifier[sys] . identifier[platform] == literal[string] :
keyword[try] :
identifier[writers] . identifier[write_terminal_win] ( identifier[self] . identifier[matrix] , identifier[self] . identifier[_version] , identifier[border] )
keyword[except] identifier[OSError] :
identifier[writers] . identifier[write_terminal] ( identifier[self] . identifier[matrix] , identifier[self] . identifier[_version] , identifier[sys] . identifier[stdout] ,
identifier[border] )
keyword[else] :
identifier[writers] . identifier[write_terminal] ( identifier[self] . identifier[matrix] , identifier[self] . identifier[_version] , identifier[out] keyword[or] identifier[sys] . identifier[stdout] ,
identifier[border] ) | def terminal(self, out=None, border=None):
""" Serializes the matrix as ANSI escape code.
:param out: Filename or a file-like object supporting to write text.
If ``None`` (default), the matrix is written to ``sys.stdout``.
:param int border: Integer indicating the size of the quiet zone.
If set to ``None`` (default), the recommended border size
will be used (``4`` for QR Codes, ``2`` for a Micro QR Codes).
"""
if out is None and sys.platform == 'win32': # pragma: no cover
# Windows < 10 does not support ANSI escape sequences, try to
# call the a Windows specific terminal output which uses the
# Windows API.
try:
writers.write_terminal_win(self.matrix, self._version, border) # depends on [control=['try'], data=[]]
except OSError:
# Use the standard output even if it may print garbage
writers.write_terminal(self.matrix, self._version, sys.stdout, border) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
writers.write_terminal(self.matrix, self._version, out or sys.stdout, border) |
def cli(env):
    """List options for creating a placement group."""
    placement = PlacementManager(env.client)
    # Emit the router table first, then the rule table, matching the
    # original output order.
    env.fout(get_router_table(placement.get_routers()))
    env.fout(get_rule_table(placement.get_all_rules()))
constant[List options for creating a placement group.]
variable[manager] assign[=] call[name[PlacementManager], parameter[name[env].client]]
variable[routers] assign[=] call[name[manager].get_routers, parameter[]]
call[name[env].fout, parameter[call[name[get_router_table], parameter[name[routers]]]]]
variable[rules] assign[=] call[name[manager].get_all_rules, parameter[]]
call[name[env].fout, parameter[call[name[get_rule_table], parameter[name[rules]]]]] | keyword[def] identifier[cli] ( identifier[env] ):
literal[string]
identifier[manager] = identifier[PlacementManager] ( identifier[env] . identifier[client] )
identifier[routers] = identifier[manager] . identifier[get_routers] ()
identifier[env] . identifier[fout] ( identifier[get_router_table] ( identifier[routers] ))
identifier[rules] = identifier[manager] . identifier[get_all_rules] ()
identifier[env] . identifier[fout] ( identifier[get_rule_table] ( identifier[rules] )) | def cli(env):
"""List options for creating a placement group."""
manager = PlacementManager(env.client)
routers = manager.get_routers()
env.fout(get_router_table(routers))
rules = manager.get_all_rules()
env.fout(get_rule_table(rules)) |
def autodiscover():
    """
    Auto-discover INSTALLED_APPS translation.py modules and fail silently when
    not present. This forces an import on them to register.
    Also import explicit modules.
    """
    import os
    import sys
    import copy
    from django.utils.module_loading import module_has_submodule
    from modeltranslation.translator import translator
    from modeltranslation.settings import TRANSLATION_FILES, DEBUG
    from importlib import import_module
    from django.apps import apps

    mods = [(app_config.name, app_config.module) for app_config in apps.get_app_configs()]
    for (app, mod) in mods:
        # Attempt to import the app's translation module.
        module = '%s.translation' % app
        before_import_registry = copy.copy(translator._registry)
        try:
            import_module(module)
        except Exception:
            # Was a bare ``except:``, which also swallowed SystemExit and
            # KeyboardInterrupt; Exception is the widest catch we want here.
            # Reset the model registry to the state before the last import as
            # this import will have to reoccur on the next request and this
            # could raise NotRegistered and AlreadyRegistered exceptions.
            translator._registry = before_import_registry
            # Decide whether to bubble up this error. If the app just
            # doesn't have an translation module, we can ignore the error
            # attempting to import it, otherwise we want it to bubble up.
            if module_has_submodule(mod, 'translation'):
                raise
    for module in TRANSLATION_FILES:
        import_module(module)
    # In debug mode, print a list of registered models and pid to stdout.
    # Note: Differing model order is fine, we don't rely on a particular
    # order, as far as base classes are registered before subclasses.
    # The explicit length guard replaces a try/except IndexError around
    # sys.argv[1]; behavior is identical when no subcommand is given.
    if DEBUG and len(sys.argv) > 1 and sys.argv[1] in ('runserver', 'runserver_plus'):
        models = translator.get_registered_models()
        names = ', '.join(m.__name__ for m in models)
        print('modeltranslation: Registered %d models for translation'
              ' (%s) [pid: %d].' % (len(models), names, os.getpid()))
constant[
Auto-discover INSTALLED_APPS translation.py modules and fail silently when
not present. This forces an import on them to register.
Also import explicit modules.
]
import module[os]
import module[sys]
import module[copy]
from relative_module[django.utils.module_loading] import module[module_has_submodule]
from relative_module[modeltranslation.translator] import module[translator]
from relative_module[modeltranslation.settings] import module[TRANSLATION_FILES], module[DEBUG]
from relative_module[importlib] import module[import_module]
from relative_module[django.apps] import module[apps]
variable[mods] assign[=] <ast.ListComp object at 0x7da1b1d55bd0>
for taget[tuple[[<ast.Name object at 0x7da1b1d563b0>, <ast.Name object at 0x7da1b1d570d0>]]] in starred[name[mods]] begin[:]
variable[module] assign[=] binary_operation[constant[%s.translation] <ast.Mod object at 0x7da2590d6920> name[app]]
variable[before_import_registry] assign[=] call[name[copy].copy, parameter[name[translator]._registry]]
<ast.Try object at 0x7da1b1d54340>
for taget[name[module]] in starred[name[TRANSLATION_FILES]] begin[:]
call[name[import_module], parameter[name[module]]]
if name[DEBUG] begin[:]
<ast.Try object at 0x7da1b1d545e0> | keyword[def] identifier[autodiscover] ():
literal[string]
keyword[import] identifier[os]
keyword[import] identifier[sys]
keyword[import] identifier[copy]
keyword[from] identifier[django] . identifier[utils] . identifier[module_loading] keyword[import] identifier[module_has_submodule]
keyword[from] identifier[modeltranslation] . identifier[translator] keyword[import] identifier[translator]
keyword[from] identifier[modeltranslation] . identifier[settings] keyword[import] identifier[TRANSLATION_FILES] , identifier[DEBUG]
keyword[from] identifier[importlib] keyword[import] identifier[import_module]
keyword[from] identifier[django] . identifier[apps] keyword[import] identifier[apps]
identifier[mods] =[( identifier[app_config] . identifier[name] , identifier[app_config] . identifier[module] ) keyword[for] identifier[app_config] keyword[in] identifier[apps] . identifier[get_app_configs] ()]
keyword[for] ( identifier[app] , identifier[mod] ) keyword[in] identifier[mods] :
identifier[module] = literal[string] % identifier[app]
identifier[before_import_registry] = identifier[copy] . identifier[copy] ( identifier[translator] . identifier[_registry] )
keyword[try] :
identifier[import_module] ( identifier[module] )
keyword[except] :
identifier[translator] . identifier[_registry] = identifier[before_import_registry]
keyword[if] identifier[module_has_submodule] ( identifier[mod] , literal[string] ):
keyword[raise]
keyword[for] identifier[module] keyword[in] identifier[TRANSLATION_FILES] :
identifier[import_module] ( identifier[module] )
keyword[if] identifier[DEBUG] :
keyword[try] :
keyword[if] identifier[sys] . identifier[argv] [ literal[int] ] keyword[in] ( literal[string] , literal[string] ):
identifier[models] = identifier[translator] . identifier[get_registered_models] ()
identifier[names] = literal[string] . identifier[join] ( identifier[m] . identifier[__name__] keyword[for] identifier[m] keyword[in] identifier[models] )
identifier[print] ( literal[string]
literal[string] %( identifier[len] ( identifier[models] ), identifier[names] , identifier[os] . identifier[getpid] ()))
keyword[except] identifier[IndexError] :
keyword[pass] | def autodiscover():
"""
Auto-discover INSTALLED_APPS translation.py modules and fail silently when
not present. This forces an import on them to register.
Also import explicit modules.
"""
import os
import sys
import copy
from django.utils.module_loading import module_has_submodule
from modeltranslation.translator import translator
from modeltranslation.settings import TRANSLATION_FILES, DEBUG
from importlib import import_module
from django.apps import apps
mods = [(app_config.name, app_config.module) for app_config in apps.get_app_configs()]
for (app, mod) in mods:
# Attempt to import the app's translation module.
module = '%s.translation' % app
before_import_registry = copy.copy(translator._registry)
try:
import_module(module) # depends on [control=['try'], data=[]]
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
translator._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an translation module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'translation'):
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
for module in TRANSLATION_FILES:
import_module(module) # depends on [control=['for'], data=['module']]
# In debug mode, print a list of registered models and pid to stdout.
# Note: Differing model order is fine, we don't rely on a particular
# order, as far as base classes are registered before subclasses.
if DEBUG:
try:
if sys.argv[1] in ('runserver', 'runserver_plus'):
models = translator.get_registered_models()
names = ', '.join((m.__name__ for m in models))
print('modeltranslation: Registered %d models for translation (%s) [pid: %d].' % (len(models), names, os.getpid())) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except IndexError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] |
def _proc_ellipse(self, tokens, filled):
        """Build and return the Ellipse component described by *tokens*.

        ``tokens`` supplies the origin (``x0``/``y0``) and size (``w``/``h``)
        of the ellipse; ``filled`` selects outline vs. filled rendering.
        """
        x_origin, y_origin = tokens["x0"], tokens["y0"]
        width, height = tokens["w"], tokens["h"]
        return Ellipse(pen=self.pen, x_origin=x_origin, y_origin=y_origin,
                       e_width=width, e_height=height, filled=filled)
constant[ Returns the components of an ellipse. ]
variable[component] assign[=] call[name[Ellipse], parameter[]]
return[name[component]] | keyword[def] identifier[_proc_ellipse] ( identifier[self] , identifier[tokens] , identifier[filled] ):
literal[string]
identifier[component] = identifier[Ellipse] ( identifier[pen] = identifier[self] . identifier[pen] ,
identifier[x_origin] = identifier[tokens] [ literal[string] ],
identifier[y_origin] = identifier[tokens] [ literal[string] ],
identifier[e_width] = identifier[tokens] [ literal[string] ],
identifier[e_height] = identifier[tokens] [ literal[string] ],
identifier[filled] = identifier[filled] )
keyword[return] identifier[component] | def _proc_ellipse(self, tokens, filled):
""" Returns the components of an ellipse. """
component = Ellipse(pen=self.pen, x_origin=tokens['x0'], y_origin=tokens['y0'], e_width=tokens['w'], e_height=tokens['h'], filled=filled)
return component |
def list_traces(
        self,
        project_id,
        view=None,
        page_size=None,
        start_time=None,
        end_time=None,
        filter_=None,
        order_by=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Returns of a list of traces that match the specified filter conditions.
        Example:
            >>> from google.cloud import trace_v1
            >>>
            >>> client = trace_v1.TraceServiceClient()
            >>>
            >>> # TODO: Initialize `project_id`:
            >>> project_id = ''
            >>>
            >>> # Iterate over all results
            >>> for element in client.list_traces(project_id):
            ...     # process element
            ...     pass
            >>>
            >>>
            >>> # Alternatively:
            >>>
            >>> # Iterate over results one page at a time
            >>> for page in client.list_traces(project_id).pages:
            ...     for element in page:
            ...         # process element
            ...         pass
        Args:
            project_id (str): ID of the Cloud project where the trace data is stored.
            view (~google.cloud.trace_v1.types.ViewType): Type of data returned for traces in the list. Optional. Default is
                ``MINIMAL``.
            page_size (int): The maximum number of resources contained in the
                underlying API response. If page streaming is performed per-
                resource, this parameter does not affect the return value. If page
                streaming is performed per-page, this determines the maximum number
                of resources in a page.
            start_time (Union[dict, ~google.cloud.trace_v1.types.Timestamp]): Start of the time interval (inclusive) during which the trace data was
                collected from the application.
                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.trace_v1.types.Timestamp`
            end_time (Union[dict, ~google.cloud.trace_v1.types.Timestamp]): End of the time interval (inclusive) during which the trace data was
                collected from the application.
                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.trace_v1.types.Timestamp`
            filter_ (str): An optional filter against labels for the request.
                By default, searches use prefix matching. To specify exact match,
                prepend a plus symbol (``+``) to the search term. Multiple terms are
                ANDed. Syntax:
                -  ``root:NAME_PREFIX`` or ``NAME_PREFIX``: Return traces where any root
                   span starts with ``NAME_PREFIX``.
                -  ``+root:NAME`` or ``+NAME``: Return traces where any root span's name
                   is exactly ``NAME``.
                -  ``span:NAME_PREFIX``: Return traces where any span starts with
                   ``NAME_PREFIX``.
                -  ``+span:NAME``: Return traces where any span's name is exactly
                   ``NAME``.
                -  ``latency:DURATION``: Return traces whose overall latency is greater
                   or equal to than ``DURATION``. Accepted units are nanoseconds
                   (``ns``), milliseconds (``ms``), and seconds (``s``). Default is
                   ``ms``. For example, ``latency:24ms`` returns traces whose overall
                   latency is greater than or equal to 24 milliseconds.
                -  ``label:LABEL_KEY``: Return all traces containing the specified label
                   key (exact match, case-sensitive) regardless of the key:value pair's
                   value (including empty values).
                -  ``LABEL_KEY:VALUE_PREFIX``: Return all traces containing the
                   specified label key (exact match, case-sensitive) whose value starts
                   with ``VALUE_PREFIX``. Both a key and a value must be specified.
                -  ``+LABEL_KEY:VALUE``: Return all traces containing a key:value pair
                   exactly matching the specified text. Both a key and a value must be
                   specified.
                -  ``method:VALUE``: Equivalent to ``/http/method:VALUE``.
                -  ``url:VALUE``: Equivalent to ``/http/url:VALUE``.
            order_by (str): Field used to sort the returned traces. Optional. Can be one of the
                following:
                -  ``trace_id``
                -  ``name`` (``name`` field of root span in the trace)
                -  ``duration`` (difference between ``end_time`` and ``start_time``
                   fields of the root span)
                -  ``start`` (``start_time`` field of the root span)
                Descending order can be specified by appending ``desc`` to the sort
                field (for example, ``name desc``).
                Only one sort field is permitted.
            retry (Optional[google.api_core.retry.Retry]):  A retry object used
                to retry requests. If ``None`` is specified, requests will not
                be retried.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.
        Returns:
            A :class:`~google.gax.PageIterator` instance. By default, this
            is an iterable of :class:`~google.cloud.trace_v1.types.Trace` instances.
            This object can also be configured to iterate over the pages
            of the response through the `options` parameter.
        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                    failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                    to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        # The wrapped callable is cached on first use so the retry/timeout
        # configuration is only resolved once per client.
        if "list_traces" not in self._inner_api_calls:
            self._inner_api_calls[
                "list_traces"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.list_traces,
                default_retry=self._method_configs["ListTraces"].retry,
                default_timeout=self._method_configs["ListTraces"].timeout,
                client_info=self._client_info,
            )
        # Assemble the request proto from the caller-supplied parameters.
        request = trace_pb2.ListTracesRequest(
            project_id=project_id,
            view=view,
            page_size=page_size,
            start_time=start_time,
            end_time=end_time,
            filter=filter_,
            order_by=order_by,
        )
        # Copy metadata into a mutable list so the routing header can be
        # appended without mutating the caller's sequence.
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        # Attach a routing header so the backend can route by project_id.
        # NOTE(review): building this list literal cannot raise
        # AttributeError; the try/except is generated-code boilerplate
        # and is kept as-is.
        try:
            routing_header = [("project_id", project_id)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)
        # Return a lazy page iterator; each page fetch re-invokes the
        # wrapped RPC with the recorded retry/timeout/metadata settings.
        iterator = google.api_core.page_iterator.GRPCIterator(
            client=None,
            method=functools.partial(
                self._inner_api_calls["list_traces"],
                retry=retry,
                timeout=timeout,
                metadata=metadata,
            ),
            request=request,
            items_field="traces",
            request_token_field="page_token",
            response_token_field="next_page_token",
        )
        return iterator
constant[
Returns of a list of traces that match the specified filter conditions.
Example:
>>> from google.cloud import trace_v1
>>>
>>> client = trace_v1.TraceServiceClient()
>>>
>>> # TODO: Initialize `project_id`:
>>> project_id = ''
>>>
>>> # Iterate over all results
>>> for element in client.list_traces(project_id):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_traces(project_id).pages:
... for element in page:
... # process element
... pass
Args:
project_id (str): ID of the Cloud project where the trace data is stored.
view (~google.cloud.trace_v1.types.ViewType): Type of data returned for traces in the list. Optional. Default is
``MINIMAL``.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
start_time (Union[dict, ~google.cloud.trace_v1.types.Timestamp]): Start of the time interval (inclusive) during which the trace data was
collected from the application.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.trace_v1.types.Timestamp`
end_time (Union[dict, ~google.cloud.trace_v1.types.Timestamp]): End of the time interval (inclusive) during which the trace data was
collected from the application.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.trace_v1.types.Timestamp`
filter_ (str): An optional filter against labels for the request.
By default, searches use prefix matching. To specify exact match,
prepend a plus symbol (``+``) to the search term. Multiple terms are
ANDed. Syntax:
- ``root:NAME_PREFIX`` or ``NAME_PREFIX``: Return traces where any root
span starts with ``NAME_PREFIX``.
- ``+root:NAME`` or ``+NAME``: Return traces where any root span's name
is exactly ``NAME``.
- ``span:NAME_PREFIX``: Return traces where any span starts with
``NAME_PREFIX``.
- ``+span:NAME``: Return traces where any span's name is exactly
``NAME``.
- ``latency:DURATION``: Return traces whose overall latency is greater
or equal to than ``DURATION``. Accepted units are nanoseconds
(``ns``), milliseconds (``ms``), and seconds (``s``). Default is
``ms``. For example, ``latency:24ms`` returns traces whose overall
latency is greater than or equal to 24 milliseconds.
- ``label:LABEL_KEY``: Return all traces containing the specified label
key (exact match, case-sensitive) regardless of the key:value pair's
value (including empty values).
- ``LABEL_KEY:VALUE_PREFIX``: Return all traces containing the
specified label key (exact match, case-sensitive) whose value starts
with ``VALUE_PREFIX``. Both a key and a value must be specified.
- ``+LABEL_KEY:VALUE``: Return all traces containing a key:value pair
exactly matching the specified text. Both a key and a value must be
specified.
- ``method:VALUE``: Equivalent to ``/http/method:VALUE``.
- ``url:VALUE``: Equivalent to ``/http/url:VALUE``.
order_by (str): Field used to sort the returned traces. Optional. Can be one of the
following:
- ``trace_id``
- ``name`` (``name`` field of root span in the trace)
- ``duration`` (difference between ``end_time`` and ``start_time``
fields of the root span)
- ``start`` (``start_time`` field of the root span)
Descending order can be specified by appending ``desc`` to the sort
field (for example, ``name desc``).
Only one sort field is permitted.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~google.cloud.trace_v1.types.Trace` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
]
if compare[constant[list_traces] <ast.NotIn object at 0x7da2590d7190> name[self]._inner_api_calls] begin[:]
call[name[self]._inner_api_calls][constant[list_traces]] assign[=] call[name[google].api_core.gapic_v1.method.wrap_method, parameter[name[self].transport.list_traces]]
variable[request] assign[=] call[name[trace_pb2].ListTracesRequest, parameter[]]
if compare[name[metadata] is constant[None]] begin[:]
variable[metadata] assign[=] list[[]]
variable[metadata] assign[=] call[name[list], parameter[name[metadata]]]
<ast.Try object at 0x7da207f03910>
variable[iterator] assign[=] call[name[google].api_core.page_iterator.GRPCIterator, parameter[]]
return[name[iterator]] | keyword[def] identifier[list_traces] (
identifier[self] ,
identifier[project_id] ,
identifier[view] = keyword[None] ,
identifier[page_size] = keyword[None] ,
identifier[start_time] = keyword[None] ,
identifier[end_time] = keyword[None] ,
identifier[filter_] = keyword[None] ,
identifier[order_by] = keyword[None] ,
identifier[retry] = identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[DEFAULT] ,
identifier[timeout] = identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[DEFAULT] ,
identifier[metadata] = keyword[None] ,
):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[_inner_api_calls] :
identifier[self] . identifier[_inner_api_calls] [
literal[string]
]= identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[wrap_method] (
identifier[self] . identifier[transport] . identifier[list_traces] ,
identifier[default_retry] = identifier[self] . identifier[_method_configs] [ literal[string] ]. identifier[retry] ,
identifier[default_timeout] = identifier[self] . identifier[_method_configs] [ literal[string] ]. identifier[timeout] ,
identifier[client_info] = identifier[self] . identifier[_client_info] ,
)
identifier[request] = identifier[trace_pb2] . identifier[ListTracesRequest] (
identifier[project_id] = identifier[project_id] ,
identifier[view] = identifier[view] ,
identifier[page_size] = identifier[page_size] ,
identifier[start_time] = identifier[start_time] ,
identifier[end_time] = identifier[end_time] ,
identifier[filter] = identifier[filter_] ,
identifier[order_by] = identifier[order_by] ,
)
keyword[if] identifier[metadata] keyword[is] keyword[None] :
identifier[metadata] =[]
identifier[metadata] = identifier[list] ( identifier[metadata] )
keyword[try] :
identifier[routing_header] =[( literal[string] , identifier[project_id] )]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[else] :
identifier[routing_metadata] = identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[routing_header] . identifier[to_grpc_metadata] (
identifier[routing_header]
)
identifier[metadata] . identifier[append] ( identifier[routing_metadata] )
identifier[iterator] = identifier[google] . identifier[api_core] . identifier[page_iterator] . identifier[GRPCIterator] (
identifier[client] = keyword[None] ,
identifier[method] = identifier[functools] . identifier[partial] (
identifier[self] . identifier[_inner_api_calls] [ literal[string] ],
identifier[retry] = identifier[retry] ,
identifier[timeout] = identifier[timeout] ,
identifier[metadata] = identifier[metadata] ,
),
identifier[request] = identifier[request] ,
identifier[items_field] = literal[string] ,
identifier[request_token_field] = literal[string] ,
identifier[response_token_field] = literal[string] ,
)
keyword[return] identifier[iterator] | def list_traces(self, project_id, view=None, page_size=None, start_time=None, end_time=None, filter_=None, order_by=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None):
"""
Returns of a list of traces that match the specified filter conditions.
Example:
>>> from google.cloud import trace_v1
>>>
>>> client = trace_v1.TraceServiceClient()
>>>
>>> # TODO: Initialize `project_id`:
>>> project_id = ''
>>>
>>> # Iterate over all results
>>> for element in client.list_traces(project_id):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_traces(project_id).pages:
... for element in page:
... # process element
... pass
Args:
project_id (str): ID of the Cloud project where the trace data is stored.
view (~google.cloud.trace_v1.types.ViewType): Type of data returned for traces in the list. Optional. Default is
``MINIMAL``.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
start_time (Union[dict, ~google.cloud.trace_v1.types.Timestamp]): Start of the time interval (inclusive) during which the trace data was
collected from the application.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.trace_v1.types.Timestamp`
end_time (Union[dict, ~google.cloud.trace_v1.types.Timestamp]): End of the time interval (inclusive) during which the trace data was
collected from the application.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.trace_v1.types.Timestamp`
filter_ (str): An optional filter against labels for the request.
By default, searches use prefix matching. To specify exact match,
prepend a plus symbol (``+``) to the search term. Multiple terms are
ANDed. Syntax:
- ``root:NAME_PREFIX`` or ``NAME_PREFIX``: Return traces where any root
span starts with ``NAME_PREFIX``.
- ``+root:NAME`` or ``+NAME``: Return traces where any root span's name
is exactly ``NAME``.
- ``span:NAME_PREFIX``: Return traces where any span starts with
``NAME_PREFIX``.
- ``+span:NAME``: Return traces where any span's name is exactly
``NAME``.
- ``latency:DURATION``: Return traces whose overall latency is greater
or equal to than ``DURATION``. Accepted units are nanoseconds
(``ns``), milliseconds (``ms``), and seconds (``s``). Default is
``ms``. For example, ``latency:24ms`` returns traces whose overall
latency is greater than or equal to 24 milliseconds.
- ``label:LABEL_KEY``: Return all traces containing the specified label
key (exact match, case-sensitive) regardless of the key:value pair's
value (including empty values).
- ``LABEL_KEY:VALUE_PREFIX``: Return all traces containing the
specified label key (exact match, case-sensitive) whose value starts
with ``VALUE_PREFIX``. Both a key and a value must be specified.
- ``+LABEL_KEY:VALUE``: Return all traces containing a key:value pair
exactly matching the specified text. Both a key and a value must be
specified.
- ``method:VALUE``: Equivalent to ``/http/method:VALUE``.
- ``url:VALUE``: Equivalent to ``/http/url:VALUE``.
order_by (str): Field used to sort the returned traces. Optional. Can be one of the
following:
- ``trace_id``
- ``name`` (``name`` field of root span in the trace)
- ``duration`` (difference between ``end_time`` and ``start_time``
fields of the root span)
- ``start`` (``start_time`` field of the root span)
Descending order can be specified by appending ``desc`` to the sort
field (for example, ``name desc``).
Only one sort field is permitted.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~google.cloud.trace_v1.types.Trace` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'list_traces' not in self._inner_api_calls:
self._inner_api_calls['list_traces'] = google.api_core.gapic_v1.method.wrap_method(self.transport.list_traces, default_retry=self._method_configs['ListTraces'].retry, default_timeout=self._method_configs['ListTraces'].timeout, client_info=self._client_info) # depends on [control=['if'], data=[]]
request = trace_pb2.ListTracesRequest(project_id=project_id, view=view, page_size=page_size, start_time=start_time, end_time=end_time, filter=filter_, order_by=order_by)
if metadata is None:
metadata = [] # depends on [control=['if'], data=['metadata']]
metadata = list(metadata)
try:
routing_header = [('project_id', project_id)] # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(routing_header)
metadata.append(routing_metadata)
iterator = google.api_core.page_iterator.GRPCIterator(client=None, method=functools.partial(self._inner_api_calls['list_traces'], retry=retry, timeout=timeout, metadata=metadata), request=request, items_field='traces', request_token_field='page_token', response_token_field='next_page_token')
return iterator |
def build(self, builder):
"""Build XML by appending to builder"""
params = {}
if self.study_oid:
params.update(dict(StudyOID=self.study_oid))
builder.start("AdminData", params)
for user in self.users:
user.build(builder)
for location in self.locations:
location.build(builder)
builder.end("AdminData") | def function[build, parameter[self, builder]]:
constant[Build XML by appending to builder]
variable[params] assign[=] dictionary[[], []]
if name[self].study_oid begin[:]
call[name[params].update, parameter[call[name[dict], parameter[]]]]
call[name[builder].start, parameter[constant[AdminData], name[params]]]
for taget[name[user]] in starred[name[self].users] begin[:]
call[name[user].build, parameter[name[builder]]]
for taget[name[location]] in starred[name[self].locations] begin[:]
call[name[location].build, parameter[name[builder]]]
call[name[builder].end, parameter[constant[AdminData]]] | keyword[def] identifier[build] ( identifier[self] , identifier[builder] ):
literal[string]
identifier[params] ={}
keyword[if] identifier[self] . identifier[study_oid] :
identifier[params] . identifier[update] ( identifier[dict] ( identifier[StudyOID] = identifier[self] . identifier[study_oid] ))
identifier[builder] . identifier[start] ( literal[string] , identifier[params] )
keyword[for] identifier[user] keyword[in] identifier[self] . identifier[users] :
identifier[user] . identifier[build] ( identifier[builder] )
keyword[for] identifier[location] keyword[in] identifier[self] . identifier[locations] :
identifier[location] . identifier[build] ( identifier[builder] )
identifier[builder] . identifier[end] ( literal[string] ) | def build(self, builder):
"""Build XML by appending to builder"""
params = {}
if self.study_oid:
params.update(dict(StudyOID=self.study_oid)) # depends on [control=['if'], data=[]]
builder.start('AdminData', params)
for user in self.users:
user.build(builder) # depends on [control=['for'], data=['user']]
for location in self.locations:
location.build(builder) # depends on [control=['for'], data=['location']]
builder.end('AdminData') |
def get_color_for_name(module_name):
"""
Create a custom color for a given string.
This allows the screenshots to each have a unique color but also for that
color to be consistent.
"""
# all screenshots of the same module should be a uniform color
module_name = module_name.split("-")[0]
saturation = 0.5
value = 243.2
try:
# we must be bytes to allow the md5 hash to be calculated
module_name = module_name.encode("utf-8")
except AttributeError:
pass
hue = int(md5(module_name).hexdigest(), 16) / 16 ** 32
hue *= 6
hue += 3.708
r, g, b = (
(
value,
value - value * saturation * abs(1 - hue % 2),
value - value * saturation,
)
* 3
)[5 ** int(hue) // 3 % 3 :: int(hue) % 2 + 1][:3]
return "#" + "%02x" * 3 % (int(r), int(g), int(b)) | def function[get_color_for_name, parameter[module_name]]:
constant[
Create a custom color for a given string.
This allows the screenshots to each have a unique color but also for that
color to be consistent.
]
variable[module_name] assign[=] call[call[name[module_name].split, parameter[constant[-]]]][constant[0]]
variable[saturation] assign[=] constant[0.5]
variable[value] assign[=] constant[243.2]
<ast.Try object at 0x7da1b1d0d960>
variable[hue] assign[=] binary_operation[call[name[int], parameter[call[call[name[md5], parameter[name[module_name]]].hexdigest, parameter[]], constant[16]]] / binary_operation[constant[16] ** constant[32]]]
<ast.AugAssign object at 0x7da1b1d0c640>
<ast.AugAssign object at 0x7da1b1d0d690>
<ast.Tuple object at 0x7da1b1d0d780> assign[=] call[call[binary_operation[tuple[[<ast.Name object at 0x7da1b1d0ff10>, <ast.BinOp object at 0x7da1b1d0cc40>, <ast.BinOp object at 0x7da1b1d0ef20>]] * constant[3]]][<ast.Slice object at 0x7da1b1d0ee00>]][<ast.Slice object at 0x7da1b1d0c7f0>]
return[binary_operation[constant[#] + binary_operation[binary_operation[constant[%02x] * constant[3]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b1d0eb30>, <ast.Call object at 0x7da1b1d0eb00>, <ast.Call object at 0x7da1b1d0c5e0>]]]]] | keyword[def] identifier[get_color_for_name] ( identifier[module_name] ):
literal[string]
identifier[module_name] = identifier[module_name] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[saturation] = literal[int]
identifier[value] = literal[int]
keyword[try] :
identifier[module_name] = identifier[module_name] . identifier[encode] ( literal[string] )
keyword[except] identifier[AttributeError] :
keyword[pass]
identifier[hue] = identifier[int] ( identifier[md5] ( identifier[module_name] ). identifier[hexdigest] (), literal[int] )/ literal[int] ** literal[int]
identifier[hue] *= literal[int]
identifier[hue] += literal[int]
identifier[r] , identifier[g] , identifier[b] =(
(
identifier[value] ,
identifier[value] - identifier[value] * identifier[saturation] * identifier[abs] ( literal[int] - identifier[hue] % literal[int] ),
identifier[value] - identifier[value] * identifier[saturation] ,
)
* literal[int]
)[ literal[int] ** identifier[int] ( identifier[hue] )// literal[int] % literal[int] :: identifier[int] ( identifier[hue] )% literal[int] + literal[int] ][: literal[int] ]
keyword[return] literal[string] + literal[string] * literal[int] %( identifier[int] ( identifier[r] ), identifier[int] ( identifier[g] ), identifier[int] ( identifier[b] )) | def get_color_for_name(module_name):
"""
Create a custom color for a given string.
This allows the screenshots to each have a unique color but also for that
color to be consistent.
"""
# all screenshots of the same module should be a uniform color
module_name = module_name.split('-')[0]
saturation = 0.5
value = 243.2
try:
# we must be bytes to allow the md5 hash to be calculated
module_name = module_name.encode('utf-8') # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
hue = int(md5(module_name).hexdigest(), 16) / 16 ** 32
hue *= 6
hue += 3.708
(r, g, b) = ((value, value - value * saturation * abs(1 - hue % 2), value - value * saturation) * 3)[5 ** int(hue) // 3 % 3::int(hue) % 2 + 1][:3]
return '#' + '%02x' * 3 % (int(r), int(g), int(b)) |
def _AtildeTerm(N, h, m, k, dW, Km0, Pm0):
"""kth term in the sum for Atilde (Wiktorsson2001 p481, 1st eqn)"""
M = m*(m-1)//2
Xk = np.random.normal(0.0, 1.0, (N, m, 1))
Yk = np.random.normal(0.0, 1.0, (N, m, 1))
factor1 = np.dot(Km0, Pm0 - np.eye(m**2))
factor1 = broadcast_to(factor1, (N, M, m**2))
factor2 = _kp(Yk + np.sqrt(2.0/h)*dW, Xk)
return _dot(factor1, factor2)/k | def function[_AtildeTerm, parameter[N, h, m, k, dW, Km0, Pm0]]:
constant[kth term in the sum for Atilde (Wiktorsson2001 p481, 1st eqn)]
variable[M] assign[=] binary_operation[binary_operation[name[m] * binary_operation[name[m] - constant[1]]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]
variable[Xk] assign[=] call[name[np].random.normal, parameter[constant[0.0], constant[1.0], tuple[[<ast.Name object at 0x7da1b12abac0>, <ast.Name object at 0x7da1b12a9780>, <ast.Constant object at 0x7da1b12a9ea0>]]]]
variable[Yk] assign[=] call[name[np].random.normal, parameter[constant[0.0], constant[1.0], tuple[[<ast.Name object at 0x7da1b12a8d90>, <ast.Name object at 0x7da1b12a8250>, <ast.Constant object at 0x7da1b12abeb0>]]]]
variable[factor1] assign[=] call[name[np].dot, parameter[name[Km0], binary_operation[name[Pm0] - call[name[np].eye, parameter[binary_operation[name[m] ** constant[2]]]]]]]
variable[factor1] assign[=] call[name[broadcast_to], parameter[name[factor1], tuple[[<ast.Name object at 0x7da1b119a410>, <ast.Name object at 0x7da1b119ab60>, <ast.BinOp object at 0x7da1b119ac20>]]]]
variable[factor2] assign[=] call[name[_kp], parameter[binary_operation[name[Yk] + binary_operation[call[name[np].sqrt, parameter[binary_operation[constant[2.0] / name[h]]]] * name[dW]]], name[Xk]]]
return[binary_operation[call[name[_dot], parameter[name[factor1], name[factor2]]] / name[k]]] | keyword[def] identifier[_AtildeTerm] ( identifier[N] , identifier[h] , identifier[m] , identifier[k] , identifier[dW] , identifier[Km0] , identifier[Pm0] ):
literal[string]
identifier[M] = identifier[m] *( identifier[m] - literal[int] )// literal[int]
identifier[Xk] = identifier[np] . identifier[random] . identifier[normal] ( literal[int] , literal[int] ,( identifier[N] , identifier[m] , literal[int] ))
identifier[Yk] = identifier[np] . identifier[random] . identifier[normal] ( literal[int] , literal[int] ,( identifier[N] , identifier[m] , literal[int] ))
identifier[factor1] = identifier[np] . identifier[dot] ( identifier[Km0] , identifier[Pm0] - identifier[np] . identifier[eye] ( identifier[m] ** literal[int] ))
identifier[factor1] = identifier[broadcast_to] ( identifier[factor1] ,( identifier[N] , identifier[M] , identifier[m] ** literal[int] ))
identifier[factor2] = identifier[_kp] ( identifier[Yk] + identifier[np] . identifier[sqrt] ( literal[int] / identifier[h] )* identifier[dW] , identifier[Xk] )
keyword[return] identifier[_dot] ( identifier[factor1] , identifier[factor2] )/ identifier[k] | def _AtildeTerm(N, h, m, k, dW, Km0, Pm0):
"""kth term in the sum for Atilde (Wiktorsson2001 p481, 1st eqn)"""
M = m * (m - 1) // 2
Xk = np.random.normal(0.0, 1.0, (N, m, 1))
Yk = np.random.normal(0.0, 1.0, (N, m, 1))
factor1 = np.dot(Km0, Pm0 - np.eye(m ** 2))
factor1 = broadcast_to(factor1, (N, M, m ** 2))
factor2 = _kp(Yk + np.sqrt(2.0 / h) * dW, Xk)
return _dot(factor1, factor2) / k |
def send_quick_chat(queue_holder, index, team, team_only, quick_chat):
"""
Sends a quick chat to the general queue for everyone to pull from
:param queue_holder:
:param index: The index of the player sending the message
:param team: The team of the player sending the message
:param team_only: if the message is team only
:param quick_chat: The contents of the quick chat
:return:
"""
queue_holder["output"].put((index, team, team_only, quick_chat)) | def function[send_quick_chat, parameter[queue_holder, index, team, team_only, quick_chat]]:
constant[
Sends a quick chat to the general queue for everyone to pull from
:param queue_holder:
:param index: The index of the player sending the message
:param team: The team of the player sending the message
:param team_only: if the message is team only
:param quick_chat: The contents of the quick chat
:return:
]
call[call[name[queue_holder]][constant[output]].put, parameter[tuple[[<ast.Name object at 0x7da2044c01f0>, <ast.Name object at 0x7da2041d90c0>, <ast.Name object at 0x7da2041d9c60>, <ast.Name object at 0x7da2041da9b0>]]]] | keyword[def] identifier[send_quick_chat] ( identifier[queue_holder] , identifier[index] , identifier[team] , identifier[team_only] , identifier[quick_chat] ):
literal[string]
identifier[queue_holder] [ literal[string] ]. identifier[put] (( identifier[index] , identifier[team] , identifier[team_only] , identifier[quick_chat] )) | def send_quick_chat(queue_holder, index, team, team_only, quick_chat):
"""
Sends a quick chat to the general queue for everyone to pull from
:param queue_holder:
:param index: The index of the player sending the message
:param team: The team of the player sending the message
:param team_only: if the message is team only
:param quick_chat: The contents of the quick chat
:return:
"""
queue_holder['output'].put((index, team, team_only, quick_chat)) |
def bots_delete(self, bot):
"""
Delete existing bot
:param bot: bot to delete
:type bot: Bot
"""
self.client.bots.__getattr__(bot.name).__call__(_method="DELETE", _params=dict(botName=bot.name)) | def function[bots_delete, parameter[self, bot]]:
constant[
Delete existing bot
:param bot: bot to delete
:type bot: Bot
]
call[call[name[self].client.bots.__getattr__, parameter[name[bot].name]].__call__, parameter[]] | keyword[def] identifier[bots_delete] ( identifier[self] , identifier[bot] ):
literal[string]
identifier[self] . identifier[client] . identifier[bots] . identifier[__getattr__] ( identifier[bot] . identifier[name] ). identifier[__call__] ( identifier[_method] = literal[string] , identifier[_params] = identifier[dict] ( identifier[botName] = identifier[bot] . identifier[name] )) | def bots_delete(self, bot):
"""
Delete existing bot
:param bot: bot to delete
:type bot: Bot
"""
self.client.bots.__getattr__(bot.name).__call__(_method='DELETE', _params=dict(botName=bot.name)) |
def initialize_acceptance_criteria(**kwargs):
'''
initialize acceptance criteria with NULL values for thellier_gui and demag_gui
acceptance criteria format is doctionaries:
acceptance_criteria={}
acceptance_criteria[crit]={}
acceptance_criteria[crit]['category']=
acceptance_criteria[crit]['criterion_name']=
acceptance_criteria[crit]['value']=
acceptance_criteria[crit]['threshold_type']
acceptance_criteria[crit]['decimal_points']
'category':
'DE-SPEC','DE-SAMP'..etc
'criterion_name':
MagIC name
'value':
a number (for 'regular criteria')
a string (for 'flag')
1 for True (if criteria is bullean)
0 for False (if criteria is bullean)
-999 means N/A
'threshold_type':
'low'for low threshold value
'high'for high threshold value
[flag1.flag2]: for flags
'bool' for boolean flags (can be 'g','b' or True/Flase or 1/0)
'decimal_points':
number of decimal points in rounding
(this is used in displaying criteria in the dialog box)
-999 means Exponent with 3 descimal points for floats and string for string
'''
acceptance_criteria = {}
# --------------------------------
# 'DE-SPEC'
# --------------------------------
# low cutoff value
category = 'DE-SPEC'
for crit in ['specimen_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
acceptance_criteria[crit]['decimal_points'] = 0
# high cutoff value
category = 'DE-SPEC'
for crit in ['specimen_mad', 'specimen_dang', 'specimen_alpha95']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "high"
acceptance_criteria[crit]['decimal_points'] = 1
# flag
for crit in ['specimen_direction_type']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
if crit == 'specimen_direction_type':
acceptance_criteria[crit]['threshold_type'] = ['l', 'p']
if crit == 'specimen_polarity':
acceptance_criteria[crit]['threshold_type'] = [
'n', 'r', 't', 'e', 'i']
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'DE-SAMP'
# --------------------------------
# low cutoff value
category = 'DE-SAMP'
for crit in ['sample_n', 'sample_n_lines', 'sample_n_planes']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
acceptance_criteria[crit]['decimal_points'] = 0
# high cutoff value
category = 'DE-SAMP'
for crit in ['sample_r', 'sample_alpha95', 'sample_sigma', 'sample_k', 'sample_tilt_correction']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "high"
if crit in ['sample_tilt_correction']:
acceptance_criteria[crit]['decimal_points'] = 0
elif crit in ['sample_alpha95']:
acceptance_criteria[crit]['decimal_points'] = 1
else:
acceptance_criteria[crit]['decimal_points'] = -999
# flag
for crit in ['sample_direction_type', 'sample_polarity']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
if crit == 'sample_direction_type':
acceptance_criteria[crit]['threshold_type'] = ['l', 'p']
if crit == 'sample_polarity':
acceptance_criteria[crit]['threshold_type'] = [
'n', 'r', 't', 'e', 'i']
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'DE-SITE'
# --------------------------------
# low cutoff value
category = 'DE-SITE'
for crit in ['site_n', 'site_n_lines', 'site_n_planes']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
acceptance_criteria[crit]['decimal_points'] = 0
# high cutoff value
for crit in ['site_k', 'site_r', 'site_alpha95', 'site_sigma', 'site_tilt_correction']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "high"
if crit in ['site_tilt_correction']:
acceptance_criteria[crit]['decimal_points'] = 0
else:
acceptance_criteria[crit]['decimal_points'] = 1
# flag
for crit in ['site_direction_type', 'site_polarity']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
if crit == 'site_direction_type':
acceptance_criteria[crit]['threshold_type'] = ['l', 'p']
if crit == 'site_polarity':
acceptance_criteria[crit]['threshold_type'] = [
'n', 'r', 't', 'e', 'i']
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'DE-STUDY'
# --------------------------------
category = 'DE-STUDY'
# low cutoff value
for crit in ['average_k', 'average_n', 'average_nn', 'average_nnn', 'average_r']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
if crit in ['average_n', 'average_nn', 'average_nnn']:
acceptance_criteria[crit]['decimal_points'] = 0
elif crit in ['average_alpha95']:
acceptance_criteria[crit]['decimal_points'] = 1
else:
acceptance_criteria[crit]['decimal_points'] = -999
# high cutoff value
for crit in ['average_alpha95', 'average_sigma']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "high"
if crit in ['average_alpha95']:
acceptance_criteria[crit]['decimal_points'] = 1
else:
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'IE-SPEC' (a long list from SPD.v.1.0)
# --------------------------------
category = 'IE-SPEC'
# low cutoff value
for crit in ['specimen_int_n', 'specimen_f', 'specimen_fvds', 'specimen_frac', 'specimen_q', 'specimen_w', 'specimen_r_sq', 'specimen_int_ptrm_n',
'specimen_int_ptrm_tail_n', 'specimen_ac_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
acceptance_criteria[crit]['decimal_points'] = 0
if crit in ['specimen_int_n', 'specimen_int_ptrm_n', 'specimen_int_ptrm_tail_n', 'specimen_ac_n']:
acceptance_criteria[crit]['decimal_points'] = 0
elif crit in ['specimen_f', 'specimen_fvds', 'specimen_frac', 'specimen_q']:
acceptance_criteria[crit]['decimal_points'] = 2
else:
acceptance_criteria[crit]['decimal_points'] = -999
# high cutoff value
for crit in ['specimen_b_sigma', 'specimen_b_beta', 'specimen_g', 'specimen_gmax', 'specimen_k', 'specimen_k_sse', 'specimen_k_prime', 'specimen_k_prime_sse',
'specimen_coeff_det_sq', 'specimen_z', 'specimen_z_md', 'specimen_int_mad', 'specimen_int_mad_anc', 'specimen_int_alpha', 'specimen_alpha', 'specimen_alpha_prime',
'specimen_theta', 'specimen_int_dang', 'specimen_int_crm', 'specimen_ptrm', 'specimen_dck', 'specimen_drat', 'specimen_maxdev', 'specimen_cdrat',
'specimen_drats', 'specimen_mdrat', 'specimen_mdev', 'specimen_dpal', 'specimen_tail_drat', 'specimen_dtr', 'specimen_md', 'specimen_dt', 'specimen_dac', 'specimen_gamma']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "high"
if crit in ['specimen_int_mad', 'specimen_int_mad_anc', 'specimen_int_dang', 'specimen_drat', 'specimen_cdrat', 'specimen_drats', 'specimen_tail_drat', 'specimen_dtr', 'specimen_md', 'specimen_dac', 'specimen_gamma']:
acceptance_criteria[crit]['decimal_points'] = 1
elif crit in ['specimen_gmax']:
acceptance_criteria[crit]['decimal_points'] = 2
elif crit in ['specimen_b_sigma', 'specimen_b_beta', 'specimen_g', 'specimen_k', 'specimen_k_prime']:
acceptance_criteria[crit]['decimal_points'] = 3
else:
acceptance_criteria[crit]['decimal_points'] = -999
# flags
for crit in ['specimen_scat']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'bool'
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'IE-SAMP'
# --------------------------------
category = 'IE-SAMP'
# low cutoff value
for crit in ['sample_int_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
acceptance_criteria[crit]['decimal_points'] = 0
# high cutoff value
for crit in ['sample_int_rel_sigma', 'sample_int_rel_sigma_perc', 'sample_int_sigma', 'sample_int_sigma_perc']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "high"
if crit in ['sample_int_rel_sigma_perc', 'sample_int_sigma_perc']:
acceptance_criteria[crit]['decimal_points'] = 1
else:
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'IE-SITE'
# --------------------------------
category = 'IE-SITE'
# low cutoff value
for crit in ['site_int_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
acceptance_criteria[crit]['decimal_points'] = 0
# high cutoff value
for crit in ['site_int_rel_sigma', 'site_int_rel_sigma_perc', 'site_int_sigma', 'site_int_sigma_perc']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "high"
if crit in ['site_int_rel_sigma_perc', 'site_int_sigma_perc']:
acceptance_criteria[crit]['decimal_points'] = 1
else:
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'IE-STUDY'
# --------------------------------
category = 'IE-STUDY'
# low cutoff value
for crit in ['average_int_n', 'average_int_n', 'average_int_nn', 'average_int_nnn', ]:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
acceptance_criteria[crit]['decimal_points'] = 0
# high cutoff value
for crit in ['average_int_rel_sigma', 'average_int_rel_sigma_perc', 'average_int_sigma']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "high"
if crit in ['average_int_rel_sigma_perc']:
acceptance_criteria[crit]['decimal_points'] = 1
else:
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'NPOLE'
# --------------------------------
category = 'NPOLE'
# flags
for crit in ['site_polarity']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = ['n', 'r']
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'NPOLE'
# --------------------------------
category = 'RPOLE'
# flags
for crit in ['site_polarity']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = ['n', 'r']
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'VADM'
# --------------------------------
category = 'VADM'
# low cutoff value
for crit in ['vadm_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
if crit in ['vadm_n']:
acceptance_criteria[crit]['decimal_points'] = 0
else:
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'VADM'
# --------------------------------
category = 'VADM'
# low cutoff value
for crit in ['vadm_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
acceptance_criteria[crit]['decimal_points'] = 0
# high cutoff value
for crit in ['vadm_sigma']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'VADM'
# --------------------------------
category = 'VDM'
# low cutoff value
for crit in ['vdm_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
acceptance_criteria[crit]['decimal_points'] = 0
# high cutoff value
for crit in ['vdm_sigma']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'VGP'
# --------------------------------
category = 'VDM'
# low cutoff value
for crit in ['vgp_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
acceptance_criteria[crit]['decimal_points'] = 0
# high cutoff value
for crit in ['vgp_alpha95', 'vgp_dm', 'vgp_dp', 'vgp_sigma']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
if crit in ['vgp_alpha95']:
acceptance_criteria[crit]['decimal_points', 'vgp_dm', 'vgp_dp'] = 1
else:
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'AGE'
# --------------------------------
category = 'AGE'
# low cutoff value
for crit in ['average_age_min']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "low"
acceptance_criteria[crit]['decimal_points'] = -999
# high cutoff value
for crit in ['average_age_max', 'average_age_sigma']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "high"
acceptance_criteria[crit]['decimal_points'] = -999
# flags
for crit in ['average_age_unit']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = [
'Ga', 'Ka', 'Ma', 'Years AD (+/-)', 'Years BP', 'Years Cal AD (+/-)', 'Years Cal BP']
acceptance_criteria[crit]['decimal_points'] = -999
# --------------------------------
# 'ANI'
# --------------------------------
category = 'ANI'
# high cutoff value
for crit in ['anisotropy_alt', 'sample_aniso_mean', 'site_aniso_mean']: # value is in precent
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = "high"
acceptance_criteria[crit]['decimal_points'] = 3
# flags
for crit in ['specimen_aniso_ftest_flag']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'bool'
acceptance_criteria[crit]['decimal_points'] = -999
return(acceptance_criteria) | def function[initialize_acceptance_criteria, parameter[]]:
constant[
initialize acceptance criteria with NULL values for thellier_gui and demag_gui
acceptance criteria format is doctionaries:
acceptance_criteria={}
acceptance_criteria[crit]={}
acceptance_criteria[crit]['category']=
acceptance_criteria[crit]['criterion_name']=
acceptance_criteria[crit]['value']=
acceptance_criteria[crit]['threshold_type']
acceptance_criteria[crit]['decimal_points']
'category':
'DE-SPEC','DE-SAMP'..etc
'criterion_name':
MagIC name
'value':
a number (for 'regular criteria')
a string (for 'flag')
1 for True (if criteria is bullean)
0 for False (if criteria is bullean)
-999 means N/A
'threshold_type':
'low'for low threshold value
'high'for high threshold value
[flag1.flag2]: for flags
'bool' for boolean flags (can be 'g','b' or True/Flase or 1/0)
'decimal_points':
number of decimal points in rounding
(this is used in displaying criteria in the dialog box)
-999 means Exponent with 3 descimal points for floats and string for string
]
variable[acceptance_criteria] assign[=] dictionary[[], []]
variable[category] assign[=] constant[DE-SPEC]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da1b04a4340>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da1b04a4850>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
variable[category] assign[=] constant[DE-SPEC]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da1b04a4c70>, <ast.Constant object at 0x7da1b04a4ca0>, <ast.Constant object at 0x7da1b04a4cd0>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da1b04a51e0>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[high]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[1]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da1b04a5570>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da1b04a5a50>
if compare[name[crit] equal[==] constant[specimen_direction_type]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] list[[<ast.Constant object at 0x7da1b04a5cc0>, <ast.Constant object at 0x7da1b04a5cf0>]]
if compare[name[crit] equal[==] constant[specimen_polarity]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] list[[<ast.Constant object at 0x7da1b04a5f60>, <ast.Constant object at 0x7da1b04a5f90>, <ast.Constant object at 0x7da1b04a5fc0>, <ast.Constant object at 0x7da1b04a5ff0>, <ast.Constant object at 0x7da1b04a6020>]]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] <ast.UnaryOp object at 0x7da1b04a6170>
variable[category] assign[=] constant[DE-SAMP]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da1b04a62f0>, <ast.Constant object at 0x7da1b04a6320>, <ast.Constant object at 0x7da1b04a6350>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da1b04a6860>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
variable[category] assign[=] constant[DE-SAMP]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da1b04a6cb0>, <ast.Constant object at 0x7da1b04a6ce0>, <ast.Constant object at 0x7da1b04a6d10>, <ast.Constant object at 0x7da1b04a6d40>, <ast.Constant object at 0x7da1b04a6d70>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da1b04a7280>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[high]
if compare[name[crit] in list[[<ast.Constant object at 0x7da1b04a74f0>]]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da1b049c910>, <ast.Constant object at 0x7da1b049c040>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da1b049c490>
if compare[name[crit] equal[==] constant[sample_direction_type]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] list[[<ast.Constant object at 0x7da1b049c6d0>, <ast.Constant object at 0x7da1b049c790>]]
if compare[name[crit] equal[==] constant[sample_polarity]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] list[[<ast.Constant object at 0x7da1b049cf70>, <ast.Constant object at 0x7da1b049cdf0>, <ast.Constant object at 0x7da1b049ce80>, <ast.Constant object at 0x7da1b049d0f0>, <ast.Constant object at 0x7da1b049cf10>]]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] <ast.UnaryOp object at 0x7da1b049d150>
variable[category] assign[=] constant[DE-SITE]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da1b049d120>, <ast.Constant object at 0x7da1b049d2a0>, <ast.Constant object at 0x7da1b049cf40>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da1b049dcf0>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da1b049ded0>, <ast.Constant object at 0x7da1b049dea0>, <ast.Constant object at 0x7da1b049de40>, <ast.Constant object at 0x7da1b049de10>, <ast.Constant object at 0x7da1b049dff0>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da1b04bcfd0>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[high]
if compare[name[crit] in list[[<ast.Constant object at 0x7da1b04bc730>]]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da1b04bc790>, <ast.Constant object at 0x7da1b04bcac0>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da1b04d6a70>
if compare[name[crit] equal[==] constant[site_direction_type]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] list[[<ast.Constant object at 0x7da1b04d7d00>, <ast.Constant object at 0x7da1b04d5f90>]]
if compare[name[crit] equal[==] constant[site_polarity]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] list[[<ast.Constant object at 0x7da1b04d65c0>, <ast.Constant object at 0x7da1b04d5ba0>, <ast.Constant object at 0x7da1b04d7df0>, <ast.Constant object at 0x7da1b04d64a0>, <ast.Constant object at 0x7da1b04d6950>]]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] <ast.UnaryOp object at 0x7da1b04d7430>
variable[category] assign[=] constant[DE-STUDY]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da1b04d5e10>, <ast.Constant object at 0x7da1b04d7040>, <ast.Constant object at 0x7da1b04d56c0>, <ast.Constant object at 0x7da1b04d7d90>, <ast.Constant object at 0x7da1b04d4850>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da1b04d59f0>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
if compare[name[crit] in list[[<ast.Constant object at 0x7da1b04d4ee0>, <ast.Constant object at 0x7da1b04d6470>, <ast.Constant object at 0x7da1b04d6170>]]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da1b04d4c10>, <ast.Constant object at 0x7da1b04d4250>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da1b04d64d0>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[high]
if compare[name[crit] in list[[<ast.Constant object at 0x7da1b04d78b0>]]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[1]
variable[category] assign[=] constant[IE-SPEC]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da2044c2ad0>, <ast.Constant object at 0x7da2044c0b20>, <ast.Constant object at 0x7da2044c2500>, <ast.Constant object at 0x7da2044c1150>, <ast.Constant object at 0x7da2044c2800>, <ast.Constant object at 0x7da2044c0670>, <ast.Constant object at 0x7da2044c2f80>, <ast.Constant object at 0x7da2044c1780>, <ast.Constant object at 0x7da2044c3be0>, <ast.Constant object at 0x7da2044c3190>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da2044c14b0>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
if compare[name[crit] in list[[<ast.Constant object at 0x7da2044c3dc0>, <ast.Constant object at 0x7da2044c3820>, <ast.Constant object at 0x7da2044c1c00>, <ast.Constant object at 0x7da2044c1120>]]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da2044c1510>, <ast.Constant object at 0x7da2044c3400>, <ast.Constant object at 0x7da2044c3b20>, <ast.Constant object at 0x7da2044c0b50>, <ast.Constant object at 0x7da2044c3d00>, <ast.Constant object at 0x7da2044c1f60>, <ast.Constant object at 0x7da2044c2890>, <ast.Constant object at 0x7da2044c3610>, <ast.Constant object at 0x7da2044c1ff0>, <ast.Constant object at 0x7da2044c3160>, <ast.Constant object at 0x7da2044c32e0>, <ast.Constant object at 0x7da2044c0e20>, <ast.Constant object at 0x7da2044c0e50>, <ast.Constant object at 0x7da2044c3b50>, <ast.Constant object at 0x7da2044c3880>, <ast.Constant object at 0x7da2044c2dd0>, <ast.Constant object at 0x7da2044c2860>, <ast.Constant object at 0x7da2044c0370>, <ast.Constant object at 0x7da2044c1720>, <ast.Constant object at 0x7da2044c2e30>, <ast.Constant object at 0x7da2044c1fc0>, <ast.Constant object at 0x7da2044c1f00>, <ast.Constant object at 0x7da2044c1030>, <ast.Constant object at 0x7da2044c2950>, <ast.Constant object at 0x7da2044c03d0>, <ast.Constant object at 0x7da2044c2620>, <ast.Constant object at 0x7da2044c2c20>, <ast.Constant object at 0x7da2044c0b80>, <ast.Constant object at 0x7da2044c3af0>, <ast.Constant object at 0x7da2044c3460>, <ast.Constant object at 0x7da2044c0d30>, <ast.Constant object at 0x7da2044c3df0>, <ast.Constant object at 0x7da2044c1bd0>, <ast.Constant object at 0x7da2044c1ae0>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da2044c2080>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[high]
if compare[name[crit] in list[[<ast.Constant object at 0x7da2044c0850>, <ast.Constant object at 0x7da2044c38e0>, <ast.Constant object at 0x7da2044c3040>, <ast.Constant object at 0x7da2044c2b30>, <ast.Constant object at 0x7da2044c3100>, <ast.Constant object at 0x7da2044c1e40>, <ast.Constant object at 0x7da1b05663e0>, <ast.Constant object at 0x7da1b05664a0>, <ast.Constant object at 0x7da1b0566110>, <ast.Constant object at 0x7da1b05661a0>, <ast.Constant object at 0x7da1b0566350>]]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[1]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da1b0564a90>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da1b05661d0>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[bool]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] <ast.UnaryOp object at 0x7da1b0566050>
variable[category] assign[=] constant[IE-SAMP]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da1b0566320>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da20c795180>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da20c795d20>, <ast.Constant object at 0x7da20c794d60>, <ast.Constant object at 0x7da20c794fa0>, <ast.Constant object at 0x7da20c794100>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da20c795ea0>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[high]
if compare[name[crit] in list[[<ast.Constant object at 0x7da20c795960>, <ast.Constant object at 0x7da20c7946a0>]]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[1]
variable[category] assign[=] constant[IE-SITE]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da20c7949d0>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da20c795690>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da20c795900>, <ast.Constant object at 0x7da20c794a90>, <ast.Constant object at 0x7da20c7964a0>, <ast.Constant object at 0x7da20c794670>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da20c795570>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[high]
if compare[name[crit] in list[[<ast.Constant object at 0x7da20c794f70>, <ast.Constant object at 0x7da20c7945b0>]]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[1]
variable[category] assign[=] constant[IE-STUDY]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da207f01450>, <ast.Constant object at 0x7da207f018a0>, <ast.Constant object at 0x7da207f017e0>, <ast.Constant object at 0x7da207f027d0>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da207f00c70>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da207f026b0>, <ast.Constant object at 0x7da207f03790>, <ast.Constant object at 0x7da207f015a0>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da207f02230>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[high]
if compare[name[crit] in list[[<ast.Constant object at 0x7da207f03f10>]]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[1]
variable[category] assign[=] constant[NPOLE]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da207f021a0>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da2041d9ea0>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] list[[<ast.Constant object at 0x7da2041d9ab0>, <ast.Constant object at 0x7da2041d9120>]]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] <ast.UnaryOp object at 0x7da2041da9b0>
variable[category] assign[=] constant[RPOLE]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da2041d9000>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da2041dbbe0>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] list[[<ast.Constant object at 0x7da2041dbe20>, <ast.Constant object at 0x7da2041dbb50>]]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] <ast.UnaryOp object at 0x7da2041d9630>
variable[category] assign[=] constant[VADM]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da2041d8c10>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da2041d8cd0>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
if compare[name[crit] in list[[<ast.Constant object at 0x7da2041daa40>]]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
variable[category] assign[=] constant[VADM]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da2041da260>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da2041d9570>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da2041d9d50>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da2041d8d00>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] <ast.UnaryOp object at 0x7da2041d8220>
variable[category] assign[=] constant[VDM]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da2041db670>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da2041d8e80>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da2041dad40>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da2041db730>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] <ast.UnaryOp object at 0x7da2041d8d30>
variable[category] assign[=] constant[VDM]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da20c7c9750>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da20c7cb9d0>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[0]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da20c7cac50>, <ast.Constant object at 0x7da20c7c9150>, <ast.Constant object at 0x7da20c7c9540>, <ast.Constant object at 0x7da20c7cbe50>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da20c7cafe0>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
if compare[name[crit] in list[[<ast.Constant object at 0x7da20c7c8250>]]] begin[:]
call[call[name[acceptance_criteria]][name[crit]]][tuple[[<ast.Constant object at 0x7da20c7caa40>, <ast.Constant object at 0x7da20c7c8ac0>, <ast.Constant object at 0x7da20c7c9e40>]]] assign[=] constant[1]
variable[category] assign[=] constant[AGE]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da20c7c9030>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da20c7c8f70>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[low]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] <ast.UnaryOp object at 0x7da20c7cbe80>
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da20c7c8100>, <ast.Constant object at 0x7da20c7caad0>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da20c7c9d50>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[high]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] <ast.UnaryOp object at 0x7da20c7c8a90>
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da20c7c9480>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da20c7cbc10>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] list[[<ast.Constant object at 0x7da20c7cadd0>, <ast.Constant object at 0x7da20c7cb5e0>, <ast.Constant object at 0x7da20c7c9a20>, <ast.Constant object at 0x7da20c7c8b20>, <ast.Constant object at 0x7da20c7c9270>, <ast.Constant object at 0x7da20c7c9180>, <ast.Constant object at 0x7da20c7cbfa0>]]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] <ast.UnaryOp object at 0x7da20c7ca830>
variable[category] assign[=] constant[ANI]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da20c7c8d90>, <ast.Constant object at 0x7da20c7cb910>, <ast.Constant object at 0x7da20c7c9d80>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da20c7caec0>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[high]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] constant[3]
for taget[name[crit]] in starred[list[[<ast.Constant object at 0x7da20c7cad10>]]] begin[:]
call[name[acceptance_criteria]][name[crit]] assign[=] dictionary[[], []]
call[call[name[acceptance_criteria]][name[crit]]][constant[category]] assign[=] name[category]
call[call[name[acceptance_criteria]][name[crit]]][constant[criterion_name]] assign[=] name[crit]
call[call[name[acceptance_criteria]][name[crit]]][constant[value]] assign[=] <ast.UnaryOp object at 0x7da1b05d8040>
call[call[name[acceptance_criteria]][name[crit]]][constant[threshold_type]] assign[=] constant[bool]
call[call[name[acceptance_criteria]][name[crit]]][constant[decimal_points]] assign[=] <ast.UnaryOp object at 0x7da1b05d8a90>
return[name[acceptance_criteria]] | keyword[def] identifier[initialize_acceptance_criteria] (** identifier[kwargs] ):
literal[string]
identifier[acceptance_criteria] ={}
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
keyword[if] identifier[crit] == literal[string] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=[ literal[string] , literal[string] ]
keyword[if] identifier[crit] == literal[string] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=[
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
keyword[if] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[elif] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[else] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
keyword[if] identifier[crit] == literal[string] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=[ literal[string] , literal[string] ]
keyword[if] identifier[crit] == literal[string] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=[
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
keyword[if] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[else] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
keyword[if] identifier[crit] == literal[string] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=[ literal[string] , literal[string] ]
keyword[if] identifier[crit] == literal[string] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=[
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
keyword[if] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[elif] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[else] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
keyword[if] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[else] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[if] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[elif] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[else] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
keyword[if] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[elif] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[elif] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[else] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
keyword[if] identifier[crit] keyword[in] [ literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[else] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
keyword[if] identifier[crit] keyword[in] [ literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[else] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ,]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
keyword[if] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[else] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=[ literal[string] , literal[string] ]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=[ literal[string] , literal[string] ]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
keyword[if] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[else] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
keyword[if] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] , literal[string] , literal[string] ]= literal[int]
keyword[else] :
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=[
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[category] = literal[string]
keyword[for] identifier[crit] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[int]
keyword[for] identifier[crit] keyword[in] [ literal[string] ]:
identifier[acceptance_criteria] [ identifier[crit] ]={}
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[category]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= identifier[crit]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]= literal[string]
identifier[acceptance_criteria] [ identifier[crit] ][ literal[string] ]=- literal[int]
keyword[return] ( identifier[acceptance_criteria] ) | def initialize_acceptance_criteria(**kwargs):
"""
initialize acceptance criteria with NULL values for thellier_gui and demag_gui
acceptance criteria format is doctionaries:
acceptance_criteria={}
acceptance_criteria[crit]={}
acceptance_criteria[crit]['category']=
acceptance_criteria[crit]['criterion_name']=
acceptance_criteria[crit]['value']=
acceptance_criteria[crit]['threshold_type']
acceptance_criteria[crit]['decimal_points']
'category':
'DE-SPEC','DE-SAMP'..etc
'criterion_name':
MagIC name
'value':
a number (for 'regular criteria')
a string (for 'flag')
1 for True (if criteria is bullean)
0 for False (if criteria is bullean)
-999 means N/A
'threshold_type':
'low'for low threshold value
'high'for high threshold value
[flag1.flag2]: for flags
'bool' for boolean flags (can be 'g','b' or True/Flase or 1/0)
'decimal_points':
number of decimal points in rounding
(this is used in displaying criteria in the dialog box)
-999 means Exponent with 3 descimal points for floats and string for string
"""
acceptance_criteria = {}
# --------------------------------
# 'DE-SPEC'
# --------------------------------
# low cutoff value
category = 'DE-SPEC'
for crit in ['specimen_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
acceptance_criteria[crit]['decimal_points'] = 0 # depends on [control=['for'], data=['crit']]
# high cutoff value
category = 'DE-SPEC'
for crit in ['specimen_mad', 'specimen_dang', 'specimen_alpha95']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'high'
acceptance_criteria[crit]['decimal_points'] = 1 # depends on [control=['for'], data=['crit']]
# flag
for crit in ['specimen_direction_type']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
if crit == 'specimen_direction_type':
acceptance_criteria[crit]['threshold_type'] = ['l', 'p'] # depends on [control=['if'], data=['crit']]
if crit == 'specimen_polarity':
acceptance_criteria[crit]['threshold_type'] = ['n', 'r', 't', 'e', 'i'] # depends on [control=['if'], data=['crit']]
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'DE-SAMP'
# --------------------------------
# low cutoff value
category = 'DE-SAMP'
for crit in ['sample_n', 'sample_n_lines', 'sample_n_planes']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
acceptance_criteria[crit]['decimal_points'] = 0 # depends on [control=['for'], data=['crit']]
# high cutoff value
category = 'DE-SAMP'
for crit in ['sample_r', 'sample_alpha95', 'sample_sigma', 'sample_k', 'sample_tilt_correction']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'high'
if crit in ['sample_tilt_correction']:
acceptance_criteria[crit]['decimal_points'] = 0 # depends on [control=['if'], data=['crit']]
elif crit in ['sample_alpha95']:
acceptance_criteria[crit]['decimal_points'] = 1 # depends on [control=['if'], data=['crit']]
else:
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# flag
for crit in ['sample_direction_type', 'sample_polarity']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
if crit == 'sample_direction_type':
acceptance_criteria[crit]['threshold_type'] = ['l', 'p'] # depends on [control=['if'], data=['crit']]
if crit == 'sample_polarity':
acceptance_criteria[crit]['threshold_type'] = ['n', 'r', 't', 'e', 'i'] # depends on [control=['if'], data=['crit']]
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'DE-SITE'
# --------------------------------
# low cutoff value
category = 'DE-SITE'
for crit in ['site_n', 'site_n_lines', 'site_n_planes']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
acceptance_criteria[crit]['decimal_points'] = 0 # depends on [control=['for'], data=['crit']]
# high cutoff value
for crit in ['site_k', 'site_r', 'site_alpha95', 'site_sigma', 'site_tilt_correction']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'high'
if crit in ['site_tilt_correction']:
acceptance_criteria[crit]['decimal_points'] = 0 # depends on [control=['if'], data=['crit']]
else:
acceptance_criteria[crit]['decimal_points'] = 1 # depends on [control=['for'], data=['crit']]
# flag
for crit in ['site_direction_type', 'site_polarity']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
if crit == 'site_direction_type':
acceptance_criteria[crit]['threshold_type'] = ['l', 'p'] # depends on [control=['if'], data=['crit']]
if crit == 'site_polarity':
acceptance_criteria[crit]['threshold_type'] = ['n', 'r', 't', 'e', 'i'] # depends on [control=['if'], data=['crit']]
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'DE-STUDY'
# --------------------------------
category = 'DE-STUDY'
# low cutoff value
for crit in ['average_k', 'average_n', 'average_nn', 'average_nnn', 'average_r']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
if crit in ['average_n', 'average_nn', 'average_nnn']:
acceptance_criteria[crit]['decimal_points'] = 0 # depends on [control=['if'], data=['crit']]
elif crit in ['average_alpha95']:
acceptance_criteria[crit]['decimal_points'] = 1 # depends on [control=['if'], data=['crit']]
else:
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# high cutoff value
for crit in ['average_alpha95', 'average_sigma']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'high'
if crit in ['average_alpha95']:
acceptance_criteria[crit]['decimal_points'] = 1 # depends on [control=['if'], data=['crit']]
else:
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'IE-SPEC' (a long list from SPD.v.1.0)
# --------------------------------
category = 'IE-SPEC'
# low cutoff value
for crit in ['specimen_int_n', 'specimen_f', 'specimen_fvds', 'specimen_frac', 'specimen_q', 'specimen_w', 'specimen_r_sq', 'specimen_int_ptrm_n', 'specimen_int_ptrm_tail_n', 'specimen_ac_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
acceptance_criteria[crit]['decimal_points'] = 0
if crit in ['specimen_int_n', 'specimen_int_ptrm_n', 'specimen_int_ptrm_tail_n', 'specimen_ac_n']:
acceptance_criteria[crit]['decimal_points'] = 0 # depends on [control=['if'], data=['crit']]
elif crit in ['specimen_f', 'specimen_fvds', 'specimen_frac', 'specimen_q']:
acceptance_criteria[crit]['decimal_points'] = 2 # depends on [control=['if'], data=['crit']]
else:
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# high cutoff value
for crit in ['specimen_b_sigma', 'specimen_b_beta', 'specimen_g', 'specimen_gmax', 'specimen_k', 'specimen_k_sse', 'specimen_k_prime', 'specimen_k_prime_sse', 'specimen_coeff_det_sq', 'specimen_z', 'specimen_z_md', 'specimen_int_mad', 'specimen_int_mad_anc', 'specimen_int_alpha', 'specimen_alpha', 'specimen_alpha_prime', 'specimen_theta', 'specimen_int_dang', 'specimen_int_crm', 'specimen_ptrm', 'specimen_dck', 'specimen_drat', 'specimen_maxdev', 'specimen_cdrat', 'specimen_drats', 'specimen_mdrat', 'specimen_mdev', 'specimen_dpal', 'specimen_tail_drat', 'specimen_dtr', 'specimen_md', 'specimen_dt', 'specimen_dac', 'specimen_gamma']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'high'
if crit in ['specimen_int_mad', 'specimen_int_mad_anc', 'specimen_int_dang', 'specimen_drat', 'specimen_cdrat', 'specimen_drats', 'specimen_tail_drat', 'specimen_dtr', 'specimen_md', 'specimen_dac', 'specimen_gamma']:
acceptance_criteria[crit]['decimal_points'] = 1 # depends on [control=['if'], data=['crit']]
elif crit in ['specimen_gmax']:
acceptance_criteria[crit]['decimal_points'] = 2 # depends on [control=['if'], data=['crit']]
elif crit in ['specimen_b_sigma', 'specimen_b_beta', 'specimen_g', 'specimen_k', 'specimen_k_prime']:
acceptance_criteria[crit]['decimal_points'] = 3 # depends on [control=['if'], data=['crit']]
else:
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# flags
for crit in ['specimen_scat']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'bool'
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'IE-SAMP'
# --------------------------------
category = 'IE-SAMP'
# low cutoff value
for crit in ['sample_int_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
acceptance_criteria[crit]['decimal_points'] = 0 # depends on [control=['for'], data=['crit']]
# high cutoff value
for crit in ['sample_int_rel_sigma', 'sample_int_rel_sigma_perc', 'sample_int_sigma', 'sample_int_sigma_perc']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'high'
if crit in ['sample_int_rel_sigma_perc', 'sample_int_sigma_perc']:
acceptance_criteria[crit]['decimal_points'] = 1 # depends on [control=['if'], data=['crit']]
else:
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'IE-SITE'
# --------------------------------
category = 'IE-SITE'
# low cutoff value
for crit in ['site_int_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
acceptance_criteria[crit]['decimal_points'] = 0 # depends on [control=['for'], data=['crit']]
# high cutoff value
for crit in ['site_int_rel_sigma', 'site_int_rel_sigma_perc', 'site_int_sigma', 'site_int_sigma_perc']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'high'
if crit in ['site_int_rel_sigma_perc', 'site_int_sigma_perc']:
acceptance_criteria[crit]['decimal_points'] = 1 # depends on [control=['if'], data=['crit']]
else:
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'IE-STUDY'
# --------------------------------
category = 'IE-STUDY'
# low cutoff value
for crit in ['average_int_n', 'average_int_n', 'average_int_nn', 'average_int_nnn']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
acceptance_criteria[crit]['decimal_points'] = 0 # depends on [control=['for'], data=['crit']]
# high cutoff value
for crit in ['average_int_rel_sigma', 'average_int_rel_sigma_perc', 'average_int_sigma']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'high'
if crit in ['average_int_rel_sigma_perc']:
acceptance_criteria[crit]['decimal_points'] = 1 # depends on [control=['if'], data=['crit']]
else:
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'NPOLE'
# --------------------------------
category = 'NPOLE'
# flags
for crit in ['site_polarity']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = ['n', 'r']
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'NPOLE'
# --------------------------------
category = 'RPOLE'
# flags
for crit in ['site_polarity']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = ['n', 'r']
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'VADM'
# --------------------------------
category = 'VADM'
# low cutoff value
for crit in ['vadm_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
if crit in ['vadm_n']:
acceptance_criteria[crit]['decimal_points'] = 0 # depends on [control=['if'], data=['crit']]
else:
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'VADM'
# --------------------------------
category = 'VADM'
# low cutoff value
for crit in ['vadm_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
acceptance_criteria[crit]['decimal_points'] = 0 # depends on [control=['for'], data=['crit']]
# high cutoff value
for crit in ['vadm_sigma']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'VADM'
# --------------------------------
category = 'VDM'
# low cutoff value
for crit in ['vdm_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
acceptance_criteria[crit]['decimal_points'] = 0 # depends on [control=['for'], data=['crit']]
# high cutoff value
for crit in ['vdm_sigma']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'VGP'
# --------------------------------
category = 'VDM'
# low cutoff value
for crit in ['vgp_n']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
acceptance_criteria[crit]['decimal_points'] = 0 # depends on [control=['for'], data=['crit']]
# high cutoff value
for crit in ['vgp_alpha95', 'vgp_dm', 'vgp_dp', 'vgp_sigma']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
if crit in ['vgp_alpha95']:
acceptance_criteria[crit]['decimal_points', 'vgp_dm', 'vgp_dp'] = 1 # depends on [control=['if'], data=['crit']]
else:
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'AGE'
# --------------------------------
category = 'AGE'
# low cutoff value
for crit in ['average_age_min']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'low'
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# high cutoff value
for crit in ['average_age_max', 'average_age_sigma']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'high'
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# flags
for crit in ['average_age_unit']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = ['Ga', 'Ka', 'Ma', 'Years AD (+/-)', 'Years BP', 'Years Cal AD (+/-)', 'Years Cal BP']
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
# --------------------------------
# 'ANI'
# --------------------------------
category = 'ANI'
# high cutoff value
for crit in ['anisotropy_alt', 'sample_aniso_mean', 'site_aniso_mean']: # value is in precent
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'high'
acceptance_criteria[crit]['decimal_points'] = 3 # depends on [control=['for'], data=['crit']]
# flags
for crit in ['specimen_aniso_ftest_flag']:
acceptance_criteria[crit] = {}
acceptance_criteria[crit]['category'] = category
acceptance_criteria[crit]['criterion_name'] = crit
acceptance_criteria[crit]['value'] = -999
acceptance_criteria[crit]['threshold_type'] = 'bool'
acceptance_criteria[crit]['decimal_points'] = -999 # depends on [control=['for'], data=['crit']]
return acceptance_criteria |
def _create_random_starter(node_count):
"""Creates the random starter for the random walk.
:param node_count: number of nodes to create the random vector.
:returns: list -- list of starting probabilities for each node.
"""
pi = np.zeros(node_count, dtype=float)
for i in range(node_count):
pi[i] = random.random()
summation = np.sum(pi)
for i in range(node_count):
pi[i] = pi[i] / summation
return pi | def function[_create_random_starter, parameter[node_count]]:
constant[Creates the random starter for the random walk.
:param node_count: number of nodes to create the random vector.
:returns: list -- list of starting probabilities for each node.
]
variable[pi] assign[=] call[name[np].zeros, parameter[name[node_count]]]
for taget[name[i]] in starred[call[name[range], parameter[name[node_count]]]] begin[:]
call[name[pi]][name[i]] assign[=] call[name[random].random, parameter[]]
variable[summation] assign[=] call[name[np].sum, parameter[name[pi]]]
for taget[name[i]] in starred[call[name[range], parameter[name[node_count]]]] begin[:]
call[name[pi]][name[i]] assign[=] binary_operation[call[name[pi]][name[i]] / name[summation]]
return[name[pi]] | keyword[def] identifier[_create_random_starter] ( identifier[node_count] ):
literal[string]
identifier[pi] = identifier[np] . identifier[zeros] ( identifier[node_count] , identifier[dtype] = identifier[float] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[node_count] ):
identifier[pi] [ identifier[i] ]= identifier[random] . identifier[random] ()
identifier[summation] = identifier[np] . identifier[sum] ( identifier[pi] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[node_count] ):
identifier[pi] [ identifier[i] ]= identifier[pi] [ identifier[i] ]/ identifier[summation]
keyword[return] identifier[pi] | def _create_random_starter(node_count):
"""Creates the random starter for the random walk.
:param node_count: number of nodes to create the random vector.
:returns: list -- list of starting probabilities for each node.
"""
pi = np.zeros(node_count, dtype=float)
for i in range(node_count):
pi[i] = random.random() # depends on [control=['for'], data=['i']]
summation = np.sum(pi)
for i in range(node_count):
pi[i] = pi[i] / summation # depends on [control=['for'], data=['i']]
return pi |
def get_filter_events(
self,
contract_address: Address,
topics: List[str] = None,
from_block: BlockSpecification = 0,
to_block: BlockSpecification = 'latest',
) -> List[Dict]:
""" Get events for the given query. """
logs_blocks_sanity_check(from_block, to_block)
return self.web3.eth.getLogs({
'fromBlock': from_block,
'toBlock': to_block,
'address': to_checksum_address(contract_address),
'topics': topics,
}) | def function[get_filter_events, parameter[self, contract_address, topics, from_block, to_block]]:
constant[ Get events for the given query. ]
call[name[logs_blocks_sanity_check], parameter[name[from_block], name[to_block]]]
return[call[name[self].web3.eth.getLogs, parameter[dictionary[[<ast.Constant object at 0x7da1b17133d0>, <ast.Constant object at 0x7da1b17108b0>, <ast.Constant object at 0x7da1b1710850>, <ast.Constant object at 0x7da1b1710f40>], [<ast.Name object at 0x7da1b1713220>, <ast.Name object at 0x7da1b1711000>, <ast.Call object at 0x7da1b1713d30>, <ast.Name object at 0x7da1b1711150>]]]]] | keyword[def] identifier[get_filter_events] (
identifier[self] ,
identifier[contract_address] : identifier[Address] ,
identifier[topics] : identifier[List] [ identifier[str] ]= keyword[None] ,
identifier[from_block] : identifier[BlockSpecification] = literal[int] ,
identifier[to_block] : identifier[BlockSpecification] = literal[string] ,
)-> identifier[List] [ identifier[Dict] ]:
literal[string]
identifier[logs_blocks_sanity_check] ( identifier[from_block] , identifier[to_block] )
keyword[return] identifier[self] . identifier[web3] . identifier[eth] . identifier[getLogs] ({
literal[string] : identifier[from_block] ,
literal[string] : identifier[to_block] ,
literal[string] : identifier[to_checksum_address] ( identifier[contract_address] ),
literal[string] : identifier[topics] ,
}) | def get_filter_events(self, contract_address: Address, topics: List[str]=None, from_block: BlockSpecification=0, to_block: BlockSpecification='latest') -> List[Dict]:
""" Get events for the given query. """
logs_blocks_sanity_check(from_block, to_block)
return self.web3.eth.getLogs({'fromBlock': from_block, 'toBlock': to_block, 'address': to_checksum_address(contract_address), 'topics': topics}) |
def transformChildrenFromNative(self, clearBehavior=True):
"""
Recursively transform native children to vanilla representations.
"""
for childArray in self.contents.values():
for child in childArray:
child = child.transformFromNative()
child.transformChildrenFromNative(clearBehavior)
if clearBehavior:
child.behavior = None
child.parentBehavior = None | def function[transformChildrenFromNative, parameter[self, clearBehavior]]:
constant[
Recursively transform native children to vanilla representations.
]
for taget[name[childArray]] in starred[call[name[self].contents.values, parameter[]]] begin[:]
for taget[name[child]] in starred[name[childArray]] begin[:]
variable[child] assign[=] call[name[child].transformFromNative, parameter[]]
call[name[child].transformChildrenFromNative, parameter[name[clearBehavior]]]
if name[clearBehavior] begin[:]
name[child].behavior assign[=] constant[None]
name[child].parentBehavior assign[=] constant[None] | keyword[def] identifier[transformChildrenFromNative] ( identifier[self] , identifier[clearBehavior] = keyword[True] ):
literal[string]
keyword[for] identifier[childArray] keyword[in] identifier[self] . identifier[contents] . identifier[values] ():
keyword[for] identifier[child] keyword[in] identifier[childArray] :
identifier[child] = identifier[child] . identifier[transformFromNative] ()
identifier[child] . identifier[transformChildrenFromNative] ( identifier[clearBehavior] )
keyword[if] identifier[clearBehavior] :
identifier[child] . identifier[behavior] = keyword[None]
identifier[child] . identifier[parentBehavior] = keyword[None] | def transformChildrenFromNative(self, clearBehavior=True):
"""
Recursively transform native children to vanilla representations.
"""
for childArray in self.contents.values():
for child in childArray:
child = child.transformFromNative()
child.transformChildrenFromNative(clearBehavior)
if clearBehavior:
child.behavior = None
child.parentBehavior = None # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['child']] # depends on [control=['for'], data=['childArray']] |
def _determinebase_address(self):
    """
    Determine the base address of the binary by recovering functions.

    The basic idea is simple: start from a specific point, try to construct
    functions as much as we can, and maintain a function distribution graph
    and a call graph simultaneously. Repeat searching until we come to the
    end that there is no new function to be found.

    A function should start with:
        # some addresses that a call exit leads to, or
        # certain instructions. They are recorded in SimArch.

    For a better performance, instead of blindly scanning the entire process
    space, we first try to search for instruction patterns that a function
    may start with, and start scanning at those positions. Then we try to
    decode anything that is left.

    Side effects: (re)initializes ``self.functions``, ``self.call_map`` and
    ``self.cfg``, and sets ``self.base_address`` when indirect jumps are
    found.
    """
    traced_address = set()
    self.functions = set()
    self.call_map = networkx.DiGraph()
    self.cfg = networkx.DiGraph()
    initial_state = self.project.factory.blank_state(mode="fastpath")
    initial_options = initial_state.options - { o.TRACK_CONSTRAINTS } - o.refs
    initial_options |= { o.SUPER_FASTPATH }
    # initial_options.remove(o.COW_STATES)
    initial_state.options = initial_options
    # Sadly, not all calls to functions are explicitly made by call
    # instruction - they could be a jmp or b, or something else. So we
    # should record all exits from a single function, and then add
    # necessary calling edges in our call map during the post-processing
    # phase.
    function_exits = defaultdict(set)
    dump_file_prefix = self.project.filename
    if self._pickle_intermediate_results and \
            os.path.exists(dump_file_prefix + "_indirect_jumps.angr"):
        l.debug("Loading existing intermediate results.")
        # Use context managers so the dump files are closed deterministically
        # instead of leaking descriptors until garbage collection.
        with open(dump_file_prefix + "_indirect_jumps.angr", "rb") as fp:
            self._indirect_jumps = pickle.load(fp)
        with open(dump_file_prefix + "_coercecfg.angr", "rb") as fp:
            self.cfg = pickle.load(fp)
        with open(dump_file_prefix + "_unassured_functions.angr", "rb") as fp:
            self._unassured_functions = pickle.load(fp)
    else:
        # Performance boost :-)
        # Scan for existing function prologues
        self._scan_function_prologues(traced_address, function_exits, initial_state)
        if self._pickle_intermediate_results:
            l.debug("Dumping intermediate results.")
            with open(dump_file_prefix + "_indirect_jumps.angr", "wb") as fp:
                pickle.dump(self._indirect_jumps, fp, -1)
            with open(dump_file_prefix + "_coercecfg.angr", "wb") as fp:
                pickle.dump(self.cfg, fp, -1)
            with open(dump_file_prefix + "_unassured_functions.angr", "wb") as fp:
                pickle.dump(self._unassured_functions, fp, -1)
    if len(self._indirect_jumps):
        # We got some indirect jumps!
        # Gotta execute each basic block and see where it wants to jump to
        function_starts = self._process_indirect_jumps()
        self.base_address = self._solve_forbase_address(function_starts, self._unassured_functions)
        l.info("Base address should be 0x%x", self.base_address)
    else:
        l.debug("No indirect jumps are found. We switch to the slowpath mode.")
        # TODO: Slowpath mode...
        while True:
            next_addr = self._get_next_code_addr(initial_state)
            # BUGFIX: bail out *before* logging. The original logged first,
            # and formatting None with "%x" raises a TypeError on the very
            # last iteration once the code space is exhausted.
            if next_addr is None:
                break
            percentage = self._seg_list.occupied_size * 100.0 / (self._valid_memory_region_size)
            l.info("Analyzing %xh, progress %0.04f%%", next_addr, percentage)
            self.call_map.add_node(next_addr)
            self._scan_code(traced_address, function_exits, initial_state, next_addr)
    # Post-processing: Map those calls that are not made by call/blr
    # instructions to their targets in our map
    for src, s in function_exits.items():
        if src in self.call_map:
            for target in s:
                if target in self.call_map:
                    self.call_map.add_edge(src, target)
    # Merge call-map nodes that are within 4 bytes of each other: redirect
    # the higher node's edges onto the lower node, then drop the higher one.
    # NOTE(review): after remove_node, a later iteration can still index the
    # removed node via the stale `nodes` list -- confirm this cannot happen
    # with real inputs (pre-existing behavior, intentionally left unchanged).
    nodes = sorted(self.call_map.nodes())
    for i in range(len(nodes) - 1):
        if nodes[i] >= nodes[i + 1] - 4:
            for dst in self.call_map.successors(nodes[i + 1]):
                self.call_map.add_edge(nodes[i], dst)
            for src in self.call_map.predecessors(nodes[i + 1]):
                self.call_map.add_edge(src, nodes[i])
            self.call_map.remove_node(nodes[i + 1])
    l.debug("Construction finished.")
constant[
The basic idea is simple: start from a specific point, try to construct
functions as much as we can, and maintain a function distribution graph
and a call graph simultaneously. Repeat searching until we come to the
end that there is no new function to be found.
A function should start with:
# some addresses that a call exit leads to, or
# certain instructions. They are recoreded in SimArch.
For a better performance, instead of blindly scanning the entire process
space, we first try to search for instruction patterns that a function
may start with, and start scanning at those positions. Then we try to
decode anything that is left.
]
variable[traced_address] assign[=] call[name[set], parameter[]]
name[self].functions assign[=] call[name[set], parameter[]]
name[self].call_map assign[=] call[name[networkx].DiGraph, parameter[]]
name[self].cfg assign[=] call[name[networkx].DiGraph, parameter[]]
variable[initial_state] assign[=] call[name[self].project.factory.blank_state, parameter[]]
variable[initial_options] assign[=] binary_operation[binary_operation[name[initial_state].options - <ast.Set object at 0x7da204567130>] - name[o].refs]
<ast.AugAssign object at 0x7da18bc73580>
name[initial_state].options assign[=] name[initial_options]
variable[function_exits] assign[=] call[name[defaultdict], parameter[name[set]]]
variable[dump_file_prefix] assign[=] name[self].project.filename
if <ast.BoolOp object at 0x7da18bc70be0> begin[:]
call[name[l].debug, parameter[constant[Loading existing intermediate results.]]]
name[self]._indirect_jumps assign[=] call[name[pickle].load, parameter[call[name[open], parameter[binary_operation[name[dump_file_prefix] + constant[_indirect_jumps.angr]], constant[rb]]]]]
name[self].cfg assign[=] call[name[pickle].load, parameter[call[name[open], parameter[binary_operation[name[dump_file_prefix] + constant[_coercecfg.angr]], constant[rb]]]]]
name[self]._unassured_functions assign[=] call[name[pickle].load, parameter[call[name[open], parameter[binary_operation[name[dump_file_prefix] + constant[_unassured_functions.angr]], constant[rb]]]]]
if call[name[len], parameter[name[self]._indirect_jumps]] begin[:]
variable[function_starts] assign[=] call[name[self]._process_indirect_jumps, parameter[]]
name[self].base_address assign[=] call[name[self]._solve_forbase_address, parameter[name[function_starts], name[self]._unassured_functions]]
call[name[l].info, parameter[constant[Base address should be 0x%x], name[self].base_address]]
for taget[tuple[[<ast.Name object at 0x7da18bc722c0>, <ast.Name object at 0x7da18bc72bf0>]]] in starred[call[name[function_exits].items, parameter[]]] begin[:]
if compare[name[src] in name[self].call_map] begin[:]
for taget[name[target]] in starred[name[s]] begin[:]
if compare[name[target] in name[self].call_map] begin[:]
call[name[self].call_map.add_edge, parameter[name[src], name[target]]]
variable[nodes] assign[=] call[name[sorted], parameter[call[name[self].call_map.nodes, parameter[]]]]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[nodes]]] - constant[1]]]]] begin[:]
if compare[call[name[nodes]][name[i]] greater_or_equal[>=] binary_operation[call[name[nodes]][binary_operation[name[i] + constant[1]]] - constant[4]]] begin[:]
for taget[name[dst]] in starred[call[name[self].call_map.successors, parameter[call[name[nodes]][binary_operation[name[i] + constant[1]]]]]] begin[:]
call[name[self].call_map.add_edge, parameter[call[name[nodes]][name[i]], name[dst]]]
for taget[name[src]] in starred[call[name[self].call_map.predecessors, parameter[call[name[nodes]][binary_operation[name[i] + constant[1]]]]]] begin[:]
call[name[self].call_map.add_edge, parameter[name[src], call[name[nodes]][name[i]]]]
call[name[self].call_map.remove_node, parameter[call[name[nodes]][binary_operation[name[i] + constant[1]]]]]
call[name[l].debug, parameter[constant[Construction finished.]]] | keyword[def] identifier[_determinebase_address] ( identifier[self] ):
literal[string]
identifier[traced_address] = identifier[set] ()
identifier[self] . identifier[functions] = identifier[set] ()
identifier[self] . identifier[call_map] = identifier[networkx] . identifier[DiGraph] ()
identifier[self] . identifier[cfg] = identifier[networkx] . identifier[DiGraph] ()
identifier[initial_state] = identifier[self] . identifier[project] . identifier[factory] . identifier[blank_state] ( identifier[mode] = literal[string] )
identifier[initial_options] = identifier[initial_state] . identifier[options] -{ identifier[o] . identifier[TRACK_CONSTRAINTS] }- identifier[o] . identifier[refs]
identifier[initial_options] |={ identifier[o] . identifier[SUPER_FASTPATH] }
identifier[initial_state] . identifier[options] = identifier[initial_options]
identifier[function_exits] = identifier[defaultdict] ( identifier[set] )
identifier[dump_file_prefix] = identifier[self] . identifier[project] . identifier[filename]
keyword[if] identifier[self] . identifier[_pickle_intermediate_results] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[dump_file_prefix] + literal[string] ):
identifier[l] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_indirect_jumps] = identifier[pickle] . identifier[load] ( identifier[open] ( identifier[dump_file_prefix] + literal[string] , literal[string] ))
identifier[self] . identifier[cfg] = identifier[pickle] . identifier[load] ( identifier[open] ( identifier[dump_file_prefix] + literal[string] , literal[string] ))
identifier[self] . identifier[_unassured_functions] = identifier[pickle] . identifier[load] ( identifier[open] ( identifier[dump_file_prefix] + literal[string] , literal[string] ))
keyword[else] :
identifier[self] . identifier[_scan_function_prologues] ( identifier[traced_address] , identifier[function_exits] , identifier[initial_state] )
keyword[if] identifier[self] . identifier[_pickle_intermediate_results] :
identifier[l] . identifier[debug] ( literal[string] )
identifier[pickle] . identifier[dump] ( identifier[self] . identifier[_indirect_jumps] , identifier[open] ( identifier[dump_file_prefix] + literal[string] , literal[string] ),- literal[int] )
identifier[pickle] . identifier[dump] ( identifier[self] . identifier[cfg] , identifier[open] ( identifier[dump_file_prefix] + literal[string] , literal[string] ),- literal[int] )
identifier[pickle] . identifier[dump] ( identifier[self] . identifier[_unassured_functions] , identifier[open] ( identifier[dump_file_prefix] + literal[string] , literal[string] ),- literal[int] )
keyword[if] identifier[len] ( identifier[self] . identifier[_indirect_jumps] ):
identifier[function_starts] = identifier[self] . identifier[_process_indirect_jumps] ()
identifier[self] . identifier[base_address] = identifier[self] . identifier[_solve_forbase_address] ( identifier[function_starts] , identifier[self] . identifier[_unassured_functions] )
identifier[l] . identifier[info] ( literal[string] , identifier[self] . identifier[base_address] )
keyword[else] :
identifier[l] . identifier[debug] ( literal[string] )
keyword[while] keyword[True] :
identifier[next_addr] = identifier[self] . identifier[_get_next_code_addr] ( identifier[initial_state] )
identifier[percentage] = identifier[self] . identifier[_seg_list] . identifier[occupied_size] * literal[int] /( identifier[self] . identifier[_valid_memory_region_size] )
identifier[l] . identifier[info] ( literal[string] , identifier[next_addr] , identifier[percentage] )
keyword[if] identifier[next_addr] keyword[is] keyword[None] :
keyword[break]
identifier[self] . identifier[call_map] . identifier[add_node] ( identifier[next_addr] )
identifier[self] . identifier[_scan_code] ( identifier[traced_address] , identifier[function_exits] , identifier[initial_state] , identifier[next_addr] )
keyword[for] identifier[src] , identifier[s] keyword[in] identifier[function_exits] . identifier[items] ():
keyword[if] identifier[src] keyword[in] identifier[self] . identifier[call_map] :
keyword[for] identifier[target] keyword[in] identifier[s] :
keyword[if] identifier[target] keyword[in] identifier[self] . identifier[call_map] :
identifier[self] . identifier[call_map] . identifier[add_edge] ( identifier[src] , identifier[target] )
identifier[nodes] = identifier[sorted] ( identifier[self] . identifier[call_map] . identifier[nodes] ())
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[nodes] )- literal[int] ):
keyword[if] identifier[nodes] [ identifier[i] ]>= identifier[nodes] [ identifier[i] + literal[int] ]- literal[int] :
keyword[for] identifier[dst] keyword[in] identifier[self] . identifier[call_map] . identifier[successors] ( identifier[nodes] [ identifier[i] + literal[int] ]):
identifier[self] . identifier[call_map] . identifier[add_edge] ( identifier[nodes] [ identifier[i] ], identifier[dst] )
keyword[for] identifier[src] keyword[in] identifier[self] . identifier[call_map] . identifier[predecessors] ( identifier[nodes] [ identifier[i] + literal[int] ]):
identifier[self] . identifier[call_map] . identifier[add_edge] ( identifier[src] , identifier[nodes] [ identifier[i] ])
identifier[self] . identifier[call_map] . identifier[remove_node] ( identifier[nodes] [ identifier[i] + literal[int] ])
identifier[l] . identifier[debug] ( literal[string] ) | def _determinebase_address(self):
"""
The basic idea is simple: start from a specific point, try to construct
functions as much as we can, and maintain a function distribution graph
and a call graph simultaneously. Repeat searching until we come to the
end that there is no new function to be found.
A function should start with:
# some addresses that a call exit leads to, or
# certain instructions. They are recoreded in SimArch.
For a better performance, instead of blindly scanning the entire process
space, we first try to search for instruction patterns that a function
may start with, and start scanning at those positions. Then we try to
decode anything that is left.
"""
traced_address = set()
self.functions = set()
self.call_map = networkx.DiGraph()
self.cfg = networkx.DiGraph()
initial_state = self.project.factory.blank_state(mode='fastpath')
initial_options = initial_state.options - {o.TRACK_CONSTRAINTS} - o.refs
initial_options |= {o.SUPER_FASTPATH}
# initial_options.remove(o.COW_STATES)
initial_state.options = initial_options
# Sadly, not all calls to functions are explicitly made by call
# instruction - they could be a jmp or b, or something else. So we
# should record all exits from a single function, and then add
# necessary calling edges in our call map during the post-processing
# phase.
function_exits = defaultdict(set)
dump_file_prefix = self.project.filename
if self._pickle_intermediate_results and os.path.exists(dump_file_prefix + '_indirect_jumps.angr'):
l.debug('Loading existing intermediate results.')
self._indirect_jumps = pickle.load(open(dump_file_prefix + '_indirect_jumps.angr', 'rb'))
self.cfg = pickle.load(open(dump_file_prefix + '_coercecfg.angr', 'rb'))
self._unassured_functions = pickle.load(open(dump_file_prefix + '_unassured_functions.angr', 'rb')) # depends on [control=['if'], data=[]]
else:
# Performance boost :-)
# Scan for existing function prologues
self._scan_function_prologues(traced_address, function_exits, initial_state)
if self._pickle_intermediate_results:
l.debug('Dumping intermediate results.')
pickle.dump(self._indirect_jumps, open(dump_file_prefix + '_indirect_jumps.angr', 'wb'), -1)
pickle.dump(self.cfg, open(dump_file_prefix + '_coercecfg.angr', 'wb'), -1)
pickle.dump(self._unassured_functions, open(dump_file_prefix + '_unassured_functions.angr', 'wb'), -1) # depends on [control=['if'], data=[]]
if len(self._indirect_jumps):
# We got some indirect jumps!
# Gotta execute each basic block and see where it wants to jump to
function_starts = self._process_indirect_jumps()
self.base_address = self._solve_forbase_address(function_starts, self._unassured_functions)
l.info('Base address should be 0x%x', self.base_address) # depends on [control=['if'], data=[]]
else:
l.debug('No indirect jumps are found. We switch to the slowpath mode.')
# TODO: Slowpath mode...
while True:
next_addr = self._get_next_code_addr(initial_state)
percentage = self._seg_list.occupied_size * 100.0 / self._valid_memory_region_size
l.info('Analyzing %xh, progress %0.04f%%', next_addr, percentage)
if next_addr is None:
break # depends on [control=['if'], data=[]]
self.call_map.add_node(next_addr)
self._scan_code(traced_address, function_exits, initial_state, next_addr) # depends on [control=['while'], data=[]]
# Post-processing: Map those calls that are not made by call/blr
# instructions to their targets in our map
for (src, s) in function_exits.items():
if src in self.call_map:
for target in s:
if target in self.call_map:
self.call_map.add_edge(src, target) # depends on [control=['if'], data=['target']] # depends on [control=['for'], data=['target']] # depends on [control=['if'], data=['src']] # depends on [control=['for'], data=[]]
nodes = sorted(self.call_map.nodes())
for i in range(len(nodes) - 1):
if nodes[i] >= nodes[i + 1] - 4:
for dst in self.call_map.successors(nodes[i + 1]):
self.call_map.add_edge(nodes[i], dst) # depends on [control=['for'], data=['dst']]
for src in self.call_map.predecessors(nodes[i + 1]):
self.call_map.add_edge(src, nodes[i]) # depends on [control=['for'], data=['src']]
self.call_map.remove_node(nodes[i + 1]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
l.debug('Construction finished.') |
def get_relative_error(self):
    """
    Return the relative error statistic (e_rel) defined by Frohlich &
    Davis (1999)::

        e_rel = sqrt((U:U) / (M:M))

    where M is the moment tensor, U is the uncertainty tensor and ":" is
    the tensor dot product. The result is also cached on ``self.e_rel``.

    :raises ValueError: if no moment tensor has been set on this object.
    """
    if not self.moment_tensor:
        raise ValueError('Moment tensor not defined!')
    sigma = self.moment_tensor.tensor_sigma
    tensor = self.moment_tensor.tensor
    uncertainty_sq = np.tensordot(sigma, sigma)
    magnitude_sq = np.tensordot(tensor, tensor)
    self.e_rel = sqrt(uncertainty_sq / magnitude_sq)
    return self.e_rel
constant[
Returns the relative error statistic (e_rel), defined by Frohlich &
Davis (1999): `e_rel = sqrt((U:U) / (M:M))` where M is the moment
tensor, U is the uncertainty tensor and : is the tensor dot product
]
if <ast.UnaryOp object at 0x7da18f00d450> begin[:]
<ast.Raise object at 0x7da18f00df30>
variable[numer] assign[=] call[name[np].tensordot, parameter[name[self].moment_tensor.tensor_sigma, name[self].moment_tensor.tensor_sigma]]
variable[denom] assign[=] call[name[np].tensordot, parameter[name[self].moment_tensor.tensor, name[self].moment_tensor.tensor]]
name[self].e_rel assign[=] call[name[sqrt], parameter[binary_operation[name[numer] / name[denom]]]]
return[name[self].e_rel] | keyword[def] identifier[get_relative_error] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[moment_tensor] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[numer] = identifier[np] . identifier[tensordot] ( identifier[self] . identifier[moment_tensor] . identifier[tensor_sigma] ,
identifier[self] . identifier[moment_tensor] . identifier[tensor_sigma] )
identifier[denom] = identifier[np] . identifier[tensordot] ( identifier[self] . identifier[moment_tensor] . identifier[tensor] ,
identifier[self] . identifier[moment_tensor] . identifier[tensor] )
identifier[self] . identifier[e_rel] = identifier[sqrt] ( identifier[numer] / identifier[denom] )
keyword[return] identifier[self] . identifier[e_rel] | def get_relative_error(self):
"""
Returns the relative error statistic (e_rel), defined by Frohlich &
Davis (1999): `e_rel = sqrt((U:U) / (M:M))` where M is the moment
tensor, U is the uncertainty tensor and : is the tensor dot product
"""
if not self.moment_tensor:
raise ValueError('Moment tensor not defined!') # depends on [control=['if'], data=[]]
numer = np.tensordot(self.moment_tensor.tensor_sigma, self.moment_tensor.tensor_sigma)
denom = np.tensordot(self.moment_tensor.tensor, self.moment_tensor.tensor)
self.e_rel = sqrt(numer / denom)
return self.e_rel |
def genenare_callmap_sif(self, filepath):
    """
    Dump the call map as a Cytoscape SIF file (one "src DirectEdge dst"
    line per call edge, addresses in hex).

    :param filepath: Path of the SIF file to write.
    :raises AngrGirlScoutError: if the call map has not been generated yet.
    """
    graph = self.call_map
    if graph is None:
        raise AngrGirlScoutError('Please generate the call graph first.')
    # BUGFIX: the file is opened in binary mode, but the original wrote a
    # str, which raises TypeError on Python 3; encode explicitly (the bytes
    # written are identical ASCII). Use a context manager so the handle is
    # closed even if a write fails.
    with open(filepath, "wb") as f:
        for src, dst in graph.edges():
            f.write(("0x%x\tDirectEdge\t0x%x\n" % (src, dst)).encode("ascii"))
constant[
Generate a sif file from the call map
]
variable[graph] assign[=] name[self].call_map
if compare[name[graph] is constant[None]] begin[:]
<ast.Raise object at 0x7da18dc998d0>
variable[f] assign[=] call[name[open], parameter[name[filepath], constant[wb]]]
for taget[tuple[[<ast.Name object at 0x7da18dc9bd60>, <ast.Name object at 0x7da18dc9ae90>]]] in starred[call[name[graph].edges, parameter[]]] begin[:]
call[name[f].write, parameter[binary_operation[constant[0x%x DirectEdge 0x%x
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18dc9bb20>, <ast.Name object at 0x7da18dc9a710>]]]]]
call[name[f].close, parameter[]] | keyword[def] identifier[genenare_callmap_sif] ( identifier[self] , identifier[filepath] ):
literal[string]
identifier[graph] = identifier[self] . identifier[call_map]
keyword[if] identifier[graph] keyword[is] keyword[None] :
keyword[raise] identifier[AngrGirlScoutError] ( literal[string] )
identifier[f] = identifier[open] ( identifier[filepath] , literal[string] )
keyword[for] identifier[src] , identifier[dst] keyword[in] identifier[graph] . identifier[edges] ():
identifier[f] . identifier[write] ( literal[string] %( identifier[src] , identifier[dst] ))
identifier[f] . identifier[close] () | def genenare_callmap_sif(self, filepath):
"""
Generate a sif file from the call map
"""
graph = self.call_map
if graph is None:
raise AngrGirlScoutError('Please generate the call graph first.') # depends on [control=['if'], data=[]]
f = open(filepath, 'wb')
for (src, dst) in graph.edges():
f.write('0x%x\tDirectEdge\t0x%x\n' % (src, dst)) # depends on [control=['for'], data=[]]
f.close() |
def get_token(http, service_account='default'):
    """Fetch an OAuth access token for a compute-engine service account.

    Args:
        http: an object to be used to make HTTP requests.
        service_account: An email specifying the service account this token
            should represent. Default will be a token for the "default"
            service account of the current compute engine instance.

    Returns:
        A tuple of (access token, token expiration), where access token is
        the access token as a string and token expiration is a datetime
        object that indicates when the access token will expire.
    """
    path = 'instance/service-accounts/{0}/token'.format(service_account)
    token_json = get(http, path)
    lifetime = datetime.timedelta(seconds=token_json['expires_in'])
    token_expiry = client._UTCNOW() + lifetime
    return token_json['access_token'], token_expiry
constant[Fetch an oauth token for the
Args:
http: an object to be used to make HTTP requests.
service_account: An email specifying the service account this token
should represent. Default will be a token for the "default" service
account of the current compute engine instance.
Returns:
A tuple of (access token, token expiration), where access token is the
access token as a string and token expiration is a datetime object
that indicates when the access token will expire.
]
variable[token_json] assign[=] call[name[get], parameter[name[http], call[constant[instance/service-accounts/{0}/token].format, parameter[name[service_account]]]]]
variable[token_expiry] assign[=] binary_operation[call[name[client]._UTCNOW, parameter[]] + call[name[datetime].timedelta, parameter[]]]
return[tuple[[<ast.Subscript object at 0x7da1b014df90>, <ast.Name object at 0x7da1b014caf0>]]] | keyword[def] identifier[get_token] ( identifier[http] , identifier[service_account] = literal[string] ):
literal[string]
identifier[token_json] = identifier[get] (
identifier[http] ,
literal[string] . identifier[format] ( identifier[service_account] ))
identifier[token_expiry] = identifier[client] . identifier[_UTCNOW] ()+ identifier[datetime] . identifier[timedelta] (
identifier[seconds] = identifier[token_json] [ literal[string] ])
keyword[return] identifier[token_json] [ literal[string] ], identifier[token_expiry] | def get_token(http, service_account='default'):
"""Fetch an oauth token for the
Args:
http: an object to be used to make HTTP requests.
service_account: An email specifying the service account this token
should represent. Default will be a token for the "default" service
account of the current compute engine instance.
Returns:
A tuple of (access token, token expiration), where access token is the
access token as a string and token expiration is a datetime object
that indicates when the access token will expire.
"""
token_json = get(http, 'instance/service-accounts/{0}/token'.format(service_account))
token_expiry = client._UTCNOW() + datetime.timedelta(seconds=token_json['expires_in'])
return (token_json['access_token'], token_expiry) |
def upsert(self, dataset_identifier, payload, content_type="json"):
    """
    Perform a Socrata upsert: insert, update or delete rows in an existing
    dataset. JSON and CSV file objects are currently supported; see
    http://dev.socrata.com/publishers/upsert.html for the upsert
    documentation.
    """
    target = _format_new_api_request(dataid=dataset_identifier,
                                     content_type=content_type)
    return self._perform_update("post", target, payload)
constant[
Insert, update or delete data to/from an existing dataset. Currently
supports json and csv file objects. See here for the upsert
documentation:
http://dev.socrata.com/publishers/upsert.html
]
variable[resource] assign[=] call[name[_format_new_api_request], parameter[]]
return[call[name[self]._perform_update, parameter[constant[post], name[resource], name[payload]]]] | keyword[def] identifier[upsert] ( identifier[self] , identifier[dataset_identifier] , identifier[payload] , identifier[content_type] = literal[string] ):
literal[string]
identifier[resource] = identifier[_format_new_api_request] ( identifier[dataid] = identifier[dataset_identifier] , identifier[content_type] = identifier[content_type] )
keyword[return] identifier[self] . identifier[_perform_update] ( literal[string] , identifier[resource] , identifier[payload] ) | def upsert(self, dataset_identifier, payload, content_type='json'):
"""
Insert, update or delete data to/from an existing dataset. Currently
supports json and csv file objects. See here for the upsert
documentation:
http://dev.socrata.com/publishers/upsert.html
"""
resource = _format_new_api_request(dataid=dataset_identifier, content_type=content_type)
return self._perform_update('post', resource, payload) |
def _set_get_vcs_details(self, v, load=False):
    """
    Setter method for get_vcs_details, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details (rpc)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_get_vcs_details is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_get_vcs_details() directly.
    """
    # NOTE: generated pyangbind-style setter -- keep the YANGDynClass
    # arguments in sync with the YANG model if it ever changes.
    if hasattr(v, "_utype"):
        # Unwrap a value carrying its own YANG type wrapper back to its
        # base representation before re-validating it below.
        v = v._utype(v)
    try:
        # Validate/coerce the incoming value against the generated RPC
        # class; an incompatible value raises TypeError or ValueError.
        t = YANGDynClass(v,base=get_vcs_details.get_vcs_details, is_leaf=True, yang_name="get-vcs-details", rest_name="get-vcs-details", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'getvcsdetails-action-point'}}, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='rpc', is_config=True)
    except (TypeError, ValueError):
        # Re-raise as ValueError with a structured payload describing the
        # expected type, matching the convention of the other setters.
        raise ValueError({
            'error-string': """get_vcs_details must be of a type compatible with rpc""",
            'defined-type': "rpc",
            'generated-type': """YANGDynClass(base=get_vcs_details.get_vcs_details, is_leaf=True, yang_name="get-vcs-details", rest_name="get-vcs-details", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'getvcsdetails-action-point'}}, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='rpc', is_config=True)""",
        })
    # Store the validated value, then let the framework run any registered
    # change hook.
    self.__get_vcs_details = t
    if hasattr(self, '_set'):
        self._set()
constant[
Setter method for get_vcs_details, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_get_vcs_details is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_get_vcs_details() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18f722380>
name[self].__get_vcs_details assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_get_vcs_details] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[get_vcs_details] . identifier[get_vcs_details] , identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[False] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__get_vcs_details] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_get_vcs_details(self, v, load=False):
"""
Setter method for get_vcs_details, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_get_vcs_details is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_get_vcs_details() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=get_vcs_details.get_vcs_details, is_leaf=True, yang_name='get-vcs-details', rest_name='get-vcs-details', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'getvcsdetails-action-point'}}, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='rpc', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'get_vcs_details must be of a type compatible with rpc', 'defined-type': 'rpc', 'generated-type': 'YANGDynClass(base=get_vcs_details.get_vcs_details, is_leaf=True, yang_name="get-vcs-details", rest_name="get-vcs-details", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u\'tailf-common\': {u\'hidden\': u\'rpccmd\', u\'actionpoint\': u\'getvcsdetails-action-point\'}}, namespace=\'urn:brocade.com:mgmt:brocade-vcs\', defining_module=\'brocade-vcs\', yang_type=\'rpc\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__get_vcs_details = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def _has_commit(version, debug=False):
    """
    Determine a version is a local git commit sha or not.
    :param version: A string containing the branch/tag/sha to be determined.
    :param debug: An optional bool to toggle debug output.
    :return: bool
    """
    # A name that resolves to a tag or branch is, by definition, not a bare sha.
    if _has_tag(version, debug):
        return False
    if _has_branch(version, debug):
        return False
    # `git cat-file -e <sha>` exits non-zero when the object does not exist.
    probe = sh.git.bake('cat-file', '-e', version)
    try:
        util.run_command(probe, debug=debug)
    except sh.ErrorReturnCode:
        return False
    return True
constant[
Determine a version is a local git commit sha or not.
:param version: A string containing the branch/tag/sha to be determined.
:param debug: An optional bool to toggle debug output.
:return: bool
]
if <ast.BoolOp object at 0x7da1b12622c0> begin[:]
return[constant[False]]
variable[cmd] assign[=] call[name[sh].git.bake, parameter[constant[cat-file], constant[-e], name[version]]]
<ast.Try object at 0x7da1b1279870> | keyword[def] identifier[_has_commit] ( identifier[version] , identifier[debug] = keyword[False] ):
literal[string]
keyword[if] identifier[_has_tag] ( identifier[version] , identifier[debug] ) keyword[or] identifier[_has_branch] ( identifier[version] , identifier[debug] ):
keyword[return] keyword[False]
identifier[cmd] = identifier[sh] . identifier[git] . identifier[bake] ( literal[string] , literal[string] , identifier[version] )
keyword[try] :
identifier[util] . identifier[run_command] ( identifier[cmd] , identifier[debug] = identifier[debug] )
keyword[return] keyword[True]
keyword[except] identifier[sh] . identifier[ErrorReturnCode] :
keyword[return] keyword[False] | def _has_commit(version, debug=False):
"""
Determine a version is a local git commit sha or not.
:param version: A string containing the branch/tag/sha to be determined.
:param debug: An optional bool to toggle debug output.
:return: bool
"""
if _has_tag(version, debug) or _has_branch(version, debug):
return False # depends on [control=['if'], data=[]]
cmd = sh.git.bake('cat-file', '-e', version)
try:
util.run_command(cmd, debug=debug)
return True # depends on [control=['try'], data=[]]
except sh.ErrorReturnCode:
return False # depends on [control=['except'], data=[]] |
def index(
        config, date=None, directory=None, concurrency=5, accounts=None,
        tag=None, verbose=False):
    """index traildbs directly from s3 for multiple accounts.
    context: assumes a daily traildb file in s3 with dated key path
    """
    # Root logger at DEBUG only when asked; silence the chatty libraries.
    logging.basicConfig(
        level=logging.DEBUG if verbose else logging.INFO)
    for noisy in ('botocore', 'elasticsearch', 'urllib3', 'requests'):
        logging.getLogger(noisy).setLevel(logging.WARNING)
    logging.getLogger('c7n.worker').setLevel(logging.INFO)

    with open(config) as fh:
        config = yaml.safe_load(fh.read())
    jsonschema.validate(config, CONFIG_SCHEMA)

    date = get_date_path(date, delta=24)
    directory = directory or "/tmp"

    # Build the (config, account, region, date, directory) work items,
    # honoring the optional account-name and tag filters.
    jobs = []
    for account in config.get('accounts'):
        if accounts and account['name'] not in accounts:
            continue
        if tag and tag not in account['tags'].values():
            continue
        for region in account.get('regions'):
            jobs.append((config, account, region, date, directory))

    with ProcessPoolExecutor(max_workers=concurrency) as w:
        futures = {}
        for job in jobs:
            log.debug("submit account:{} region:{} date:{}".format(
                job[1]['name'], job[2], job[3]))
            futures[w.submit(index_account_trails, *job)] = job
        # Drain results as they finish; a failed job is logged and skipped.
        for fut in as_completed(futures):
            config, account, region, date, directory = futures[fut]
            if fut.exception():
                log.warning("error account:{} region:{} error:{}".format(
                    account['name'], region, fut.exception()))
                continue
            log.info("complete account:{} region:{}".format(
                account['name'], region))
constant[index traildbs directly from s3 for multiple accounts.
context: assumes a daily traildb file in s3 with dated key path
]
call[name[logging].basicConfig, parameter[]]
call[call[name[logging].getLogger, parameter[constant[botocore]]].setLevel, parameter[name[logging].WARNING]]
call[call[name[logging].getLogger, parameter[constant[elasticsearch]]].setLevel, parameter[name[logging].WARNING]]
call[call[name[logging].getLogger, parameter[constant[urllib3]]].setLevel, parameter[name[logging].WARNING]]
call[call[name[logging].getLogger, parameter[constant[requests]]].setLevel, parameter[name[logging].WARNING]]
call[call[name[logging].getLogger, parameter[constant[c7n.worker]]].setLevel, parameter[name[logging].INFO]]
with call[name[open], parameter[name[config]]] begin[:]
variable[config] assign[=] call[name[yaml].safe_load, parameter[call[name[fh].read, parameter[]]]]
call[name[jsonschema].validate, parameter[name[config], name[CONFIG_SCHEMA]]]
variable[date] assign[=] call[name[get_date_path], parameter[name[date]]]
variable[directory] assign[=] <ast.BoolOp object at 0x7da18f58cf40>
with call[name[ProcessPoolExecutor], parameter[]] begin[:]
variable[futures] assign[=] dictionary[[], []]
variable[jobs] assign[=] list[[]]
for taget[name[account]] in starred[call[name[config].get, parameter[constant[accounts]]]] begin[:]
if <ast.BoolOp object at 0x7da1b1ff43a0> begin[:]
continue
if name[tag] begin[:]
variable[found] assign[=] constant[False]
for taget[name[t]] in starred[call[call[name[account]][constant[tags]].values, parameter[]]] begin[:]
if compare[name[tag] equal[==] name[t]] begin[:]
variable[found] assign[=] constant[True]
break
if <ast.UnaryOp object at 0x7da1b1ff5060> begin[:]
continue
for taget[name[region]] in starred[call[name[account].get, parameter[constant[regions]]]] begin[:]
variable[p] assign[=] tuple[[<ast.Name object at 0x7da1b1ff4460>, <ast.Name object at 0x7da1b1ff4700>, <ast.Name object at 0x7da1b1ff4220>, <ast.Name object at 0x7da1b1ff4940>, <ast.Name object at 0x7da1b1ff4070>]]
call[name[jobs].append, parameter[name[p]]]
for taget[name[j]] in starred[name[jobs]] begin[:]
call[name[log].debug, parameter[call[constant[submit account:{} region:{} date:{}].format, parameter[call[call[name[j]][constant[1]]][constant[name]], call[name[j]][constant[2]], call[name[j]][constant[3]]]]]]
call[name[futures]][call[name[w].submit, parameter[name[index_account_trails], <ast.Starred object at 0x7da1b1ff5000>]]] assign[=] name[j]
for taget[name[f]] in starred[call[name[as_completed], parameter[name[futures]]]] begin[:]
<ast.Tuple object at 0x7da1b1ff41c0> assign[=] call[name[futures]][name[f]]
if call[name[f].exception, parameter[]] begin[:]
call[name[log].warning, parameter[call[constant[error account:{} region:{} error:{}].format, parameter[call[name[account]][constant[name]], name[region], call[name[f].exception, parameter[]]]]]]
continue
call[name[log].info, parameter[call[constant[complete account:{} region:{}].format, parameter[call[name[account]][constant[name]], name[region]]]]] | keyword[def] identifier[index] (
identifier[config] , identifier[date] = keyword[None] , identifier[directory] = keyword[None] , identifier[concurrency] = literal[int] , identifier[accounts] = keyword[None] ,
identifier[tag] = keyword[None] , identifier[verbose] = keyword[False] ):
literal[string]
identifier[logging] . identifier[basicConfig] ( identifier[level] =( identifier[verbose] keyword[and] identifier[logging] . identifier[DEBUG] keyword[or] identifier[logging] . identifier[INFO] ))
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[WARNING] )
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[WARNING] )
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[WARNING] )
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[WARNING] )
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[INFO] )
keyword[with] identifier[open] ( identifier[config] ) keyword[as] identifier[fh] :
identifier[config] = identifier[yaml] . identifier[safe_load] ( identifier[fh] . identifier[read] ())
identifier[jsonschema] . identifier[validate] ( identifier[config] , identifier[CONFIG_SCHEMA] )
identifier[date] = identifier[get_date_path] ( identifier[date] , identifier[delta] = literal[int] )
identifier[directory] = identifier[directory] keyword[or] literal[string]
keyword[with] identifier[ProcessPoolExecutor] ( identifier[max_workers] = identifier[concurrency] ) keyword[as] identifier[w] :
identifier[futures] ={}
identifier[jobs] =[]
keyword[for] identifier[account] keyword[in] identifier[config] . identifier[get] ( literal[string] ):
keyword[if] identifier[accounts] keyword[and] identifier[account] [ literal[string] ] keyword[not] keyword[in] identifier[accounts] :
keyword[continue]
keyword[if] identifier[tag] :
identifier[found] = keyword[False]
keyword[for] identifier[t] keyword[in] identifier[account] [ literal[string] ]. identifier[values] ():
keyword[if] identifier[tag] == identifier[t] :
identifier[found] = keyword[True]
keyword[break]
keyword[if] keyword[not] identifier[found] :
keyword[continue]
keyword[for] identifier[region] keyword[in] identifier[account] . identifier[get] ( literal[string] ):
identifier[p] =( identifier[config] , identifier[account] , identifier[region] , identifier[date] , identifier[directory] )
identifier[jobs] . identifier[append] ( identifier[p] )
keyword[for] identifier[j] keyword[in] identifier[jobs] :
identifier[log] . identifier[debug] ( literal[string] . identifier[format] (
identifier[j] [ literal[int] ][ literal[string] ], identifier[j] [ literal[int] ], identifier[j] [ literal[int] ]))
identifier[futures] [ identifier[w] . identifier[submit] ( identifier[index_account_trails] ,* identifier[j] )]= identifier[j]
keyword[for] identifier[f] keyword[in] identifier[as_completed] ( identifier[futures] ):
identifier[config] , identifier[account] , identifier[region] , identifier[date] , identifier[directory] = identifier[futures] [ identifier[f] ]
keyword[if] identifier[f] . identifier[exception] ():
identifier[log] . identifier[warning] ( literal[string] . identifier[format] (
identifier[account] [ literal[string] ], identifier[region] , identifier[f] . identifier[exception] ()))
keyword[continue]
identifier[log] . identifier[info] ( literal[string] . identifier[format] (
identifier[account] [ literal[string] ], identifier[region] )) | def index(config, date=None, directory=None, concurrency=5, accounts=None, tag=None, verbose=False):
"""index traildbs directly from s3 for multiple accounts.
context: assumes a daily traildb file in s3 with dated key path
"""
logging.basicConfig(level=verbose and logging.DEBUG or logging.INFO)
logging.getLogger('botocore').setLevel(logging.WARNING)
logging.getLogger('elasticsearch').setLevel(logging.WARNING)
logging.getLogger('urllib3').setLevel(logging.WARNING)
logging.getLogger('requests').setLevel(logging.WARNING)
logging.getLogger('c7n.worker').setLevel(logging.INFO)
with open(config) as fh:
config = yaml.safe_load(fh.read()) # depends on [control=['with'], data=['fh']]
jsonschema.validate(config, CONFIG_SCHEMA)
date = get_date_path(date, delta=24)
directory = directory or '/tmp'
with ProcessPoolExecutor(max_workers=concurrency) as w:
futures = {}
jobs = []
for account in config.get('accounts'):
if accounts and account['name'] not in accounts:
continue # depends on [control=['if'], data=[]]
if tag:
found = False
for t in account['tags'].values():
if tag == t:
found = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['t']]
if not found:
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
for region in account.get('regions'):
p = (config, account, region, date, directory)
jobs.append(p) # depends on [control=['for'], data=['region']] # depends on [control=['for'], data=['account']]
for j in jobs:
log.debug('submit account:{} region:{} date:{}'.format(j[1]['name'], j[2], j[3]))
futures[w.submit(index_account_trails, *j)] = j # depends on [control=['for'], data=['j']]
# Process completed
for f in as_completed(futures):
(config, account, region, date, directory) = futures[f]
if f.exception():
log.warning('error account:{} region:{} error:{}'.format(account['name'], region, f.exception()))
continue # depends on [control=['if'], data=[]]
log.info('complete account:{} region:{}'.format(account['name'], region)) # depends on [control=['for'], data=['f']] # depends on [control=['with'], data=['w']] |
def _unsetLearningMode(self):
    """
    Turn learning off on every TM region, then defer to the parent class
    so inference can begin.
    """
    for tmRegion in self.TMRegions:
        tmRegion.setParameter("learn", False)
    super(L4TMExperiment, self)._unsetLearningMode()
constant[
Unsets the learning mode, to start inference.
]
for taget[name[region]] in starred[name[self].TMRegions] begin[:]
call[name[region].setParameter, parameter[constant[learn], constant[False]]]
call[call[name[super], parameter[name[L4TMExperiment], name[self]]]._unsetLearningMode, parameter[]] | keyword[def] identifier[_unsetLearningMode] ( identifier[self] ):
literal[string]
keyword[for] identifier[region] keyword[in] identifier[self] . identifier[TMRegions] :
identifier[region] . identifier[setParameter] ( literal[string] , keyword[False] )
identifier[super] ( identifier[L4TMExperiment] , identifier[self] ). identifier[_unsetLearningMode] () | def _unsetLearningMode(self):
"""
Unsets the learning mode, to start inference.
"""
for region in self.TMRegions:
region.setParameter('learn', False) # depends on [control=['for'], data=['region']]
super(L4TMExperiment, self)._unsetLearningMode() |
def read_filepath(filepath, encoding='utf-8'):
    """Read `filepath` and return its decoded text content."""
    handle = codecs.open(filepath, 'r', encoding=encoding)
    try:
        return handle.read()
    finally:
        handle.close()
constant[Returns the text content of `filepath`]
with call[name[codecs].open, parameter[name[filepath], constant[r]]] begin[:]
return[call[name[fo].read, parameter[]]] | keyword[def] identifier[read_filepath] ( identifier[filepath] , identifier[encoding] = literal[string] ):
literal[string]
keyword[with] identifier[codecs] . identifier[open] ( identifier[filepath] , literal[string] , identifier[encoding] = identifier[encoding] ) keyword[as] identifier[fo] :
keyword[return] identifier[fo] . identifier[read] () | def read_filepath(filepath, encoding='utf-8'):
"""Returns the text content of `filepath`"""
with codecs.open(filepath, 'r', encoding=encoding) as fo:
return fo.read() # depends on [control=['with'], data=['fo']] |
def continueLine(self):
    """Append a Fortran line-continuation marker to the current line."""
    if self.isLong and self.is_regular:
        # Fixed-form long line: marker goes at column 72, excess follows.
        head = self.line_conv[:72].rstrip() + " &"
        self.line_conv = head.ljust(72) + self.excess_line
    else:
        self.line_conv = self.line_conv.rstrip() + " &\n"
constant[Insert line continuation symbol at end of line.]
if <ast.UnaryOp object at 0x7da1b17deaa0> begin[:]
name[self].line_conv assign[=] binary_operation[call[name[self].line_conv.rstrip, parameter[]] + constant[ &
]] | keyword[def] identifier[continueLine] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] ( identifier[self] . identifier[isLong] keyword[and] identifier[self] . identifier[is_regular] ):
identifier[self] . identifier[line_conv] = identifier[self] . identifier[line_conv] . identifier[rstrip] ()+ literal[string]
keyword[else] :
identifier[temp] = identifier[self] . identifier[line_conv] [: literal[int] ]. identifier[rstrip] ()+ literal[string]
identifier[self] . identifier[line_conv] = identifier[temp] . identifier[ljust] ( literal[int] )+ identifier[self] . identifier[excess_line] | def continueLine(self):
"""Insert line continuation symbol at end of line."""
if not (self.isLong and self.is_regular):
self.line_conv = self.line_conv.rstrip() + ' &\n' # depends on [control=['if'], data=[]]
else:
temp = self.line_conv[:72].rstrip() + ' &'
self.line_conv = temp.ljust(72) + self.excess_line |
def view_vector(self, vector, viewup=None):
    """Aim the camera along the given vector toward the scene center."""
    if viewup is None:
        # Fall back to the configured default up-direction.
        viewup = rcParams['camera']['viewup']
    focus = self.center
    position = vector + np.array(focus)
    self.camera_position = [position, focus, viewup]
    return self.reset_camera()
constant[Point the camera in the direction of the given vector]
variable[focal_pt] assign[=] name[self].center
if compare[name[viewup] is constant[None]] begin[:]
variable[viewup] assign[=] call[call[name[rcParams]][constant[camera]]][constant[viewup]]
variable[cpos] assign[=] list[[<ast.BinOp object at 0x7da18f58db70>, <ast.Name object at 0x7da18f58c4c0>, <ast.Name object at 0x7da18f58f5b0>]]
name[self].camera_position assign[=] name[cpos]
return[call[name[self].reset_camera, parameter[]]] | keyword[def] identifier[view_vector] ( identifier[self] , identifier[vector] , identifier[viewup] = keyword[None] ):
literal[string]
identifier[focal_pt] = identifier[self] . identifier[center]
keyword[if] identifier[viewup] keyword[is] keyword[None] :
identifier[viewup] = identifier[rcParams] [ literal[string] ][ literal[string] ]
identifier[cpos] =[ identifier[vector] + identifier[np] . identifier[array] ( identifier[focal_pt] ),
identifier[focal_pt] , identifier[viewup] ]
identifier[self] . identifier[camera_position] = identifier[cpos]
keyword[return] identifier[self] . identifier[reset_camera] () | def view_vector(self, vector, viewup=None):
"""Point the camera in the direction of the given vector"""
focal_pt = self.center
if viewup is None:
viewup = rcParams['camera']['viewup'] # depends on [control=['if'], data=['viewup']]
cpos = [vector + np.array(focal_pt), focal_pt, viewup]
self.camera_position = cpos
return self.reset_camera() |
def _truthyConfValue(v):
''' Determine yotta-config truthiness. In yotta config land truthiness is
different to python or json truthiness (in order to map nicely only
preprocessor and CMake definediness):
json -> python -> truthy/falsey
false -> False -> Falsey
null -> None -> Falsey
undefined -> None -> Falsey
0 -> 0 -> Falsey
"" -> "" -> Truthy (different from python)
"0" -> "0" -> Truthy
{} -> {} -> Truthy (different from python)
[] -> [] -> Truthy (different from python)
everything else is truthy
'''
if v is False:
return False
elif v is None:
return False
elif v == 0:
return False
else:
# everything else is truthy!
return True | def function[_truthyConfValue, parameter[v]]:
constant[ Determine yotta-config truthiness. In yotta config land truthiness is
different to python or json truthiness (in order to map nicely only
preprocessor and CMake definediness):
json -> python -> truthy/falsey
false -> False -> Falsey
null -> None -> Falsey
undefined -> None -> Falsey
0 -> 0 -> Falsey
"" -> "" -> Truthy (different from python)
"0" -> "0" -> Truthy
{} -> {} -> Truthy (different from python)
[] -> [] -> Truthy (different from python)
everything else is truthy
]
if compare[name[v] is constant[False]] begin[:]
return[constant[False]] | keyword[def] identifier[_truthyConfValue] ( identifier[v] ):
literal[string]
keyword[if] identifier[v] keyword[is] keyword[False] :
keyword[return] keyword[False]
keyword[elif] identifier[v] keyword[is] keyword[None] :
keyword[return] keyword[False]
keyword[elif] identifier[v] == literal[int] :
keyword[return] keyword[False]
keyword[else] :
keyword[return] keyword[True] | def _truthyConfValue(v):
""" Determine yotta-config truthiness. In yotta config land truthiness is
different to python or json truthiness (in order to map nicely only
preprocessor and CMake definediness):
json -> python -> truthy/falsey
false -> False -> Falsey
null -> None -> Falsey
undefined -> None -> Falsey
0 -> 0 -> Falsey
"" -> "" -> Truthy (different from python)
"0" -> "0" -> Truthy
{} -> {} -> Truthy (different from python)
[] -> [] -> Truthy (different from python)
everything else is truthy
"""
if v is False:
return False # depends on [control=['if'], data=[]]
elif v is None:
return False # depends on [control=['if'], data=[]]
elif v == 0:
return False # depends on [control=['if'], data=[]]
else:
# everything else is truthy!
return True |
def search(self, query):
    """ Perform request tracker search """
    log.debug("Query: {0}".format(query))
    # Assemble the query path: id+subject format, ascending by id.
    encoded = urllib.quote(query)
    path = self.url.path + '?Format=__id__+__Subject__'
    path = path + "&Order=ASC&OrderBy=id&Query=" + encoded
    # Fetch matching lines and wrap each one in a ticket object.
    lines = self.get(path)
    log.info(u"Fetched tickets: {0}".format(len(lines)))
    tickets = []
    for line in lines:
        tickets.append(self.parent.ticket(line, self.parent))
    return tickets
return [self.parent.ticket(line, self.parent) for line in lines] | def function[search, parameter[self, query]]:
constant[ Perform request tracker search ]
call[name[log].debug, parameter[call[constant[Query: {0}].format, parameter[name[query]]]]]
variable[path] assign[=] binary_operation[name[self].url.path + constant[?Format=__id__+__Subject__]]
<ast.AugAssign object at 0x7da1b208aa40>
variable[lines] assign[=] call[name[self].get, parameter[name[path]]]
call[name[log].info, parameter[call[constant[Fetched tickets: {0}].format, parameter[call[name[len], parameter[name[lines]]]]]]]
return[<ast.ListComp object at 0x7da1b208bac0>] | keyword[def] identifier[search] ( identifier[self] , identifier[query] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[query] ))
identifier[path] = identifier[self] . identifier[url] . identifier[path] + literal[string]
identifier[path] += literal[string] + identifier[urllib] . identifier[quote] ( identifier[query] )
identifier[lines] = identifier[self] . identifier[get] ( identifier[path] )
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[len] ( identifier[lines] )))
keyword[return] [ identifier[self] . identifier[parent] . identifier[ticket] ( identifier[line] , identifier[self] . identifier[parent] ) keyword[for] identifier[line] keyword[in] identifier[lines] ] | def search(self, query):
""" Perform request tracker search """
# Prepare the path
log.debug('Query: {0}'.format(query))
path = self.url.path + '?Format=__id__+__Subject__'
path += '&Order=ASC&OrderBy=id&Query=' + urllib.quote(query)
# Get the tickets
lines = self.get(path)
log.info(u'Fetched tickets: {0}'.format(len(lines)))
return [self.parent.ticket(line, self.parent) for line in lines] |
def from_json_file(cls, path):
    """
    Build a template instance from a JSON file on disk.
    :param path: string
    :return: ServiceAgreementTemplate
    """
    with open(path) as handle:
        payload = json.loads(handle.read())
    return cls(template_json=payload)
return cls(template_json=template_json) | def function[from_json_file, parameter[cls, path]]:
constant[
Return a template from a json allocated in a path.
:param path: string
:return: ServiceAgreementTemplate
]
with call[name[open], parameter[name[path]]] begin[:]
variable[template_json] assign[=] call[name[json].load, parameter[name[jsf]]]
return[call[name[cls], parameter[]]] | keyword[def] identifier[from_json_file] ( identifier[cls] , identifier[path] ):
literal[string]
keyword[with] identifier[open] ( identifier[path] ) keyword[as] identifier[jsf] :
identifier[template_json] = identifier[json] . identifier[load] ( identifier[jsf] )
keyword[return] identifier[cls] ( identifier[template_json] = identifier[template_json] ) | def from_json_file(cls, path):
"""
Return a template from a json allocated in a path.
:param path: string
:return: ServiceAgreementTemplate
"""
with open(path) as jsf:
template_json = json.load(jsf)
return cls(template_json=template_json) # depends on [control=['with'], data=['jsf']] |
def snr_ratio(in1, in2):
    """
    The following function simply calculates the signal to noise ratio between two signals.
    INPUTS:
    in1  (no default):  Array containing values for signal 1.
    in2  (no default):  Array containing values for signal 2.
    OUTPUTS:
    out1                The ratio of the signal to noise ratios of two signals.
    """
    signal_norm = np.linalg.norm(in1)
    noise_norm = np.linalg.norm(in1 - in2)
    # Standard dB conversion: 20*log10 of the amplitude ratio.
    return 20 * np.log10(signal_norm / noise_norm)
constant[
The following function simply calculates the signal to noise ratio between two signals.
INPUTS:
in1 (no default): Array containing values for signal 1.
in2 (no default): Array containing values for signal 2.
OUTPUTS:
out1 The ratio of the signal to noise ratios of two signals.
]
variable[out1] assign[=] binary_operation[constant[20] * call[name[np].log10, parameter[binary_operation[call[name[np].linalg.norm, parameter[name[in1]]] / call[name[np].linalg.norm, parameter[binary_operation[name[in1] - name[in2]]]]]]]]
return[name[out1]] | keyword[def] identifier[snr_ratio] ( identifier[in1] , identifier[in2] ):
literal[string]
identifier[out1] = literal[int] *( identifier[np] . identifier[log10] ( identifier[np] . identifier[linalg] . identifier[norm] ( identifier[in1] )/ identifier[np] . identifier[linalg] . identifier[norm] ( identifier[in1] - identifier[in2] )))
keyword[return] identifier[out1] | def snr_ratio(in1, in2):
"""
The following function simply calculates the signal to noise ratio between two signals.
INPUTS:
in1 (no default): Array containing values for signal 1.
in2 (no default): Array containing values for signal 2.
OUTPUTS:
out1 The ratio of the signal to noise ratios of two signals.
"""
out1 = 20 * np.log10(np.linalg.norm(in1) / np.linalg.norm(in1 - in2))
return out1 |
def ziegler_nichols(self, ku, tu, control_type='pid'):
    '''
    Set self.kp/ki/kd from Ziegler-Nichols tuning rules.
    ku = ultimate gain
    tu = period of oscillation at ultimate gain
    '''
    kind = control_type.lower()
    # Each rule scales ku into kp, then derives ki and kd from kp and tu.
    if kind == 'p':
        kp = .5 * ku
        gains = (kp, 0, 0)
    elif kind == 'pi':
        kp = .45 * ku
        gains = (kp, 1.2 * kp / tu, 0)
    elif kind == 'pd':
        kp = .8 * ku
        gains = (kp, 0, kp * tu / 8)
    elif kind == 'pid':
        kp = .6 * ku
        gains = (kp, 2 * kp / tu, kp * tu / 8)
    elif kind == 'pessen':
        kp = .7 * ku
        gains = (kp, 2.5 * kp / tu, 3 * kp * tu / 20)
    elif kind == 'some_overshoot':
        kp = .33 * ku
        gains = (kp, 2 * kp / tu, kp * tu / 3)
    elif kind == 'no_overshoot':
        kp = .2 * ku
        gains = (kp, 2 * kp / tu, kp * tu / 3)
    else:
        # Mirror the dict-lookup failure of the original implementation.
        raise KeyError(kind)
    self.kp, self.ki, self.kd = gains
constant[
ku = ultimate gain
tu = period of oscillation at ultimate gain
]
variable[converter] assign[=] call[name[dict], parameter[]]
<ast.Tuple object at 0x7da18f09ec50> assign[=] call[call[name[converter]][call[name[control_type].lower, parameter[]]], parameter[name[ku], name[tu]]] | keyword[def] identifier[ziegler_nichols] ( identifier[self] , identifier[ku] , identifier[tu] , identifier[control_type] = literal[string] ):
literal[string]
identifier[converter] = identifier[dict] (
identifier[p] = keyword[lambda] identifier[ku] , identifier[tu] :( literal[int] * identifier[ku] , literal[int] , literal[int] ),
identifier[pi] = keyword[lambda] identifier[ku] , identifier[tu] :( literal[int] * identifier[ku] , literal[int] *( literal[int] * identifier[ku] )/ identifier[tu] , literal[int] ),
identifier[pd] = keyword[lambda] identifier[ku] , identifier[tu] :( literal[int] * identifier[ku] , literal[int] ,( literal[int] * identifier[ku] )* identifier[tu] / literal[int] ),
identifier[pid] = keyword[lambda] identifier[ku] , identifier[tu] :( literal[int] * identifier[ku] , literal[int] *( literal[int] * identifier[ku] )/ identifier[tu] ,( literal[int] * identifier[ku] )* identifier[tu] / literal[int] ),
identifier[pessen] = keyword[lambda] identifier[ku] , identifier[tu] :( literal[int] * identifier[ku] , literal[int] *( literal[int] * identifier[ku] )/ identifier[tu] , literal[int] *( literal[int] * identifier[ku] )* identifier[tu] / literal[int] ),
identifier[some_overshoot] = keyword[lambda] identifier[ku] , identifier[tu] :( literal[int] * identifier[ku] , literal[int] *( literal[int] * identifier[ku] )/ identifier[tu] ,( literal[int] * identifier[ku] )* identifier[tu] / literal[int] ),
identifier[no_overshoot] = keyword[lambda] identifier[ku] , identifier[tu] :( literal[int] * identifier[ku] , literal[int] *( literal[int] * identifier[ku] )/ identifier[tu] ,( literal[int] * identifier[ku] )* identifier[tu] / literal[int] )
)
identifier[self] . identifier[kp] , identifier[self] . identifier[ki] , identifier[self] . identifier[kd] = identifier[converter] [ identifier[control_type] . identifier[lower] ()]( identifier[ku] , identifier[tu] ) | def ziegler_nichols(self, ku, tu, control_type='pid'):
"""
ku = ultimate gain
tu = period of oscillation at ultimate gain
"""
converter = dict(p=lambda ku, tu: (0.5 * ku, 0, 0), pi=lambda ku, tu: (0.45 * ku, 1.2 * (0.45 * ku) / tu, 0), pd=lambda ku, tu: (0.8 * ku, 0, 0.8 * ku * tu / 8), pid=lambda ku, tu: (0.6 * ku, 2 * (0.6 * ku) / tu, 0.6 * ku * tu / 8), pessen=lambda ku, tu: (0.7 * ku, 2.5 * (0.7 * ku) / tu, 3 * (0.7 * ku) * tu / 20), some_overshoot=lambda ku, tu: (0.33 * ku, 2 * (0.33 * ku) / tu, 0.33 * ku * tu / 3), no_overshoot=lambda ku, tu: (0.2 * ku, 2 * (0.2 * ku) / tu, 0.2 * ku * tu / 3))
(self.kp, self.ki, self.kd) = converter[control_type.lower()](ku, tu) |
def list_dir_abspath(path):
    """
    Return a list of the entries of *path*, each joined onto *path*.

    Results are absolute only when *path* itself is absolute. Returns a
    real list rather than a lazy ``map`` object, so the result can be
    indexed, measured with ``len``, and iterated more than once.
    """
    return [os.path.join(path, entry) for entry in os.listdir(path)]
constant[
Return a list absolute file paths.
see mkdir_p os.listdir.
]
return[call[name[map], parameter[<ast.Lambda object at 0x7da1b1605a20>, call[name[os].listdir, parameter[name[path]]]]]] | keyword[def] identifier[list_dir_abspath] ( identifier[path] ):
literal[string]
keyword[return] identifier[map] ( keyword[lambda] identifier[f] : identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[f] ), identifier[os] . identifier[listdir] ( identifier[path] )) | def list_dir_abspath(path):
"""
Return a list absolute file paths.
see mkdir_p os.listdir.
"""
return map(lambda f: os.path.join(path, f), os.listdir(path)) |
def _ip_unnumbered_type(self, **kwargs):
    """Return the `ip unnumbered` donor type XML.
    You should not use this method.
    You probably want `Interface.ip_unnumbered`.
    Args:
        int_type (str): Type of interface. (gigabitethernet,
            tengigabitethernet etc).
        delete (bool): Remove the configuration if ``True``.
        ip_donor_interface_type (str): The donor interface type (loopback)
    Returns:
        XML to be passed to the switch.
    Raises:
        None
    """
    # The backend exposes one generated builder per interface type.
    builder = getattr(
        self._interface,
        'interface_%s_ip_ip_config_unnumbered_ip_donor_'
        'interface_type' % kwargs['int_type'])
    config = builder(**kwargs)
    if kwargs['delete']:
        # Mark the donor-type node for deletion in the generated XML.
        node = config.find('.//*%s' % 'ip-donor-interface-type')
        node.set('operation', 'delete')
    return config
constant[Return the `ip unnumbered` donor type XML.
You should not use this method.
You probably want `Interface.ip_unnumbered`.
Args:
int_type (str): Type of interface. (gigabitethernet,
tengigabitethernet etc).
delete (bool): Remove the configuration if ``True``.
ip_donor_interface_type (str): The donor interface type (loopback)
Returns:
XML to be passed to the switch.
Raises:
None
]
variable[method_name] assign[=] binary_operation[constant[interface_%s_ip_ip_config_unnumbered_ip_donor_interface_type] <ast.Mod object at 0x7da2590d6920> call[name[kwargs]][constant[int_type]]]
variable[ip_unnumbered_type] assign[=] call[name[getattr], parameter[name[self]._interface, name[method_name]]]
variable[config] assign[=] call[name[ip_unnumbered_type], parameter[]]
if call[name[kwargs]][constant[delete]] begin[:]
variable[tag] assign[=] constant[ip-donor-interface-type]
call[call[name[config].find, parameter[binary_operation[constant[.//*%s] <ast.Mod object at 0x7da2590d6920> name[tag]]]].set, parameter[constant[operation], constant[delete]]]
return[name[config]] | keyword[def] identifier[_ip_unnumbered_type] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[method_name] = literal[string] literal[string] % identifier[kwargs] [ literal[string] ]
identifier[ip_unnumbered_type] = identifier[getattr] ( identifier[self] . identifier[_interface] , identifier[method_name] )
identifier[config] = identifier[ip_unnumbered_type] (** identifier[kwargs] )
keyword[if] identifier[kwargs] [ literal[string] ]:
identifier[tag] = literal[string]
identifier[config] . identifier[find] ( literal[string] % identifier[tag] ). identifier[set] ( literal[string] , literal[string] )
keyword[return] identifier[config] | def _ip_unnumbered_type(self, **kwargs):
"""Return the `ip unnumbered` donor type XML.
You should not use this method.
You probably want `Interface.ip_unnumbered`.
Args:
int_type (str): Type of interface. (gigabitethernet,
tengigabitethernet etc).
delete (bool): Remove the configuration if ``True``.
ip_donor_interface_type (str): The donor interface type (loopback)
Returns:
XML to be passed to the switch.
Raises:
None
"""
method_name = 'interface_%s_ip_ip_config_unnumbered_ip_donor_interface_type' % kwargs['int_type']
ip_unnumbered_type = getattr(self._interface, method_name)
config = ip_unnumbered_type(**kwargs)
if kwargs['delete']:
tag = 'ip-donor-interface-type'
config.find('.//*%s' % tag).set('operation', 'delete') # depends on [control=['if'], data=[]]
return config |
def gene_names(self, contig=None, strand=None):
    """
    Fetch the names of all genes in the database, optionally restricted
    to a single chromosome and/or strand.
    """
    query = dict(
        column="gene_name",
        feature="gene",
        contig=contig,
        strand=strand)
    return self._all_feature_values(**query)
constant[
Return all genes in the database,
optionally restrict to a chromosome and/or strand.
]
return[call[name[self]._all_feature_values, parameter[]]] | keyword[def] identifier[gene_names] ( identifier[self] , identifier[contig] = keyword[None] , identifier[strand] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_all_feature_values] (
identifier[column] = literal[string] ,
identifier[feature] = literal[string] ,
identifier[contig] = identifier[contig] ,
identifier[strand] = identifier[strand] ) | def gene_names(self, contig=None, strand=None):
"""
Return all genes in the database,
optionally restrict to a chromosome and/or strand.
"""
return self._all_feature_values(column='gene_name', feature='gene', contig=contig, strand=strand) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.