code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def _get_stmt_matching_groups(stmts):
    """Use the matches_key method to get sets of matching statements.

    Parameters
    ----------
    stmts : list
        Statements, each exposing a ``matches_key()`` method.

    Returns
    -------
    itertools.groupby
        Iterator of ``(matches_key, group)`` pairs where each group holds
        statements that differ only in their evidence.
    """
    def match_func(x):
        return x.matches_key()
    # Remove exact duplicates using a set() call, then make copies:
    logger.debug('%d statements before removing object duplicates.' %
                 len(stmts))
    st = list(set(stmts))
    # Bug fix: report the deduplicated count (previously logged len(stmts),
    # so the two messages always showed the same number).
    logger.debug('%d statements after removing object duplicates.' %
                 len(st))
    # groupby only groups *consecutive* equal keys, so sort by the same
    # key function first.
    st.sort(key=match_func)
    return itertools.groupby(st, key=match_func)
|
def function[_get_stmt_matching_groups, parameter[stmts]]:
constant[Use the matches_key method to get sets of matching statements.]
def function[match_func, parameter[x]]:
return[call[name[x].matches_key, parameter[]]]
call[name[logger].debug, parameter[binary_operation[constant[%d statements before removing object duplicates.] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[stmts]]]]]]
variable[st] assign[=] call[name[list], parameter[call[name[set], parameter[name[stmts]]]]]
call[name[logger].debug, parameter[binary_operation[constant[%d statements after removing object duplicates.] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[stmts]]]]]]
call[name[st].sort, parameter[]]
return[call[name[itertools].groupby, parameter[name[st]]]]
|
keyword[def] identifier[_get_stmt_matching_groups] ( identifier[stmts] ):
literal[string]
keyword[def] identifier[match_func] ( identifier[x] ): keyword[return] identifier[x] . identifier[matches_key] ()
identifier[logger] . identifier[debug] ( literal[string] %
identifier[len] ( identifier[stmts] ))
identifier[st] = identifier[list] ( identifier[set] ( identifier[stmts] ))
identifier[logger] . identifier[debug] ( literal[string] %
identifier[len] ( identifier[stmts] ))
identifier[st] . identifier[sort] ( identifier[key] = identifier[match_func] )
keyword[return] identifier[itertools] . identifier[groupby] ( identifier[st] , identifier[key] = identifier[match_func] )
|
def _get_stmt_matching_groups(stmts):
"""Use the matches_key method to get sets of matching statements."""
def match_func(x):
return x.matches_key()
# Remove exact duplicates using a set() call, then make copies:
logger.debug('%d statements before removing object duplicates.' % len(stmts))
st = list(set(stmts))
logger.debug('%d statements after removing object duplicates.' % len(stmts))
# Group statements according to whether they are matches (differing
# only in their evidence).
# Sort the statements in place by matches_key()
st.sort(key=match_func)
return itertools.groupby(st, key=match_func)
|
def get_stats_str(list_=None, newlines=False, keys=None, exclude_keys=None, lbl=None,
                  precision=None, axis=0, stat_dict=None, use_nan=False,
                  align=False, use_median=False, **kwargs):
    """
    Returns the string version of get_stats
    DEPRECATE in favor of ut.repr3(ut.get_stats(...))
    if keys is not None then it only displays chosen keys
    excluded keys are always removed
    CommandLine:
    python -m utool.util_dev --test-get_stats_str
    Example:
    >>> # ENABLE_DOCTEST
    >>> from utool.util_dev import * # NOQA
    >>> list_ = [1, 2, 3, 4, 5]
    >>> newlines = False
    >>> keys = None
    >>> exclude_keys = []
    >>> lbl = None
    >>> precision = 2
    >>> stat_str = get_stats_str(list_, newlines, keys, exclude_keys, lbl, precision)
    >>> result = str(stat_str)
    >>> print(result)
    {'mean': 3, 'std': 1.41, 'max': 5, 'min': 1, 'nMin': 1, 'nMax': 1, 'shape': (5,)}
    SeeAlso:
    repr2
    get_stats
    """
    from utool.util_str import repr4
    import utool as ut
    # Avoid the mutable-default-argument pitfall: None means "exclude
    # nothing" (previously a shared [] default).
    if exclude_keys is None:
        exclude_keys = []
    # Get stats dict
    if stat_dict is None:
        stat_dict = get_stats(list_, axis=axis, use_nan=use_nan, use_median=use_median)
    else:
        # Copy so the caller's dict is not mutated by the key filtering below.
        stat_dict = stat_dict.copy()
    # Keep only included keys if specified
    if keys is not None:
        for key in list(six.iterkeys(stat_dict)):
            if key not in keys:
                del stat_dict[key]
    # Remove excluded keys
    for key in exclude_keys:
        if key in stat_dict:
            del stat_dict[key]
    # apply precision
    statstr_dict = stat_dict.copy()
    if precision is not None:
        assert ut.is_int(precision), 'precision must be an integer'
        float_fmtstr = '%.' + str(precision) + 'f'
        for key in list(six.iterkeys(statstr_dict)):
            val = statstr_dict[key]
            isfloat = ut.is_float(val)
            if not isfloat and isinstance(val, list):
                # A homogeneous list of floats is formatted element-wise
                # through the ndarray branch below.
                type_list = list(map(type, val))
                if len(type_list) > 0 and ut.allsame(type_list):
                    if ut.is_float(val[0]):
                        isfloat = True
                        val = np.array(val)
            if isfloat:
                if isinstance(val, np.ndarray):
                    strval = str([float_fmtstr % v for v in val]).replace('\'', '').lstrip('u')
                else:
                    strval = float_fmtstr % val
                # Trim trailing zeros (and any dangling '.') unless the value
                # begins with '0' (e.g. keep '0.50' intact).
                if not strval.startswith('0'):
                    strval = strval.rstrip('0')
                    strval = strval.rstrip('.')
                statstr_dict[key] = strval
            else:
                if isinstance(val, np.ndarray):
                    strval = repr(val.tolist())
                else:
                    strval = str(val)
                statstr_dict[key] = strval
    # format the dictionary string
    stat_str = repr4(statstr_dict, strvals=True, newlines=newlines)
    # add a label if requested
    if lbl is True:
        lbl = ut.get_varname_from_stack(list_, N=1)  # fancy
    if lbl is not None:
        stat_str = 'stats_' + lbl + ' = ' + stat_str
    if align:
        stat_str = ut.align(stat_str, ':')
    return stat_str
|
def function[get_stats_str, parameter[list_, newlines, keys, exclude_keys, lbl, precision, axis, stat_dict, use_nan, align, use_median]]:
constant[
Returns the string version of get_stats
DEPRICATE in favor of ut.repr3(ut.get_stats(...))
if keys is not None then it only displays chosen keys
excluded keys are always removed
CommandLine:
python -m utool.util_dev --test-get_stats_str
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_dev import * # NOQA
>>> list_ = [1, 2, 3, 4, 5]
>>> newlines = False
>>> keys = None
>>> exclude_keys = []
>>> lbl = None
>>> precision = 2
>>> stat_str = get_stats_str(list_, newlines, keys, exclude_keys, lbl, precision)
>>> result = str(stat_str)
>>> print(result)
{'mean': 3, 'std': 1.41, 'max': 5, 'min': 1, 'nMin': 1, 'nMax': 1, 'shape': (5,)}
SeeAlso:
repr2
get_stats
]
from relative_module[utool.util_str] import module[repr4]
import module[utool] as alias[ut]
if compare[name[stat_dict] is constant[None]] begin[:]
variable[stat_dict] assign[=] call[name[get_stats], parameter[name[list_]]]
if compare[name[keys] is_not constant[None]] begin[:]
for taget[name[key]] in starred[call[name[list], parameter[call[name[six].iterkeys, parameter[name[stat_dict]]]]]] begin[:]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[keys]] begin[:]
<ast.Delete object at 0x7da1b253aec0>
for taget[name[key]] in starred[name[exclude_keys]] begin[:]
if compare[name[key] in name[stat_dict]] begin[:]
<ast.Delete object at 0x7da1b253a080>
variable[statstr_dict] assign[=] call[name[stat_dict].copy, parameter[]]
if compare[name[precision] is_not constant[None]] begin[:]
assert[call[name[ut].is_int, parameter[name[precision]]]]
variable[float_fmtstr] assign[=] binary_operation[binary_operation[constant[%.] + call[name[str], parameter[name[precision]]]] + constant[f]]
for taget[name[key]] in starred[call[name[list], parameter[call[name[six].iterkeys, parameter[name[statstr_dict]]]]]] begin[:]
variable[val] assign[=] call[name[statstr_dict]][name[key]]
variable[isfloat] assign[=] call[name[ut].is_float, parameter[name[val]]]
if <ast.BoolOp object at 0x7da1b25384c0> begin[:]
variable[type_list] assign[=] call[name[list], parameter[call[name[map], parameter[name[type], name[val]]]]]
if <ast.BoolOp object at 0x7da1b253bb80> begin[:]
if call[name[ut].is_float, parameter[call[name[val]][constant[0]]]] begin[:]
variable[isfloat] assign[=] constant[True]
variable[val] assign[=] call[name[np].array, parameter[name[val]]]
if name[isfloat] begin[:]
if call[name[isinstance], parameter[name[val], name[np].ndarray]] begin[:]
variable[strval] assign[=] call[call[call[name[str], parameter[<ast.ListComp object at 0x7da1b245fb50>]].replace, parameter[constant['], constant[]]].lstrip, parameter[constant[u]]]
if <ast.UnaryOp object at 0x7da1b245eaa0> begin[:]
variable[strval] assign[=] call[name[strval].rstrip, parameter[constant[0]]]
variable[strval] assign[=] call[name[strval].rstrip, parameter[constant[.]]]
call[name[statstr_dict]][name[key]] assign[=] name[strval]
variable[stat_str] assign[=] call[name[repr4], parameter[name[statstr_dict]]]
if compare[name[lbl] is constant[True]] begin[:]
variable[lbl] assign[=] call[name[ut].get_varname_from_stack, parameter[name[list_]]]
if compare[name[lbl] is_not constant[None]] begin[:]
variable[stat_str] assign[=] binary_operation[binary_operation[binary_operation[constant[stats_] + name[lbl]] + constant[ = ]] + name[stat_str]]
if name[align] begin[:]
variable[stat_str] assign[=] call[name[ut].align, parameter[name[stat_str], constant[:]]]
return[name[stat_str]]
|
keyword[def] identifier[get_stats_str] ( identifier[list_] = keyword[None] , identifier[newlines] = keyword[False] , identifier[keys] = keyword[None] , identifier[exclude_keys] =[], identifier[lbl] = keyword[None] ,
identifier[precision] = keyword[None] , identifier[axis] = literal[int] , identifier[stat_dict] = keyword[None] , identifier[use_nan] = keyword[False] ,
identifier[align] = keyword[False] , identifier[use_median] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[from] identifier[utool] . identifier[util_str] keyword[import] identifier[repr4]
keyword[import] identifier[utool] keyword[as] identifier[ut]
keyword[if] identifier[stat_dict] keyword[is] keyword[None] :
identifier[stat_dict] = identifier[get_stats] ( identifier[list_] , identifier[axis] = identifier[axis] , identifier[use_nan] = identifier[use_nan] , identifier[use_median] = identifier[use_median] )
keyword[else] :
identifier[stat_dict] = identifier[stat_dict] . identifier[copy] ()
keyword[if] identifier[keys] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[six] . identifier[iterkeys] ( identifier[stat_dict] )):
keyword[if] identifier[key] keyword[not] keyword[in] identifier[keys] :
keyword[del] identifier[stat_dict] [ identifier[key] ]
keyword[for] identifier[key] keyword[in] identifier[exclude_keys] :
keyword[if] identifier[key] keyword[in] identifier[stat_dict] :
keyword[del] identifier[stat_dict] [ identifier[key] ]
identifier[statstr_dict] = identifier[stat_dict] . identifier[copy] ()
keyword[if] identifier[precision] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[ut] . identifier[is_int] ( identifier[precision] ), literal[string]
identifier[float_fmtstr] = literal[string] + identifier[str] ( identifier[precision] )+ literal[string]
keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[six] . identifier[iterkeys] ( identifier[statstr_dict] )):
identifier[val] = identifier[statstr_dict] [ identifier[key] ]
identifier[isfloat] = identifier[ut] . identifier[is_float] ( identifier[val] )
keyword[if] keyword[not] identifier[isfloat] keyword[and] identifier[isinstance] ( identifier[val] , identifier[list] ):
identifier[type_list] = identifier[list] ( identifier[map] ( identifier[type] , identifier[val] ))
keyword[if] identifier[len] ( identifier[type_list] )> literal[int] keyword[and] identifier[ut] . identifier[allsame] ( identifier[type_list] ):
keyword[if] identifier[ut] . identifier[is_float] ( identifier[val] [ literal[int] ]):
identifier[isfloat] = keyword[True]
identifier[val] = identifier[np] . identifier[array] ( identifier[val] )
keyword[if] identifier[isfloat] :
keyword[if] identifier[isinstance] ( identifier[val] , identifier[np] . identifier[ndarray] ):
identifier[strval] = identifier[str] ([ identifier[float_fmtstr] % identifier[v] keyword[for] identifier[v] keyword[in] identifier[val] ]). identifier[replace] ( literal[string] , literal[string] ). identifier[lstrip] ( literal[string] )
keyword[else] :
identifier[strval] = identifier[float_fmtstr] % identifier[val]
keyword[if] keyword[not] identifier[strval] . identifier[startswith] ( literal[string] ):
identifier[strval] = identifier[strval] . identifier[rstrip] ( literal[string] )
identifier[strval] = identifier[strval] . identifier[rstrip] ( literal[string] )
identifier[statstr_dict] [ identifier[key] ]= identifier[strval]
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[val] , identifier[np] . identifier[ndarray] ):
identifier[strval] = identifier[repr] ( identifier[val] . identifier[tolist] ())
keyword[else] :
identifier[strval] = identifier[str] ( identifier[val] )
identifier[statstr_dict] [ identifier[key] ]= identifier[strval]
identifier[stat_str] = identifier[repr4] ( identifier[statstr_dict] , identifier[strvals] = keyword[True] , identifier[newlines] = identifier[newlines] )
keyword[if] identifier[lbl] keyword[is] keyword[True] :
identifier[lbl] = identifier[ut] . identifier[get_varname_from_stack] ( identifier[list_] , identifier[N] = literal[int] )
keyword[if] identifier[lbl] keyword[is] keyword[not] keyword[None] :
identifier[stat_str] = literal[string] + identifier[lbl] + literal[string] + identifier[stat_str]
keyword[if] identifier[align] :
identifier[stat_str] = identifier[ut] . identifier[align] ( identifier[stat_str] , literal[string] )
keyword[return] identifier[stat_str]
|
def get_stats_str(list_=None, newlines=False, keys=None, exclude_keys=[], lbl=None, precision=None, axis=0, stat_dict=None, use_nan=False, align=False, use_median=False, **kwargs):
"""
Returns the string version of get_stats
DEPRICATE in favor of ut.repr3(ut.get_stats(...))
if keys is not None then it only displays chosen keys
excluded keys are always removed
CommandLine:
python -m utool.util_dev --test-get_stats_str
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_dev import * # NOQA
>>> list_ = [1, 2, 3, 4, 5]
>>> newlines = False
>>> keys = None
>>> exclude_keys = []
>>> lbl = None
>>> precision = 2
>>> stat_str = get_stats_str(list_, newlines, keys, exclude_keys, lbl, precision)
>>> result = str(stat_str)
>>> print(result)
{'mean': 3, 'std': 1.41, 'max': 5, 'min': 1, 'nMin': 1, 'nMax': 1, 'shape': (5,)}
SeeAlso:
repr2
get_stats
"""
from utool.util_str import repr4
import utool as ut
# Get stats dict
if stat_dict is None:
stat_dict = get_stats(list_, axis=axis, use_nan=use_nan, use_median=use_median) # depends on [control=['if'], data=['stat_dict']]
else:
stat_dict = stat_dict.copy()
# Keep only included keys if specified
if keys is not None:
for key in list(six.iterkeys(stat_dict)):
if key not in keys:
del stat_dict[key] # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=['keys']]
# Remove excluded keys
for key in exclude_keys:
if key in stat_dict:
del stat_dict[key] # depends on [control=['if'], data=['key', 'stat_dict']] # depends on [control=['for'], data=['key']]
# apply precision
statstr_dict = stat_dict.copy()
#precisionless_types = (bool,) + six.string_types
if precision is not None:
assert ut.is_int(precision), 'precision must be an integer'
float_fmtstr = '%.' + str(precision) + 'f'
for key in list(six.iterkeys(statstr_dict)):
val = statstr_dict[key]
isfloat = ut.is_float(val)
if not isfloat and isinstance(val, list):
type_list = list(map(type, val))
if len(type_list) > 0 and ut.allsame(type_list):
if ut.is_float(val[0]):
isfloat = True
val = np.array(val) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if isfloat:
if isinstance(val, np.ndarray):
strval = str([float_fmtstr % v for v in val]).replace("'", '').lstrip('u') # depends on [control=['if'], data=[]]
else:
#np.array_str((val), precision=precision)
strval = float_fmtstr % val
if not strval.startswith('0'):
strval = strval.rstrip('0')
strval = strval.rstrip('.') # depends on [control=['if'], data=[]]
statstr_dict[key] = strval # depends on [control=['if'], data=[]]
else:
if isinstance(val, np.ndarray):
strval = repr(val.tolist()) # depends on [control=['if'], data=[]]
else:
strval = str(val)
statstr_dict[key] = strval # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=['precision']]
# format the dictionary string
stat_str = repr4(statstr_dict, strvals=True, newlines=newlines)
# add a label if requested
if lbl is True:
lbl = ut.get_varname_from_stack(list_, N=1) # fancy # depends on [control=['if'], data=['lbl']]
if lbl is not None:
stat_str = 'stats_' + lbl + ' = ' + stat_str # depends on [control=['if'], data=['lbl']]
if align:
stat_str = ut.align(stat_str, ':') # depends on [control=['if'], data=[]]
return stat_str
|
def gpp(V, E):
    """gpp -- model for the graph partitioning problem
    Parameters:
    - V: set/list of nodes in the graph
    - E: set/list of edges in the graph
    Returns a model, ready to be solved.
    """
    model = Model("gpp")
    # One binary variable per node: which side of the partition it lands on.
    x = {node: model.addVar(vtype="B", name="x(%s)" % node) for node in V}
    # One binary indicator per edge: 1 when the edge crosses the partition.
    y = {}
    for (u, v) in E:
        y[u, v] = model.addVar(vtype="B", name="y(%s,%s)" % (u, v))
    # Force a balanced bisection of the node set.
    model.addCons(quicksum(x[node] for node in V) == len(V)/2, "Partition")
    for (u, v) in E:
        # Together these enforce y[u,v] >= |x[u] - x[v]| linearly.
        model.addCons(x[u] - x[v] <= y[u, v], "Edge(%s,%s)" % (u, v))
        model.addCons(x[v] - x[u] <= y[u, v], "Edge(%s,%s)" % (v, u))
    # Objective: minimize the number of cut edges.
    model.setObjective(quicksum(y[u, v] for (u, v) in E), "minimize")
    model.data = x
    return model
|
def function[gpp, parameter[V, E]]:
constant[gpp -- model for the graph partitioning problem
Parameters:
- V: set/list of nodes in the graph
- E: set/list of edges in the graph
Returns a model, ready to be solved.
]
variable[model] assign[=] call[name[Model], parameter[constant[gpp]]]
variable[x] assign[=] dictionary[[], []]
variable[y] assign[=] dictionary[[], []]
for taget[name[i]] in starred[name[V]] begin[:]
call[name[x]][name[i]] assign[=] call[name[model].addVar, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b1700dc0>, <ast.Name object at 0x7da1b1702fe0>]]] in starred[name[E]] begin[:]
call[name[y]][tuple[[<ast.Name object at 0x7da1b1703910>, <ast.Name object at 0x7da1b17033a0>]]] assign[=] call[name[model].addVar, parameter[]]
call[name[model].addCons, parameter[compare[call[name[quicksum], parameter[<ast.GeneratorExp object at 0x7da1b1703850>]] equal[==] binary_operation[call[name[len], parameter[name[V]]] / constant[2]]], constant[Partition]]]
for taget[tuple[[<ast.Name object at 0x7da1b1703400>, <ast.Name object at 0x7da1b1702e30>]]] in starred[name[E]] begin[:]
call[name[model].addCons, parameter[compare[binary_operation[call[name[x]][name[i]] - call[name[x]][name[j]]] less_or_equal[<=] call[name[y]][tuple[[<ast.Name object at 0x7da1b18e54e0>, <ast.Name object at 0x7da1b18e6860>]]]], binary_operation[constant[Edge(%s,%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b18e4070>, <ast.Name object at 0x7da1b18e70d0>]]]]]
call[name[model].addCons, parameter[compare[binary_operation[call[name[x]][name[j]] - call[name[x]][name[i]]] less_or_equal[<=] call[name[y]][tuple[[<ast.Name object at 0x7da1b18e46a0>, <ast.Name object at 0x7da1b18e6f50>]]]], binary_operation[constant[Edge(%s,%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b18e7cd0>, <ast.Name object at 0x7da1b18e6c50>]]]]]
call[name[model].setObjective, parameter[call[name[quicksum], parameter[<ast.GeneratorExp object at 0x7da1b18e5b70>]], constant[minimize]]]
name[model].data assign[=] name[x]
return[name[model]]
|
keyword[def] identifier[gpp] ( identifier[V] , identifier[E] ):
literal[string]
identifier[model] = identifier[Model] ( literal[string] )
identifier[x] ={}
identifier[y] ={}
keyword[for] identifier[i] keyword[in] identifier[V] :
identifier[x] [ identifier[i] ]= identifier[model] . identifier[addVar] ( identifier[vtype] = literal[string] , identifier[name] = literal[string] % identifier[i] )
keyword[for] ( identifier[i] , identifier[j] ) keyword[in] identifier[E] :
identifier[y] [ identifier[i] , identifier[j] ]= identifier[model] . identifier[addVar] ( identifier[vtype] = literal[string] , identifier[name] = literal[string] %( identifier[i] , identifier[j] ))
identifier[model] . identifier[addCons] ( identifier[quicksum] ( identifier[x] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[V] )== identifier[len] ( identifier[V] )/ literal[int] , literal[string] )
keyword[for] ( identifier[i] , identifier[j] ) keyword[in] identifier[E] :
identifier[model] . identifier[addCons] ( identifier[x] [ identifier[i] ]- identifier[x] [ identifier[j] ]<= identifier[y] [ identifier[i] , identifier[j] ], literal[string] %( identifier[i] , identifier[j] ))
identifier[model] . identifier[addCons] ( identifier[x] [ identifier[j] ]- identifier[x] [ identifier[i] ]<= identifier[y] [ identifier[i] , identifier[j] ], literal[string] %( identifier[j] , identifier[i] ))
identifier[model] . identifier[setObjective] ( identifier[quicksum] ( identifier[y] [ identifier[i] , identifier[j] ] keyword[for] ( identifier[i] , identifier[j] ) keyword[in] identifier[E] ), literal[string] )
identifier[model] . identifier[data] = identifier[x]
keyword[return] identifier[model]
|
def gpp(V, E):
"""gpp -- model for the graph partitioning problem
Parameters:
- V: set/list of nodes in the graph
- E: set/list of edges in the graph
Returns a model, ready to be solved.
"""
model = Model('gpp')
x = {}
y = {}
for i in V:
x[i] = model.addVar(vtype='B', name='x(%s)' % i) # depends on [control=['for'], data=['i']]
for (i, j) in E:
y[i, j] = model.addVar(vtype='B', name='y(%s,%s)' % (i, j)) # depends on [control=['for'], data=[]]
model.addCons(quicksum((x[i] for i in V)) == len(V) / 2, 'Partition')
for (i, j) in E:
model.addCons(x[i] - x[j] <= y[i, j], 'Edge(%s,%s)' % (i, j))
model.addCons(x[j] - x[i] <= y[i, j], 'Edge(%s,%s)' % (j, i)) # depends on [control=['for'], data=[]]
model.setObjective(quicksum((y[i, j] for (i, j) in E)), 'minimize')
model.data = x
return model
|
def sort(self, by=None, reverse=False):
    """
    Sorts the data by the values along the supplied dimensions.
    Args:
        by: Dimension(s) to sort by
        reverse (bool, optional): Reverse sort order
    Returns:
        Sorted Dataset
    """
    # Default to the key dimensions; normalize a lone dimension to a list.
    if by is None:
        dims = self.kdims
    elif isinstance(by, list):
        dims = by
    else:
        dims = [by]
    sorted_columns = self.interface.sort(self, dims, reverse)
    return self.clone(sorted_columns)
|
def function[sort, parameter[self, by, reverse]]:
constant[
Sorts the data by the values along the supplied dimensions.
Args:
by: Dimension(s) to sort by
reverse (bool, optional): Reverse sort order
Returns:
Sorted Dataset
]
if compare[name[by] is constant[None]] begin[:]
variable[by] assign[=] name[self].kdims
variable[sorted_columns] assign[=] call[name[self].interface.sort, parameter[name[self], name[by], name[reverse]]]
return[call[name[self].clone, parameter[name[sorted_columns]]]]
|
keyword[def] identifier[sort] ( identifier[self] , identifier[by] = keyword[None] , identifier[reverse] = keyword[False] ):
literal[string]
keyword[if] identifier[by] keyword[is] keyword[None] :
identifier[by] = identifier[self] . identifier[kdims]
keyword[elif] keyword[not] identifier[isinstance] ( identifier[by] , identifier[list] ):
identifier[by] =[ identifier[by] ]
identifier[sorted_columns] = identifier[self] . identifier[interface] . identifier[sort] ( identifier[self] , identifier[by] , identifier[reverse] )
keyword[return] identifier[self] . identifier[clone] ( identifier[sorted_columns] )
|
def sort(self, by=None, reverse=False):
"""
Sorts the data by the values along the supplied dimensions.
Args:
by: Dimension(s) to sort by
reverse (bool, optional): Reverse sort order
Returns:
Sorted Dataset
"""
if by is None:
by = self.kdims # depends on [control=['if'], data=['by']]
elif not isinstance(by, list):
by = [by] # depends on [control=['if'], data=[]]
sorted_columns = self.interface.sort(self, by, reverse)
return self.clone(sorted_columns)
|
def invalidate(self):
    """ Invalidate the UI for all clients. """
    applications = self.apps
    logger.info('Invalidating %s applications', len(applications))
    for application in applications:
        application.invalidate()
|
def function[invalidate, parameter[self]]:
constant[ Invalidate the UI for all clients. ]
call[name[logger].info, parameter[constant[Invalidating %s applications], call[name[len], parameter[name[self].apps]]]]
for taget[name[app]] in starred[name[self].apps] begin[:]
call[name[app].invalidate, parameter[]]
|
keyword[def] identifier[invalidate] ( identifier[self] ):
literal[string]
identifier[logger] . identifier[info] ( literal[string] , identifier[len] ( identifier[self] . identifier[apps] ))
keyword[for] identifier[app] keyword[in] identifier[self] . identifier[apps] :
identifier[app] . identifier[invalidate] ()
|
def invalidate(self):
""" Invalidate the UI for all clients. """
logger.info('Invalidating %s applications', len(self.apps))
for app in self.apps:
app.invalidate() # depends on [control=['for'], data=['app']]
|
def unload(module):
    '''
    Unload specified fault manager module
    module: string
        module to unload
    CLI Example:
    .. code-block:: bash
        salt '*' fmadm.unload software-response
    '''
    # Removed unused local `ret = {}` and the redundant retcode/result
    # indirection from the original implementation.
    fmadm = _check_fmadm()
    cmd = '{cmd} unload {module}'.format(
        cmd=fmadm,
        module=module
    )
    res = __salt__['cmd.run_all'](cmd)
    # Non-zero exit: surface stderr to the caller; otherwise report success.
    if res['retcode'] != 0:
        return {'Error': res['stderr']}
    return True
|
def function[unload, parameter[module]]:
constant[
Unload specified fault manager module
module: string
module to unload
CLI Example:
.. code-block:: bash
salt '*' fmadm.unload software-response
]
variable[ret] assign[=] dictionary[[], []]
variable[fmadm] assign[=] call[name[_check_fmadm], parameter[]]
variable[cmd] assign[=] call[constant[{cmd} unload {module}].format, parameter[]]
variable[res] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmd]]]
variable[retcode] assign[=] call[name[res]][constant[retcode]]
variable[result] assign[=] dictionary[[], []]
if compare[name[retcode] not_equal[!=] constant[0]] begin[:]
call[name[result]][constant[Error]] assign[=] call[name[res]][constant[stderr]]
return[name[result]]
|
keyword[def] identifier[unload] ( identifier[module] ):
literal[string]
identifier[ret] ={}
identifier[fmadm] = identifier[_check_fmadm] ()
identifier[cmd] = literal[string] . identifier[format] (
identifier[cmd] = identifier[fmadm] ,
identifier[module] = identifier[module]
)
identifier[res] = identifier[__salt__] [ literal[string] ]( identifier[cmd] )
identifier[retcode] = identifier[res] [ literal[string] ]
identifier[result] ={}
keyword[if] identifier[retcode] != literal[int] :
identifier[result] [ literal[string] ]= identifier[res] [ literal[string] ]
keyword[else] :
identifier[result] = keyword[True]
keyword[return] identifier[result]
|
def unload(module):
"""
Unload specified fault manager module
module: string
module to unload
CLI Example:
.. code-block:: bash
salt '*' fmadm.unload software-response
"""
ret = {}
fmadm = _check_fmadm()
cmd = '{cmd} unload {module}'.format(cmd=fmadm, module=module)
res = __salt__['cmd.run_all'](cmd)
retcode = res['retcode']
result = {}
if retcode != 0:
result['Error'] = res['stderr'] # depends on [control=['if'], data=[]]
else:
result = True
return result
|
def remove_entry(self, entry):
    """!
    @brief Remove clustering feature from the leaf node.
    @param[in] entry (cfentry): Clustering feature.
    """
    # Subtract the entry's statistics from the node's aggregate feature,
    # then drop the entry itself. (Removed non-idiomatic trailing
    # semicolons from the original.)
    self.feature -= entry
    self.entries.remove(entry)
|
def function[remove_entry, parameter[self, entry]]:
constant[!
@brief Remove clustering feature from the leaf node.
@param[in] entry (cfentry): Clustering feature.
]
<ast.AugAssign object at 0x7da1b01fc4f0>
call[name[self].entries.remove, parameter[name[entry]]]
|
keyword[def] identifier[remove_entry] ( identifier[self] , identifier[entry] ):
literal[string]
identifier[self] . identifier[feature] -= identifier[entry] ;
identifier[self] . identifier[entries] . identifier[remove] ( identifier[entry] );
|
def remove_entry(self, entry):
"""!
@brief Remove clustering feature from the leaf node.
@param[in] entry (cfentry): Clustering feature.
"""
self.feature -= entry
self.entries.remove(entry)
|
def enum_to_yaml(cls: Type[T_EnumToYAML], representer: Representer, data: T_EnumToYAML) -> ruamel.yaml.nodes.ScalarNode:
    """ Encodes YAML representation.
    This is a mixin method for writing enum values to YAML. It needs to be added to the enum
    as a classmethod. See the module docstring for further information on this approach and how
    to implement it.
    This method writes whatever is used in the string representation of the YAML value.
    Usually, this will be the unique name of the enumeration value. If the name is used,
    the corresponding ``EnumFromYAML`` mixin can be used to recreate the value. If the name
    isn't used, more care may be necessary, so a ``from_yaml`` method for that particular
    enumeration may be necessary.
    Note:
        This method assumes that the name of the enumeration value should be stored as a scalar node.
    Args:
        representer: Representation from YAML.
        data: Enumeration value to be encoded.
    Returns:
        Scalar representation of the name of the enumeration value.
    """
    # Tag the node with the enum class name; the scalar value is the enum's
    # string representation. (Simplified the redundant f"{str(data)}"
    # wrapper to a plain str() call.)
    return representer.represent_scalar(
        f"!{cls.__name__}",
        str(data)
    )
|
def function[enum_to_yaml, parameter[cls, representer, data]]:
constant[ Encodes YAML representation.
This is a mixin method for writing enum values to YAML. It needs to be added to the enum
as a classmethod. See the module docstring for further information on this approach and how
to implement it.
This method writes whatever is used in the string representation of the YAML value.
Usually, this will be the unique name of the enumeration value. If the name is used,
the corresponding ``EnumFromYAML`` mixin can be used to recreate the value. If the name
isn't used, more care may be necessary, so a ``from_yaml`` method for that particular
enumeration may be necessary.
Note:
This method assumes that the name of the enumeration value should be stored as a scalar node.
Args:
representer: Representation from YAML.
data: Enumeration value to be encoded.
Returns:
Scalar representation of the name of the enumeration value.
]
return[call[name[representer].represent_scalar, parameter[<ast.JoinedStr object at 0x7da20c6c78b0>, <ast.JoinedStr object at 0x7da20c6c4e80>]]]
|
keyword[def] identifier[enum_to_yaml] ( identifier[cls] : identifier[Type] [ identifier[T_EnumToYAML] ], identifier[representer] : identifier[Representer] , identifier[data] : identifier[T_EnumToYAML] )-> identifier[ruamel] . identifier[yaml] . identifier[nodes] . identifier[ScalarNode] :
literal[string]
keyword[return] identifier[representer] . identifier[represent_scalar] (
literal[string] ,
literal[string]
)
|
def enum_to_yaml(cls: Type[T_EnumToYAML], representer: Representer, data: T_EnumToYAML) -> ruamel.yaml.nodes.ScalarNode:
""" Encodes YAML representation.
This is a mixin method for writing enum values to YAML. It needs to be added to the enum
as a classmethod. See the module docstring for further information on this approach and how
to implement it.
This method writes whatever is used in the string representation of the YAML value.
Usually, this will be the unique name of the enumeration value. If the name is used,
the corresponding ``EnumFromYAML`` mixin can be used to recreate the value. If the name
isn't used, more care may be necessary, so a ``from_yaml`` method for that particular
enumeration may be necessary.
Note:
This method assumes that the name of the enumeration value should be stored as a scalar node.
Args:
representer: Representation from YAML.
data: Enumeration value to be encoded.
Returns:
Scalar representation of the name of the enumeration value.
"""
return representer.represent_scalar(f'!{cls.__name__}', f'{str(data)}')
|
def on_builder_inited(app):
    """
    Hooks into Sphinx's ``builder-inited`` event.
    """
    app.cache_db_path = ":memory:"
    # Guard clause: nothing to set up when caching is disabled.
    if not app.config["uqbar_book_use_cache"]:
        return
    logger.info(bold("[uqbar-book]"), nonl=True)
    logger.info(" initializing cache db")
    app.connection = uqbar.book.sphinx.create_cache_db(app.cache_db_path)
|
def function[on_builder_inited, parameter[app]]:
constant[
Hooks into Sphinx's ``builder-inited`` event.
]
name[app].cache_db_path assign[=] constant[:memory:]
if call[name[app].config][constant[uqbar_book_use_cache]] begin[:]
call[name[logger].info, parameter[call[name[bold], parameter[constant[[uqbar-book]]]]]]
call[name[logger].info, parameter[constant[ initializing cache db]]]
name[app].connection assign[=] call[name[uqbar].book.sphinx.create_cache_db, parameter[name[app].cache_db_path]]
|
keyword[def] identifier[on_builder_inited] ( identifier[app] ):
literal[string]
identifier[app] . identifier[cache_db_path] = literal[string]
keyword[if] identifier[app] . identifier[config] [ literal[string] ]:
identifier[logger] . identifier[info] ( identifier[bold] ( literal[string] ), identifier[nonl] = keyword[True] )
identifier[logger] . identifier[info] ( literal[string] )
identifier[app] . identifier[connection] = identifier[uqbar] . identifier[book] . identifier[sphinx] . identifier[create_cache_db] ( identifier[app] . identifier[cache_db_path] )
|
def on_builder_inited(app):
"""
Hooks into Sphinx's ``builder-inited`` event.
"""
app.cache_db_path = ':memory:'
if app.config['uqbar_book_use_cache']:
logger.info(bold('[uqbar-book]'), nonl=True)
logger.info(' initializing cache db')
app.connection = uqbar.book.sphinx.create_cache_db(app.cache_db_path) # depends on [control=['if'], data=[]]
|
def move(self):
    """
    Advance game by single move, if possible.
    @return: logical indicator if move was performed. ``True`` when a move
        was made and the game continues; ``False`` when no move is possible
        (move cap reached or a forced mate was reported).
        NOTE(review): when the engine's ponder move equals ``'(none)'`` the
        method falls through and implicitly returns ``None`` (falsy) —
        confirm callers treat that the same as ``False``.
    """
    # Refuse to move once the hard cap on game length is reached.
    if len(self.moves) == MAX_MOVES:
        return False
    elif len(self.moves) % 2:
        # An odd number of half-moves has been played: it is Black's turn.
        active_engine = self.black_engine
        active_engine_name = self.black
        inactive_engine = self.white_engine
        inactive_engine_name = self.white
    else:
        # Even number of half-moves: White is to move.
        active_engine = self.white_engine
        active_engine_name = self.white
        inactive_engine = self.black_engine
        inactive_engine_name = self.black
    # Feed the move list so far to the active engine and ask for its choice.
    active_engine.setposition(self.moves)
    movedict = active_engine.bestmove()
    bestmove = movedict.get('move')
    info = movedict.get('info')
    ponder = movedict.get('ponder')
    self.moves.append(bestmove)
    if info["score"]["eval"] == "mate":
        # A forced mate was reported; the sign of the mate distance says
        # which side delivers it: positive -> the active (moving) engine
        # wins, negative -> the inactive engine wins.
        matenum = info["score"]["value"]
        if matenum > 0:
            self.winner_engine = active_engine
            self.winner = active_engine_name
        elif matenum < 0:
            self.winner_engine = inactive_engine
            self.winner = inactive_engine_name
        # Game decided — no further moves.
        return False
    if ponder != '(none)':
        return True
|
def function[move, parameter[self]]:
constant[
Advance game by single move, if possible.
@return: logical indicator if move was performed.
]
if compare[call[name[len], parameter[name[self].moves]] equal[==] name[MAX_MOVES]] begin[:]
return[constant[False]]
call[name[active_engine].setposition, parameter[name[self].moves]]
variable[movedict] assign[=] call[name[active_engine].bestmove, parameter[]]
variable[bestmove] assign[=] call[name[movedict].get, parameter[constant[move]]]
variable[info] assign[=] call[name[movedict].get, parameter[constant[info]]]
variable[ponder] assign[=] call[name[movedict].get, parameter[constant[ponder]]]
call[name[self].moves.append, parameter[name[bestmove]]]
if compare[call[call[name[info]][constant[score]]][constant[eval]] equal[==] constant[mate]] begin[:]
variable[matenum] assign[=] call[call[name[info]][constant[score]]][constant[value]]
if compare[name[matenum] greater[>] constant[0]] begin[:]
name[self].winner_engine assign[=] name[active_engine]
name[self].winner assign[=] name[active_engine_name]
return[constant[False]]
if compare[name[ponder] not_equal[!=] constant[(none)]] begin[:]
return[constant[True]]
|
keyword[def] identifier[move] ( identifier[self] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[moves] )== identifier[MAX_MOVES] :
keyword[return] keyword[False]
keyword[elif] identifier[len] ( identifier[self] . identifier[moves] )% literal[int] :
identifier[active_engine] = identifier[self] . identifier[black_engine]
identifier[active_engine_name] = identifier[self] . identifier[black]
identifier[inactive_engine] = identifier[self] . identifier[white_engine]
identifier[inactive_engine_name] = identifier[self] . identifier[white]
keyword[else] :
identifier[active_engine] = identifier[self] . identifier[white_engine]
identifier[active_engine_name] = identifier[self] . identifier[white]
identifier[inactive_engine] = identifier[self] . identifier[black_engine]
identifier[inactive_engine_name] = identifier[self] . identifier[black]
identifier[active_engine] . identifier[setposition] ( identifier[self] . identifier[moves] )
identifier[movedict] = identifier[active_engine] . identifier[bestmove] ()
identifier[bestmove] = identifier[movedict] . identifier[get] ( literal[string] )
identifier[info] = identifier[movedict] . identifier[get] ( literal[string] )
identifier[ponder] = identifier[movedict] . identifier[get] ( literal[string] )
identifier[self] . identifier[moves] . identifier[append] ( identifier[bestmove] )
keyword[if] identifier[info] [ literal[string] ][ literal[string] ]== literal[string] :
identifier[matenum] = identifier[info] [ literal[string] ][ literal[string] ]
keyword[if] identifier[matenum] > literal[int] :
identifier[self] . identifier[winner_engine] = identifier[active_engine]
identifier[self] . identifier[winner] = identifier[active_engine_name]
keyword[elif] identifier[matenum] < literal[int] :
identifier[self] . identifier[winner_engine] = identifier[inactive_engine]
identifier[self] . identifier[winner] = identifier[inactive_engine_name]
keyword[return] keyword[False]
keyword[if] identifier[ponder] != literal[string] :
keyword[return] keyword[True]
|
def move(self):
"""
Advance game by single move, if possible.
@return: logical indicator if move was performed.
"""
if len(self.moves) == MAX_MOVES:
return False # depends on [control=['if'], data=[]]
elif len(self.moves) % 2:
active_engine = self.black_engine
active_engine_name = self.black
inactive_engine = self.white_engine
inactive_engine_name = self.white # depends on [control=['if'], data=[]]
else:
active_engine = self.white_engine
active_engine_name = self.white
inactive_engine = self.black_engine
inactive_engine_name = self.black
active_engine.setposition(self.moves)
movedict = active_engine.bestmove()
bestmove = movedict.get('move')
info = movedict.get('info')
ponder = movedict.get('ponder')
self.moves.append(bestmove)
if info['score']['eval'] == 'mate':
matenum = info['score']['value']
if matenum > 0:
self.winner_engine = active_engine
self.winner = active_engine_name # depends on [control=['if'], data=[]]
elif matenum < 0:
self.winner_engine = inactive_engine
self.winner = inactive_engine_name # depends on [control=['if'], data=[]]
return False # depends on [control=['if'], data=[]]
if ponder != '(none)':
return True # depends on [control=['if'], data=[]]
|
def format_results(self, raw_data, options):
    """
    Return a python structure that later gets serialized.

    raw_data
        full list of objects matching the search term
    options
        a dictionary of the given options
    """
    page_data = self.paginate_results(raw_data, options)
    # Echo the options back as metadata, augmented with paging info.
    meta = options.copy()
    meta['more'] = _('Show more results')
    if page_data:
        if page_data.has_next():
            meta['next_page'] = page_data.next_page_number()
        if page_data.has_previous():
            meta['prev_page'] = page_data.previous_page_number()
    formatted = []
    for item in page_data.object_list:
        formatted.append(self.format_item(item))
    return {'data': formatted, 'meta': meta}
|
def function[format_results, parameter[self, raw_data, options]]:
constant[
Returns a python structure that later gets serialized.
raw_data
full list of objects matching the search term
options
a dictionary of the given options
]
variable[page_data] assign[=] call[name[self].paginate_results, parameter[name[raw_data], name[options]]]
variable[results] assign[=] dictionary[[], []]
variable[meta] assign[=] call[name[options].copy, parameter[]]
call[name[meta]][constant[more]] assign[=] call[name[_], parameter[constant[Show more results]]]
if <ast.BoolOp object at 0x7da204623460> begin[:]
call[name[meta]][constant[next_page]] assign[=] call[name[page_data].next_page_number, parameter[]]
if <ast.BoolOp object at 0x7da204621f60> begin[:]
call[name[meta]][constant[prev_page]] assign[=] call[name[page_data].previous_page_number, parameter[]]
call[name[results]][constant[data]] assign[=] <ast.ListComp object at 0x7da1b0625930>
call[name[results]][constant[meta]] assign[=] name[meta]
return[name[results]]
|
keyword[def] identifier[format_results] ( identifier[self] , identifier[raw_data] , identifier[options] ):
literal[string]
identifier[page_data] = identifier[self] . identifier[paginate_results] ( identifier[raw_data] , identifier[options] )
identifier[results] ={}
identifier[meta] = identifier[options] . identifier[copy] ()
identifier[meta] [ literal[string] ]= identifier[_] ( literal[string] )
keyword[if] identifier[page_data] keyword[and] identifier[page_data] . identifier[has_next] ():
identifier[meta] [ literal[string] ]= identifier[page_data] . identifier[next_page_number] ()
keyword[if] identifier[page_data] keyword[and] identifier[page_data] . identifier[has_previous] ():
identifier[meta] [ literal[string] ]= identifier[page_data] . identifier[previous_page_number] ()
identifier[results] [ literal[string] ]=[ identifier[self] . identifier[format_item] ( identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[page_data] . identifier[object_list] ]
identifier[results] [ literal[string] ]= identifier[meta]
keyword[return] identifier[results]
|
def format_results(self, raw_data, options):
"""
Returns a python structure that later gets serialized.
raw_data
full list of objects matching the search term
options
a dictionary of the given options
"""
page_data = self.paginate_results(raw_data, options)
results = {}
meta = options.copy()
meta['more'] = _('Show more results')
if page_data and page_data.has_next():
meta['next_page'] = page_data.next_page_number() # depends on [control=['if'], data=[]]
if page_data and page_data.has_previous():
meta['prev_page'] = page_data.previous_page_number() # depends on [control=['if'], data=[]]
results['data'] = [self.format_item(item) for item in page_data.object_list]
results['meta'] = meta
return results
|
def translate_reaction(reaction, metabolite_mapping):
    """
    Return the reaction stoichiometry keyed by KEGG compound identifier.

    Parameters
    ----------
    reaction : cobra.Reaction
        The reaction whose metabolites are to be translated.
    metabolite_mapping : dict
        Mapping from cobra.Metabolite to KEGG compound identifier; it is
        extended in place with any metabolites not yet present.

    Returns
    -------
    dict
        The stoichiometry of the reaction given as a mapping from metabolite
        KEGG identifier to coefficient.
    """
    # A compound may occur more than once (e.g. in different compartments of
    # a transport reaction); summing the coefficients keeps one entry per
    # KEGG identifier.
    totals = defaultdict(float)
    for metabolite, coefficient in iteritems(reaction.metabolites):
        kegg_id = metabolite_mapping.setdefault(
            metabolite, map_metabolite2kegg(metabolite))
        if kegg_id is not None:
            totals[kegg_id] += coefficient
    return dict(totals)
|
def function[translate_reaction, parameter[reaction, metabolite_mapping]]:
constant[
Return a mapping from KEGG compound identifiers to coefficients.
Parameters
----------
reaction : cobra.Reaction
The reaction whose metabolites are to be translated.
metabolite_mapping : dict
An existing mapping from cobra.Metabolite to KEGG compound identifier
that may already contain the metabolites in question or will have to be
extended.
Returns
-------
dict
The stoichiometry of the reaction given as a mapping from metabolite
KEGG identifier to coefficient.
]
variable[stoichiometry] assign[=] call[name[defaultdict], parameter[name[float]]]
for taget[tuple[[<ast.Name object at 0x7da18f7215a0>, <ast.Name object at 0x7da18f722320>]]] in starred[call[name[iteritems], parameter[name[reaction].metabolites]]] begin[:]
variable[kegg_id] assign[=] call[name[metabolite_mapping].setdefault, parameter[name[met], call[name[map_metabolite2kegg], parameter[name[met]]]]]
if compare[name[kegg_id] is constant[None]] begin[:]
continue
<ast.AugAssign object at 0x7da18f720100>
return[call[name[dict], parameter[name[stoichiometry]]]]
|
keyword[def] identifier[translate_reaction] ( identifier[reaction] , identifier[metabolite_mapping] ):
literal[string]
identifier[stoichiometry] = identifier[defaultdict] ( identifier[float] )
keyword[for] identifier[met] , identifier[coef] keyword[in] identifier[iteritems] ( identifier[reaction] . identifier[metabolites] ):
identifier[kegg_id] = identifier[metabolite_mapping] . identifier[setdefault] ( identifier[met] , identifier[map_metabolite2kegg] ( identifier[met] ))
keyword[if] identifier[kegg_id] keyword[is] keyword[None] :
keyword[continue]
identifier[stoichiometry] [ identifier[kegg_id] ]+= identifier[coef]
keyword[return] identifier[dict] ( identifier[stoichiometry] )
|
def translate_reaction(reaction, metabolite_mapping):
"""
Return a mapping from KEGG compound identifiers to coefficients.
Parameters
----------
reaction : cobra.Reaction
The reaction whose metabolites are to be translated.
metabolite_mapping : dict
An existing mapping from cobra.Metabolite to KEGG compound identifier
that may already contain the metabolites in question or will have to be
extended.
Returns
-------
dict
The stoichiometry of the reaction given as a mapping from metabolite
KEGG identifier to coefficient.
"""
# Transport reactions where the same metabolite occurs in different
# compartments should have been filtered out but just to be sure, we add
# coefficients in the mapping.
stoichiometry = defaultdict(float)
for (met, coef) in iteritems(reaction.metabolites):
kegg_id = metabolite_mapping.setdefault(met, map_metabolite2kegg(met))
if kegg_id is None:
continue # depends on [control=['if'], data=[]]
stoichiometry[kegg_id] += coef # depends on [control=['for'], data=[]]
return dict(stoichiometry)
|
def _record2card(self, record):
    """
    when we add new records they don't have a card,
    this sort of fakes it up similar to what cfitsio
    does, just for display purposes. e.g.
    DBL = 23.299843
    LNG = 3423432
    KEYSNC = 'hello '
    KEYSC = 'hello ' / a comment for string
    KEYDC = 3.14159265358979 / a comment for pi
    KEYLC = 323423432 / a comment for long
    basically,
    - 8 chars, left aligned, for the keyword name
    - a space
    - 20 chars for value, left aligned for strings, right aligned for
    numbers
    - if there is a comment, one space followed by / then another space
    then the comment out to 80 chars
    """
    name = record['name']
    value = record['value']
    # Strings are quoted and left-aligned; all other values fall through to
    # the right-aligned %20s path below.
    v_isstring = isstring(value)
    if name == 'COMMENT':
        # Commentary keywords take no '=' sign; the text follows directly.
        card = 'COMMENT %s' % value
    elif name == 'CONTINUE':
        card = 'CONTINUE %s' % value
    elif name == 'HISTORY':
        card = 'HISTORY %s' % value
    else:
        if len(name) > 8:
            # Names beyond the 8-character FITS limit use the HIERARCH
            # convention.
            card = 'HIERARCH %s= ' % name
        else:
            card = '%-8s= ' % name[0:8]
        # these may be string representations of data, or actual strings
        if v_isstring:
            value = str(value)
            if len(value) > 0:
                if value[0] != "'":
                    # this is a string representing a string header field
                    # make it look like it will look in the header
                    value = "'" + value + "'"
                    vstr = '%-20s' % value
                else:
                    # Already quoted: keep as-is, right-aligned.
                    vstr = "%20s" % value
            else:
                # Empty string value renders as a bare pair of quotes.
                vstr = "''"
        else:
            vstr = '%20s' % value
        card += vstr
        if 'comment' in record:
            card += ' / %s' % record['comment']
    if v_isstring and len(card) > 80:
        # Truncate to 80 columns but keep a closing quote so the string
        # value stays syntactically valid.
        card = card[0:79] + "'"
    else:
        card = card[0:80]
    return card
|
def function[_record2card, parameter[self, record]]:
constant[
when we add new records they don't have a card,
this sort of fakes it up similar to what cfitsio
does, just for display purposes. e.g.
DBL = 23.299843
LNG = 3423432
KEYSNC = 'hello '
KEYSC = 'hello ' / a comment for string
KEYDC = 3.14159265358979 / a comment for pi
KEYLC = 323423432 / a comment for long
basically,
- 8 chars, left aligned, for the keyword name
- a space
- 20 chars for value, left aligned for strings, right aligned for
numbers
- if there is a comment, one space followed by / then another space
then the comment out to 80 chars
]
variable[name] assign[=] call[name[record]][constant[name]]
variable[value] assign[=] call[name[record]][constant[value]]
variable[v_isstring] assign[=] call[name[isstring], parameter[name[value]]]
if compare[name[name] equal[==] constant[COMMENT]] begin[:]
variable[card] assign[=] binary_operation[constant[COMMENT %s] <ast.Mod object at 0x7da2590d6920> name[value]]
if <ast.BoolOp object at 0x7da18bc71690> begin[:]
variable[card] assign[=] binary_operation[call[name[card]][<ast.Slice object at 0x7da204347010>] + constant[']]
return[name[card]]
|
keyword[def] identifier[_record2card] ( identifier[self] , identifier[record] ):
literal[string]
identifier[name] = identifier[record] [ literal[string] ]
identifier[value] = identifier[record] [ literal[string] ]
identifier[v_isstring] = identifier[isstring] ( identifier[value] )
keyword[if] identifier[name] == literal[string] :
identifier[card] = literal[string] % identifier[value]
keyword[elif] identifier[name] == literal[string] :
identifier[card] = literal[string] % identifier[value]
keyword[elif] identifier[name] == literal[string] :
identifier[card] = literal[string] % identifier[value]
keyword[else] :
keyword[if] identifier[len] ( identifier[name] )> literal[int] :
identifier[card] = literal[string] % identifier[name]
keyword[else] :
identifier[card] = literal[string] % identifier[name] [ literal[int] : literal[int] ]
keyword[if] identifier[v_isstring] :
identifier[value] = identifier[str] ( identifier[value] )
keyword[if] identifier[len] ( identifier[value] )> literal[int] :
keyword[if] identifier[value] [ literal[int] ]!= literal[string] :
identifier[value] = literal[string] + identifier[value] + literal[string]
identifier[vstr] = literal[string] % identifier[value]
keyword[else] :
identifier[vstr] = literal[string] % identifier[value]
keyword[else] :
identifier[vstr] = literal[string]
keyword[else] :
identifier[vstr] = literal[string] % identifier[value]
identifier[card] += identifier[vstr]
keyword[if] literal[string] keyword[in] identifier[record] :
identifier[card] += literal[string] % identifier[record] [ literal[string] ]
keyword[if] identifier[v_isstring] keyword[and] identifier[len] ( identifier[card] )> literal[int] :
identifier[card] = identifier[card] [ literal[int] : literal[int] ]+ literal[string]
keyword[else] :
identifier[card] = identifier[card] [ literal[int] : literal[int] ]
keyword[return] identifier[card]
|
def _record2card(self, record):
"""
when we add new records they don't have a card,
this sort of fakes it up similar to what cfitsio
does, just for display purposes. e.g.
DBL = 23.299843
LNG = 3423432
KEYSNC = 'hello '
KEYSC = 'hello ' / a comment for string
KEYDC = 3.14159265358979 / a comment for pi
KEYLC = 323423432 / a comment for long
basically,
- 8 chars, left aligned, for the keyword name
- a space
- 20 chars for value, left aligned for strings, right aligned for
numbers
- if there is a comment, one space followed by / then another space
then the comment out to 80 chars
"""
name = record['name']
value = record['value']
v_isstring = isstring(value)
if name == 'COMMENT':
# card = 'COMMENT %s' % value
card = 'COMMENT %s' % value # depends on [control=['if'], data=[]]
elif name == 'CONTINUE':
card = 'CONTINUE %s' % value # depends on [control=['if'], data=[]]
elif name == 'HISTORY':
card = 'HISTORY %s' % value # depends on [control=['if'], data=[]]
else:
if len(name) > 8:
card = 'HIERARCH %s= ' % name # depends on [control=['if'], data=[]]
else:
card = '%-8s= ' % name[0:8]
# these may be string representations of data, or actual strings
if v_isstring:
value = str(value)
if len(value) > 0:
if value[0] != "'":
# this is a string representing a string header field
# make it look like it will look in the header
value = "'" + value + "'"
vstr = '%-20s' % value # depends on [control=['if'], data=[]]
else:
vstr = '%20s' % value # depends on [control=['if'], data=[]]
else:
vstr = "''" # depends on [control=['if'], data=[]]
else:
vstr = '%20s' % value
card += vstr
if 'comment' in record:
card += ' / %s' % record['comment'] # depends on [control=['if'], data=['record']]
if v_isstring and len(card) > 80:
card = card[0:79] + "'" # depends on [control=['if'], data=[]]
else:
card = card[0:80]
return card
|
def dump_links(self, o):
    """Dump links.

    Returns a dict with the bucket's own URL plus the same URL suffixed
    with the ``?versions`` and ``?uploads`` query markers.
    """
    # All three links share the same base URL; build it once instead of
    # calling url_for three times with identical arguments.
    base = url_for('.bucket_api', bucket_id=o.id, _external=True)
    return {
        'self': base,
        'versions': base + '?versions',
        'uploads': base + '?uploads',
    }
|
def function[dump_links, parameter[self, o]]:
constant[Dump links.]
return[dictionary[[<ast.Constant object at 0x7da1b19c3850>, <ast.Constant object at 0x7da1b19c2c20>, <ast.Constant object at 0x7da1b19c36d0>], [<ast.Call object at 0x7da1b19c3880>, <ast.BinOp object at 0x7da1b19c0550>, <ast.BinOp object at 0x7da1b1a6de10>]]]
|
keyword[def] identifier[dump_links] ( identifier[self] , identifier[o] ):
literal[string]
keyword[return] {
literal[string] : identifier[url_for] ( literal[string] , identifier[bucket_id] = identifier[o] . identifier[id] , identifier[_external] = keyword[True] ),
literal[string] : identifier[url_for] (
literal[string] , identifier[bucket_id] = identifier[o] . identifier[id] , identifier[_external] = keyword[True] )+ literal[string] ,
literal[string] : identifier[url_for] (
literal[string] , identifier[bucket_id] = identifier[o] . identifier[id] , identifier[_external] = keyword[True] )+ literal[string] ,
}
|
def dump_links(self, o):
"""Dump links."""
return {'self': url_for('.bucket_api', bucket_id=o.id, _external=True), 'versions': url_for('.bucket_api', bucket_id=o.id, _external=True) + '?versions', 'uploads': url_for('.bucket_api', bucket_id=o.id, _external=True) + '?uploads'}
|
def have_all_block_data(self):
    """
    Have we received all block data?

    Returns:
        bool: True when the number of blocks received equals the number
        requested, False otherwise (with a debug log of both counts).
    """
    # Direct inequality reads better than `not (a == b)`.
    if self.num_blocks_received != self.num_blocks_requested:
        log.debug("num blocks received = %s, num requested = %s" % (self.num_blocks_received, self.num_blocks_requested))
        return False
    return True
|
def function[have_all_block_data, parameter[self]]:
constant[
Have we received all block data?
]
if <ast.UnaryOp object at 0x7da1b26c91e0> begin[:]
call[name[log].debug, parameter[binary_operation[constant[num blocks received = %s, num requested = %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b26cb490>, <ast.Attribute object at 0x7da1b26c9210>]]]]]
return[constant[False]]
return[constant[True]]
|
keyword[def] identifier[have_all_block_data] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] ( identifier[self] . identifier[num_blocks_received] == identifier[self] . identifier[num_blocks_requested] ):
identifier[log] . identifier[debug] ( literal[string] %( identifier[self] . identifier[num_blocks_received] , identifier[self] . identifier[num_blocks_requested] ))
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def have_all_block_data(self):
"""
Have we received all block data?
"""
if not self.num_blocks_received == self.num_blocks_requested:
log.debug('num blocks received = %s, num requested = %s' % (self.num_blocks_received, self.num_blocks_requested))
return False # depends on [control=['if'], data=[]]
return True
|
def _is_api_key_correct(request):
    """Return whether the Geckoboard API key on the request is correct.

    The key is read from the username part of an HTTP Basic
    ``Authorization`` header and compared to ``settings.GECKOBOARD_API_KEY``.
    When no key is configured, every request is accepted.
    """
    api_key = getattr(settings, 'GECKOBOARD_API_KEY', None)
    if api_key is None:
        return True
    auth = request.META.get('HTTP_AUTHORIZATION', '').split()
    if len(auth) == 2:
        # WSGI may hand us str or bytes; normalize before comparing.  The
        # previous code compared str to b'basic' and decoded bytes to a str
        # api_key, both of which are always False on Python 3, so every
        # authenticated request was rejected.
        scheme = auth[0]
        if isinstance(scheme, bytes):
            scheme = scheme.decode('latin-1')
        if scheme.lower() == 'basic':
            credentials = auth[1]
            if isinstance(credentials, str):
                credentials = credentials.encode('ascii')
            request_key = base64.b64decode(credentials).split(b':')[0]
            if isinstance(api_key, bytes):
                return request_key == api_key
            return request_key.decode('latin-1') == api_key
    return False
|
def function[_is_api_key_correct, parameter[request]]:
constant[Return whether the Geckoboard API key on the request is correct.]
variable[api_key] assign[=] call[name[getattr], parameter[name[settings], constant[GECKOBOARD_API_KEY], constant[None]]]
if compare[name[api_key] is constant[None]] begin[:]
return[constant[True]]
variable[auth] assign[=] call[call[name[request].META.get, parameter[constant[HTTP_AUTHORIZATION], constant[]]].split, parameter[]]
if compare[call[name[len], parameter[name[auth]]] equal[==] constant[2]] begin[:]
if compare[call[call[name[auth]][constant[0]].lower, parameter[]] equal[==] constant[b'basic']] begin[:]
variable[request_key] assign[=] call[call[call[name[base64].b64decode, parameter[call[name[auth]][constant[1]]]].split, parameter[constant[b':']]]][constant[0]]
return[compare[name[request_key] equal[==] name[api_key]]]
return[constant[False]]
|
keyword[def] identifier[_is_api_key_correct] ( identifier[request] ):
literal[string]
identifier[api_key] = identifier[getattr] ( identifier[settings] , literal[string] , keyword[None] )
keyword[if] identifier[api_key] keyword[is] keyword[None] :
keyword[return] keyword[True]
identifier[auth] = identifier[request] . identifier[META] . identifier[get] ( literal[string] , literal[string] ). identifier[split] ()
keyword[if] identifier[len] ( identifier[auth] )== literal[int] :
keyword[if] identifier[auth] [ literal[int] ]. identifier[lower] ()== literal[string] :
identifier[request_key] = identifier[base64] . identifier[b64decode] ( identifier[auth] [ literal[int] ]). identifier[split] ( literal[string] )[ literal[int] ]
keyword[return] identifier[request_key] == identifier[api_key]
keyword[return] keyword[False]
|
def _is_api_key_correct(request):
"""Return whether the Geckoboard API key on the request is correct."""
api_key = getattr(settings, 'GECKOBOARD_API_KEY', None)
if api_key is None:
return True # depends on [control=['if'], data=[]]
auth = request.META.get('HTTP_AUTHORIZATION', '').split()
if len(auth) == 2:
if auth[0].lower() == b'basic':
request_key = base64.b64decode(auth[1]).split(b':')[0]
return request_key == api_key # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return False
|
def _get_boundary(self, var):
    """Get the position of exon-intron boundary for current variant.

    Returns a ``(left, right)`` pair of transcript coordinates between
    which the variant may be shuffled without crossing an exon-intron
    (or UTR-exon) boundary.  For variant types other than ``r``/``n``
    (e.g. ``g``, ``m``) the unrestricted range ``(0, inf)`` is returned.

    Raises:
        HGVSDataNotAvailableError: no mapping options exist for ``var.ac``.
        HGVSUnsupportedOperationError: the variant spans an exon-intron or
            UTR-exon boundary.
    """
    if var.type == "r" or var.type == "n":
        if self.cross_boundaries:
            # Caller allows crossing boundaries: no restriction.
            return 0, float("inf")
        else:
            # Get genomic sequence access number for this transcript
            map_info = self.hdp.get_tx_mapping_options(var.ac)
            if not map_info:
                raise HGVSDataNotAvailableError(
                    "No mapping info available for {ac}".format(ac=var.ac))
            # Keep only alignments produced by the configured method.
            map_info = [
                item for item in map_info if item["alt_aln_method"] == self.alt_aln_method
            ]
            alt_ac = map_info[0]["alt_ac"]
            # Get tx info
            tx_info = self.hdp.get_tx_info(var.ac, alt_ac, self.alt_aln_method)
            cds_start = tx_info["cds_start_i"]
            cds_end = tx_info["cds_end_i"]
            # Get exon info
            exon_info = self.hdp.get_tx_exons(var.ac, alt_ac, self.alt_aln_method)
            exon_starts = [exon["tx_start_i"] for exon in exon_info]
            exon_ends = [exon["tx_end_i"] for exon in exon_info]
            exon_starts.sort()
            exon_ends.sort()
            # Append a sentinel "exon" running from the last exon end to
            # infinity so positions beyond the last exon still match a slot.
            exon_starts.append(exon_ends[-1])
            exon_ends.append(float("inf"))
            # Find the end pos of the exon where the var locates
            left = 0
            right = float("inf")
            # TODO: #242: implement methods to find tx regions
            # NOTE(review): the loops below rely on Python's for-loop
            # variables (i, j) remaining bound after break; both searches
            # use half-open interval tests on 0-based positions.
            for i, _ in enumerate(exon_starts):
                if (var.posedit.pos.start.base - 1 >= exon_starts[i]
                        and var.posedit.pos.start.base - 1 < exon_ends[i]):
                    break
            for j, _ in enumerate(exon_starts):
                if (var.posedit.pos.end.base - 1 >= exon_starts[j]
                        and var.posedit.pos.end.base - 1 < exon_ends[j]):
                    break
            if i != j:
                # Start and end fall in different exon slots.
                raise HGVSUnsupportedOperationError(
                    "Unsupported normalization of variants spanning the exon-intron boundary ({var})"
                    .format(var=var))
            left = exon_starts[i]
            right = exon_ends[i]
            # Tighten the window so it does not cross the CDS start.
            if cds_start is None:
                pass
            elif var.posedit.pos.end.base - 1 < cds_start:
                right = min(right, cds_start)
            elif var.posedit.pos.start.base - 1 >= cds_start:
                left = max(left, cds_start)
            else:
                raise HGVSUnsupportedOperationError(
                    "Unsupported normalization of variants spanning the UTR-exon boundary ({var})"
                    .format(var=var))
            # Likewise for the CDS end.
            if cds_end is None:
                pass
            elif var.posedit.pos.start.base - 1 >= cds_end:
                left = max(left, cds_end)
            elif var.posedit.pos.end.base - 1 < cds_end:
                right = min(right, cds_end)
            else:
                raise HGVSUnsupportedOperationError(
                    "Unsupported normalization of variants spanning the exon-UTR boundary ({var})"
                    .format(var=var))
            return left, right
    else:
        # For variant type of g and m etc.
        return 0, float("inf")
|
def function[_get_boundary, parameter[self, var]]:
constant[Get the position of exon-intron boundary for current variant
]
if <ast.BoolOp object at 0x7da1b20b4250> begin[:]
if name[self].cross_boundaries begin[:]
return[tuple[[<ast.Constant object at 0x7da1b20b4ee0>, <ast.Call object at 0x7da1b20b62f0>]]]
|
keyword[def] identifier[_get_boundary] ( identifier[self] , identifier[var] ):
literal[string]
keyword[if] identifier[var] . identifier[type] == literal[string] keyword[or] identifier[var] . identifier[type] == literal[string] :
keyword[if] identifier[self] . identifier[cross_boundaries] :
keyword[return] literal[int] , identifier[float] ( literal[string] )
keyword[else] :
identifier[map_info] = identifier[self] . identifier[hdp] . identifier[get_tx_mapping_options] ( identifier[var] . identifier[ac] )
keyword[if] keyword[not] identifier[map_info] :
keyword[raise] identifier[HGVSDataNotAvailableError] (
literal[string] . identifier[format] ( identifier[ac] = identifier[var] . identifier[ac] ))
identifier[map_info] =[
identifier[item] keyword[for] identifier[item] keyword[in] identifier[map_info] keyword[if] identifier[item] [ literal[string] ]== identifier[self] . identifier[alt_aln_method]
]
identifier[alt_ac] = identifier[map_info] [ literal[int] ][ literal[string] ]
identifier[tx_info] = identifier[self] . identifier[hdp] . identifier[get_tx_info] ( identifier[var] . identifier[ac] , identifier[alt_ac] , identifier[self] . identifier[alt_aln_method] )
identifier[cds_start] = identifier[tx_info] [ literal[string] ]
identifier[cds_end] = identifier[tx_info] [ literal[string] ]
identifier[exon_info] = identifier[self] . identifier[hdp] . identifier[get_tx_exons] ( identifier[var] . identifier[ac] , identifier[alt_ac] , identifier[self] . identifier[alt_aln_method] )
identifier[exon_starts] =[ identifier[exon] [ literal[string] ] keyword[for] identifier[exon] keyword[in] identifier[exon_info] ]
identifier[exon_ends] =[ identifier[exon] [ literal[string] ] keyword[for] identifier[exon] keyword[in] identifier[exon_info] ]
identifier[exon_starts] . identifier[sort] ()
identifier[exon_ends] . identifier[sort] ()
identifier[exon_starts] . identifier[append] ( identifier[exon_ends] [- literal[int] ])
identifier[exon_ends] . identifier[append] ( identifier[float] ( literal[string] ))
identifier[left] = literal[int]
identifier[right] = identifier[float] ( literal[string] )
keyword[for] identifier[i] , identifier[_] keyword[in] identifier[enumerate] ( identifier[exon_starts] ):
keyword[if] ( identifier[var] . identifier[posedit] . identifier[pos] . identifier[start] . identifier[base] - literal[int] >= identifier[exon_starts] [ identifier[i] ]
keyword[and] identifier[var] . identifier[posedit] . identifier[pos] . identifier[start] . identifier[base] - literal[int] < identifier[exon_ends] [ identifier[i] ]):
keyword[break]
keyword[for] identifier[j] , identifier[_] keyword[in] identifier[enumerate] ( identifier[exon_starts] ):
keyword[if] ( identifier[var] . identifier[posedit] . identifier[pos] . identifier[end] . identifier[base] - literal[int] >= identifier[exon_starts] [ identifier[j] ]
keyword[and] identifier[var] . identifier[posedit] . identifier[pos] . identifier[end] . identifier[base] - literal[int] < identifier[exon_ends] [ identifier[j] ]):
keyword[break]
keyword[if] identifier[i] != identifier[j] :
keyword[raise] identifier[HGVSUnsupportedOperationError] (
literal[string]
. identifier[format] ( identifier[var] = identifier[var] ))
identifier[left] = identifier[exon_starts] [ identifier[i] ]
identifier[right] = identifier[exon_ends] [ identifier[i] ]
keyword[if] identifier[cds_start] keyword[is] keyword[None] :
keyword[pass]
keyword[elif] identifier[var] . identifier[posedit] . identifier[pos] . identifier[end] . identifier[base] - literal[int] < identifier[cds_start] :
identifier[right] = identifier[min] ( identifier[right] , identifier[cds_start] )
keyword[elif] identifier[var] . identifier[posedit] . identifier[pos] . identifier[start] . identifier[base] - literal[int] >= identifier[cds_start] :
identifier[left] = identifier[max] ( identifier[left] , identifier[cds_start] )
keyword[else] :
keyword[raise] identifier[HGVSUnsupportedOperationError] (
literal[string]
. identifier[format] ( identifier[var] = identifier[var] ))
keyword[if] identifier[cds_end] keyword[is] keyword[None] :
keyword[pass]
keyword[elif] identifier[var] . identifier[posedit] . identifier[pos] . identifier[start] . identifier[base] - literal[int] >= identifier[cds_end] :
identifier[left] = identifier[max] ( identifier[left] , identifier[cds_end] )
keyword[elif] identifier[var] . identifier[posedit] . identifier[pos] . identifier[end] . identifier[base] - literal[int] < identifier[cds_end] :
identifier[right] = identifier[min] ( identifier[right] , identifier[cds_end] )
keyword[else] :
keyword[raise] identifier[HGVSUnsupportedOperationError] (
literal[string]
. identifier[format] ( identifier[var] = identifier[var] ))
keyword[return] identifier[left] , identifier[right]
keyword[else] :
keyword[return] literal[int] , identifier[float] ( literal[string] )
|
def _get_boundary(self, var):
"""Get the position of exon-intron boundary for current variant
"""
if var.type == 'r' or var.type == 'n':
if self.cross_boundaries:
return (0, float('inf')) # depends on [control=['if'], data=[]]
else:
# Get genomic sequence access number for this transcript
map_info = self.hdp.get_tx_mapping_options(var.ac)
if not map_info:
raise HGVSDataNotAvailableError('No mapping info available for {ac}'.format(ac=var.ac)) # depends on [control=['if'], data=[]]
map_info = [item for item in map_info if item['alt_aln_method'] == self.alt_aln_method]
alt_ac = map_info[0]['alt_ac']
# Get tx info
tx_info = self.hdp.get_tx_info(var.ac, alt_ac, self.alt_aln_method)
cds_start = tx_info['cds_start_i']
cds_end = tx_info['cds_end_i']
# Get exon info
exon_info = self.hdp.get_tx_exons(var.ac, alt_ac, self.alt_aln_method)
exon_starts = [exon['tx_start_i'] for exon in exon_info]
exon_ends = [exon['tx_end_i'] for exon in exon_info]
exon_starts.sort()
exon_ends.sort()
exon_starts.append(exon_ends[-1])
exon_ends.append(float('inf'))
# Find the end pos of the exon where the var locates
left = 0
right = float('inf')
# TODO: #242: implement methods to find tx regions
for (i, _) in enumerate(exon_starts):
if var.posedit.pos.start.base - 1 >= exon_starts[i] and var.posedit.pos.start.base - 1 < exon_ends[i]:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
for (j, _) in enumerate(exon_starts):
if var.posedit.pos.end.base - 1 >= exon_starts[j] and var.posedit.pos.end.base - 1 < exon_ends[j]:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if i != j:
raise HGVSUnsupportedOperationError('Unsupported normalization of variants spanning the exon-intron boundary ({var})'.format(var=var)) # depends on [control=['if'], data=[]]
left = exon_starts[i]
right = exon_ends[i]
if cds_start is None:
pass # depends on [control=['if'], data=[]]
elif var.posedit.pos.end.base - 1 < cds_start:
right = min(right, cds_start) # depends on [control=['if'], data=['cds_start']]
elif var.posedit.pos.start.base - 1 >= cds_start:
left = max(left, cds_start) # depends on [control=['if'], data=['cds_start']]
else:
raise HGVSUnsupportedOperationError('Unsupported normalization of variants spanning the UTR-exon boundary ({var})'.format(var=var))
if cds_end is None:
pass # depends on [control=['if'], data=[]]
elif var.posedit.pos.start.base - 1 >= cds_end:
left = max(left, cds_end) # depends on [control=['if'], data=['cds_end']]
elif var.posedit.pos.end.base - 1 < cds_end:
right = min(right, cds_end) # depends on [control=['if'], data=['cds_end']]
else:
raise HGVSUnsupportedOperationError('Unsupported normalization of variants spanning the exon-UTR boundary ({var})'.format(var=var))
return (left, right) # depends on [control=['if'], data=[]]
else:
# For variant type of g and m etc.
return (0, float('inf'))
|
def save(self):
    """Save (REGISTER) or update this entity on the Ariane server.

    Requires ``id``, ``value`` and ``type`` to be set. After the entity is
    registered/updated, the parent link is pushed when ``parent_id`` is
    defined. Failures are logged, never raised.

    :return: None
    """
    LOGGER.debug("InjectorUITreeEntity.save")
    if not (self.id and self.value and self.type):
        # str() guards against self.id being None here: this branch fires
        # exactly when id may be unset, and '+' with None raises TypeError.
        err_msg = 'InjectorUITreeEntity.save - Problem while saving or updating ' \
                  'injector UI Tree Menu Entity (id:' + str(self.id) + '). ' + \
                  'Reason: id and/or value and/or type is/are not defined !'
        LOGGER.debug(err_msg)
        return

    # REGISTER when the entity is unknown to the server, UPDATE otherwise.
    # Both operations share the same payload shape and error handling, so
    # only the OPERATION value differs.
    if InjectorUITreeService.find_ui_tree_entity(self.id) is None:
        operation = 'REGISTER'
    else:
        operation = 'UPDATE'
    self_string = str(self.injector_ui_tree_menu_entity_2_json(ignore_genealogy=True)).replace("'", '"')
    args = {'properties': {'OPERATION': operation, 'TREE_MENU_ENTITY': self_string}}
    result = InjectorUITreeService.requester.call(args).get()
    ok = True
    if result.rc != 0:
        err_msg = 'InjectorUITreeEntity.save - Problem while saving injector UI Tree Menu Entity (id:' + \
                  self.id + '). ' + \
                  'Reason: ' + str(result.response_content) + '-' + str(result.error_message) + \
                  " (" + str(result.rc) + ")"
        LOGGER.warning(err_msg)
        ok = False
    if ok and self.parent_id is not None:
        # Attach this entity to its parent node in the UI tree.
        args = {'properties': {'OPERATION': 'SET_PARENT', 'TREE_MENU_ENTITY_ID': self.id,
                               'TREE_MENU_ENTITY_PARENT_ID': self.parent_id}}
        result = InjectorUITreeService.requester.call(args).get()
        if result.rc != 0:
            err_msg = 'InjectorUITreeEntity.save - Problem while updating injector UI Tree Menu Entity (id:' + \
                      self.id + '). ' + \
                      'Reason: ' + str(result.response_content) + '-' + str(result.error_message) + \
                      " (" + str(result.rc) + ")"
            LOGGER.warning(err_msg)
|
def function[save, parameter[self]]:
constant[
save or update this entity on Ariane server
:return:
]
call[name[LOGGER].debug, parameter[constant[InjectorUITreeEntity.save]]]
if <ast.BoolOp object at 0x7da20c6e40a0> begin[:]
variable[ok] assign[=] constant[True]
if compare[call[name[InjectorUITreeService].find_ui_tree_entity, parameter[name[self].id]] is constant[None]] begin[:]
variable[self_string] assign[=] call[call[name[str], parameter[call[name[self].injector_ui_tree_menu_entity_2_json, parameter[]]]].replace, parameter[constant['], constant["]]]
variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da20c6e7a30>], [<ast.Dict object at 0x7da20c6e6ad0>]]
variable[result] assign[=] call[call[name[InjectorUITreeService].requester.call, parameter[name[args]]].get, parameter[]]
if compare[name[result].rc not_equal[!=] constant[0]] begin[:]
variable[err_msg] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[InjectorUITreeEntity.save - Problem while saving injector UI Tree Menu Entity (id:] + name[self].id] + constant[). ]] + constant[Reason: ]] + call[name[str], parameter[name[result].response_content]]] + constant[-]] + call[name[str], parameter[name[result].error_message]]] + constant[ (]] + call[name[str], parameter[name[result].rc]]] + constant[)]]
call[name[LOGGER].warning, parameter[name[err_msg]]]
variable[ok] assign[=] constant[False]
if <ast.BoolOp object at 0x7da204345090> begin[:]
variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da204346320>], [<ast.Dict object at 0x7da204347b50>]]
variable[result] assign[=] call[call[name[InjectorUITreeService].requester.call, parameter[name[args]]].get, parameter[]]
if compare[name[result].rc not_equal[!=] constant[0]] begin[:]
variable[err_msg] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[InjectorUITreeEntity.save - Problem while updating injector UI Tree Menu Entity (id:] + name[self].id] + constant[). ]] + constant[Reason: ]] + call[name[str], parameter[name[result].response_content]]] + constant[-]] + call[name[str], parameter[name[result].error_message]]] + constant[ (]] + call[name[str], parameter[name[result].rc]]] + constant[)]]
call[name[LOGGER].warning, parameter[name[err_msg]]]
|
keyword[def] identifier[save] ( identifier[self] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[if] identifier[self] . identifier[id] keyword[and] identifier[self] . identifier[value] keyword[and] identifier[self] . identifier[type] :
identifier[ok] = keyword[True]
keyword[if] identifier[InjectorUITreeService] . identifier[find_ui_tree_entity] ( identifier[self] . identifier[id] ) keyword[is] keyword[None] :
identifier[self_string] = identifier[str] ( identifier[self] . identifier[injector_ui_tree_menu_entity_2_json] ( identifier[ignore_genealogy] = keyword[True] )). identifier[replace] ( literal[string] , literal[string] )
identifier[args] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : identifier[self_string] }}
identifier[result] = identifier[InjectorUITreeService] . identifier[requester] . identifier[call] ( identifier[args] ). identifier[get] ()
keyword[if] identifier[result] . identifier[rc] != literal[int] :
identifier[err_msg] = literal[string] + identifier[self] . identifier[id] + literal[string] + literal[string] + identifier[str] ( identifier[result] . identifier[response_content] )+ literal[string] + identifier[str] ( identifier[result] . identifier[error_message] )+ literal[string] + identifier[str] ( identifier[result] . identifier[rc] )+ literal[string]
identifier[LOGGER] . identifier[warning] ( identifier[err_msg] )
identifier[ok] = keyword[False]
keyword[else] :
identifier[self_string] = identifier[str] ( identifier[self] . identifier[injector_ui_tree_menu_entity_2_json] ( identifier[ignore_genealogy] = keyword[True] )). identifier[replace] ( literal[string] , literal[string] )
identifier[args] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : identifier[self_string] }}
identifier[result] = identifier[InjectorUITreeService] . identifier[requester] . identifier[call] ( identifier[args] ). identifier[get] ()
keyword[if] identifier[result] . identifier[rc] != literal[int] :
identifier[err_msg] = literal[string] + identifier[self] . identifier[id] + literal[string] + literal[string] + identifier[str] ( identifier[result] . identifier[response_content] )+ literal[string] + identifier[str] ( identifier[result] . identifier[error_message] )+ literal[string] + identifier[str] ( identifier[result] . identifier[rc] )+ literal[string]
identifier[LOGGER] . identifier[warning] ( identifier[err_msg] )
identifier[ok] = keyword[False]
keyword[if] identifier[ok] keyword[and] identifier[self] . identifier[parent_id] keyword[is] keyword[not] keyword[None] :
identifier[args] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : identifier[self] . identifier[id] ,
literal[string] : identifier[self] . identifier[parent_id] }}
identifier[result] = identifier[InjectorUITreeService] . identifier[requester] . identifier[call] ( identifier[args] ). identifier[get] ()
keyword[if] identifier[result] . identifier[rc] != literal[int] :
identifier[err_msg] = literal[string] + identifier[self] . identifier[id] + literal[string] + literal[string] + identifier[str] ( identifier[result] . identifier[response_content] )+ literal[string] + identifier[str] ( identifier[result] . identifier[error_message] )+ literal[string] + identifier[str] ( identifier[result] . identifier[rc] )+ literal[string]
identifier[LOGGER] . identifier[warning] ( identifier[err_msg] )
keyword[else] :
identifier[err_msg] = literal[string] literal[string] + identifier[self] . identifier[id] + literal[string] + literal[string]
identifier[LOGGER] . identifier[debug] ( identifier[err_msg] )
|
def save(self):
"""
save or update this entity on Ariane server
:return:
"""
LOGGER.debug('InjectorUITreeEntity.save')
if self.id and self.value and self.type:
ok = True
if InjectorUITreeService.find_ui_tree_entity(self.id) is None: # SAVE
self_string = str(self.injector_ui_tree_menu_entity_2_json(ignore_genealogy=True)).replace("'", '"')
args = {'properties': {'OPERATION': 'REGISTER', 'TREE_MENU_ENTITY': self_string}}
result = InjectorUITreeService.requester.call(args).get()
if result.rc != 0:
err_msg = 'InjectorUITreeEntity.save - Problem while saving injector UI Tree Menu Entity (id:' + self.id + '). ' + 'Reason: ' + str(result.response_content) + '-' + str(result.error_message) + ' (' + str(result.rc) + ')'
LOGGER.warning(err_msg)
ok = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else: # UPDATE
self_string = str(self.injector_ui_tree_menu_entity_2_json(ignore_genealogy=True)).replace("'", '"')
args = {'properties': {'OPERATION': 'UPDATE', 'TREE_MENU_ENTITY': self_string}}
result = InjectorUITreeService.requester.call(args).get()
if result.rc != 0:
err_msg = 'InjectorUITreeEntity.save - Problem while saving injector UI Tree Menu Entity (id:' + self.id + '). ' + 'Reason: ' + str(result.response_content) + '-' + str(result.error_message) + ' (' + str(result.rc) + ')'
LOGGER.warning(err_msg)
ok = False # depends on [control=['if'], data=[]]
if ok and self.parent_id is not None:
args = {'properties': {'OPERATION': 'SET_PARENT', 'TREE_MENU_ENTITY_ID': self.id, 'TREE_MENU_ENTITY_PARENT_ID': self.parent_id}}
result = InjectorUITreeService.requester.call(args).get()
if result.rc != 0:
err_msg = 'InjectorUITreeEntity.save - Problem while updating injector UI Tree Menu Entity (id:' + self.id + '). ' + 'Reason: ' + str(result.response_content) + '-' + str(result.error_message) + ' (' + str(result.rc) + ')'
LOGGER.warning(err_msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
err_msg = 'InjectorUITreeEntity.save - Problem while saving or updating injector UI Tree Menu Entity (id:' + self.id + '). ' + 'Reason: id and/or value and/or type is/are not defined !'
LOGGER.debug(err_msg)
|
def _deepcopy(self, x, memo=None):
    """Deepcopy helper for the data dictionary or list.
    Regular expressions cannot be deep copied but as they are immutable we
    don't have to copy them when cloning.
    """
    if memo is None:
        memo = {}
    key_of_x = id(x)
    if key_of_x in memo:
        # Already cloned (cycle or shared reference): reuse that copy.
        return memo.get(key_of_x)
    # Anything with .items() is treated as a mapping, everything else as a
    # sequence (cloned into a plain list).
    as_mapping = hasattr(x, 'items')
    clone = {} if as_mapping else []
    # Register the clone before recursing so cyclic structures terminate.
    memo[key_of_x] = clone
    pairs = iteritems(x) if as_mapping else enumerate(x)
    for k, v in pairs:
        if isinstance(v, (dict, list)) and not isinstance(v, SON):
            v = self._deepcopy(v, memo)
        elif not isinstance(v, RE_TYPE):
            # Compiled regexes are immutable, so they are shared as-is.
            v = copy.deepcopy(v, memo)
        if as_mapping:
            if not isinstance(k, RE_TYPE):
                k = copy.deepcopy(k, memo)
            clone[k] = v
        else:
            clone.append(v)
    return clone
|
def function[_deepcopy, parameter[self, x, memo]]:
constant[Deepcopy helper for the data dictionary or list.
Regular expressions cannot be deep copied but as they are immutable we
don't have to copy them when cloning.
]
if <ast.UnaryOp object at 0x7da20c7c9c00> begin[:]
<ast.Tuple object at 0x7da20c7c9f90> assign[=] tuple[[<ast.List object at 0x7da20c7cadd0>, <ast.Constant object at 0x7da20c7c85b0>, <ast.Call object at 0x7da20c7c8eb0>]]
if compare[name[memo] is constant[None]] begin[:]
variable[memo] assign[=] dictionary[[], []]
variable[val_id] assign[=] call[name[id], parameter[name[x]]]
if compare[name[val_id] in name[memo]] begin[:]
return[call[name[memo].get, parameter[name[val_id]]]]
call[name[memo]][name[val_id]] assign[=] name[y]
for taget[tuple[[<ast.Name object at 0x7da20c7cb1f0>, <ast.Name object at 0x7da20c7cbc70>]]] in starred[name[iterator]] begin[:]
if <ast.BoolOp object at 0x7da20c7c80a0> begin[:]
variable[value] assign[=] call[name[self]._deepcopy, parameter[name[value], name[memo]]]
if name[is_list] begin[:]
call[name[y].append, parameter[name[value]]]
return[name[y]]
|
keyword[def] identifier[_deepcopy] ( identifier[self] , identifier[x] , identifier[memo] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[x] , literal[string] ):
identifier[y] , identifier[is_list] , identifier[iterator] =[], keyword[True] , identifier[enumerate] ( identifier[x] )
keyword[else] :
identifier[y] , identifier[is_list] , identifier[iterator] ={}, keyword[False] , identifier[iteritems] ( identifier[x] )
keyword[if] identifier[memo] keyword[is] keyword[None] :
identifier[memo] ={}
identifier[val_id] = identifier[id] ( identifier[x] )
keyword[if] identifier[val_id] keyword[in] identifier[memo] :
keyword[return] identifier[memo] . identifier[get] ( identifier[val_id] )
identifier[memo] [ identifier[val_id] ]= identifier[y]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[iterator] :
keyword[if] identifier[isinstance] ( identifier[value] ,( identifier[dict] , identifier[list] )) keyword[and] keyword[not] identifier[isinstance] ( identifier[value] , identifier[SON] ):
identifier[value] = identifier[self] . identifier[_deepcopy] ( identifier[value] , identifier[memo] )
keyword[elif] keyword[not] identifier[isinstance] ( identifier[value] , identifier[RE_TYPE] ):
identifier[value] = identifier[copy] . identifier[deepcopy] ( identifier[value] , identifier[memo] )
keyword[if] identifier[is_list] :
identifier[y] . identifier[append] ( identifier[value] )
keyword[else] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[key] , identifier[RE_TYPE] ):
identifier[key] = identifier[copy] . identifier[deepcopy] ( identifier[key] , identifier[memo] )
identifier[y] [ identifier[key] ]= identifier[value]
keyword[return] identifier[y]
|
def _deepcopy(self, x, memo=None):
"""Deepcopy helper for the data dictionary or list.
Regular expressions cannot be deep copied but as they are immutable we
don't have to copy them when cloning.
"""
if not hasattr(x, 'items'):
(y, is_list, iterator) = ([], True, enumerate(x)) # depends on [control=['if'], data=[]]
else:
(y, is_list, iterator) = ({}, False, iteritems(x))
if memo is None:
memo = {} # depends on [control=['if'], data=['memo']]
val_id = id(x)
if val_id in memo:
return memo.get(val_id) # depends on [control=['if'], data=['val_id', 'memo']]
memo[val_id] = y
for (key, value) in iterator:
if isinstance(value, (dict, list)) and (not isinstance(value, SON)):
value = self._deepcopy(value, memo) # depends on [control=['if'], data=[]]
elif not isinstance(value, RE_TYPE):
value = copy.deepcopy(value, memo) # depends on [control=['if'], data=[]]
if is_list:
y.append(value) # depends on [control=['if'], data=[]]
else:
if not isinstance(key, RE_TYPE):
key = copy.deepcopy(key, memo) # depends on [control=['if'], data=[]]
y[key] = value # depends on [control=['for'], data=[]]
return y
|
def scrub_dict(d):
    """Recursively remove all empty values from *d*.

    Falsy values (empty strings, empty lists/dicts, 0, None, ...) are
    dropped from dictionaries and lists at every nesting level; values
    that become empty after scrubbing are dropped too. Any object that is
    not exactly a ``dict`` or ``list`` is returned unchanged.

    :param d: the dict, list, or scalar to clean
    :return: a new scrubbed dict/list, or ``d`` itself
    """
    # NOTE: exact type checks (not isinstance) are kept on purpose so that
    # dict/list subclasses keep the original pass-through behaviour.
    if type(d) is dict:
        result = {}
        for key, value in d.items():  # .items() works on py2 and py3 (iteritems() does not)
            if not value:
                continue
            scrubbed = scrub_dict(value)  # recurse once and reuse the result
            if scrubbed:
                result[key] = scrubbed
        return result
    elif type(d) is list:
        # Inner generator recurses once per element; outer filter drops
        # elements that scrubbed down to nothing.
        return [s for s in (scrub_dict(v) for v in d if v) if s]
    else:
        return d
|
def function[scrub_dict, parameter[d]]:
constant[ Recursively inspect a dictionary and remove all empty values, including
empty strings, lists, and dictionaries.
]
if compare[call[name[type], parameter[name[d]]] is name[dict]] begin[:]
return[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b23b0e80>]]]
|
keyword[def] identifier[scrub_dict] ( identifier[d] ):
literal[string]
keyword[if] identifier[type] ( identifier[d] ) keyword[is] identifier[dict] :
keyword[return] identifier[dict] (
( identifier[k] , identifier[scrub_dict] ( identifier[v] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[d] . identifier[iteritems] () keyword[if] identifier[v] keyword[and] identifier[scrub_dict] ( identifier[v] )
)
keyword[elif] identifier[type] ( identifier[d] ) keyword[is] identifier[list] :
keyword[return] [
identifier[scrub_dict] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[d] keyword[if] identifier[v] keyword[and] identifier[scrub_dict] ( identifier[v] )
]
keyword[else] :
keyword[return] identifier[d]
|
def scrub_dict(d):
""" Recursively inspect a dictionary and remove all empty values, including
empty strings, lists, and dictionaries.
"""
if type(d) is dict:
return dict(((k, scrub_dict(v)) for (k, v) in d.iteritems() if v and scrub_dict(v))) # depends on [control=['if'], data=['dict']]
elif type(d) is list:
return [scrub_dict(v) for v in d if v and scrub_dict(v)] # depends on [control=['if'], data=[]]
else:
return d
|
def _invoke_hook(hook_name, target):
"""
Generic hook invocation.
"""
try:
for value in getattr(target, hook_name):
func, args, kwargs = value
func(target, *args, **kwargs)
except AttributeError:
# no hook defined
pass
except (TypeError, ValueError):
# hook not properly defined (might be a mock)
pass
|
def function[_invoke_hook, parameter[hook_name, target]]:
constant[
Generic hook invocation.
]
<ast.Try object at 0x7da1b0c92350>
|
keyword[def] identifier[_invoke_hook] ( identifier[hook_name] , identifier[target] ):
literal[string]
keyword[try] :
keyword[for] identifier[value] keyword[in] identifier[getattr] ( identifier[target] , identifier[hook_name] ):
identifier[func] , identifier[args] , identifier[kwargs] = identifier[value]
identifier[func] ( identifier[target] ,* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[pass]
|
def _invoke_hook(hook_name, target):
"""
Generic hook invocation.
"""
try:
for value in getattr(target, hook_name):
(func, args, kwargs) = value
func(target, *args, **kwargs) # depends on [control=['for'], data=['value']] # depends on [control=['try'], data=[]]
except AttributeError:
# no hook defined
pass # depends on [control=['except'], data=[]]
except (TypeError, ValueError):
# hook not properly defined (might be a mock)
pass # depends on [control=['except'], data=[]]
|
def append(self, child):
    """
    Append the given child
    :class:`Element <hl7apy.core.Element>`
    :param child: an instance of an :class:`Element <hl7apy.core.Element>` subclass
    """
    if not self._can_add_child(child):
        # Child rejected: nothing is stored anywhere.
        return
    if self.element == child.parent:
        # Real child: drop any stale traversal entry, then index it by name.
        self._remove_from_traversal_index(child)
        self.list.append(child)
        self.indexes.setdefault(child.name, []).append(child)
    elif self.element == child.traversal_parent:
        # Traversal-only child: tracked in the traversal index alone.
        self.traversal_indexes.setdefault(child.name, []).append(child)
|
def function[append, parameter[self, child]]:
constant[
Append the given child
:class:`Element <hl7apy.core.Element>`
:param child: an instance of an :class:`Element <hl7apy.core.Element>` subclass
]
if call[name[self]._can_add_child, parameter[name[child]]] begin[:]
if compare[name[self].element equal[==] name[child].parent] begin[:]
call[name[self]._remove_from_traversal_index, parameter[name[child]]]
call[name[self].list.append, parameter[name[child]]]
<ast.Try object at 0x7da1b0ebfa90>
|
keyword[def] identifier[append] ( identifier[self] , identifier[child] ):
literal[string]
keyword[if] identifier[self] . identifier[_can_add_child] ( identifier[child] ):
keyword[if] identifier[self] . identifier[element] == identifier[child] . identifier[parent] :
identifier[self] . identifier[_remove_from_traversal_index] ( identifier[child] )
identifier[self] . identifier[list] . identifier[append] ( identifier[child] )
keyword[try] :
identifier[self] . identifier[indexes] [ identifier[child] . identifier[name] ]. identifier[append] ( identifier[child] )
keyword[except] identifier[KeyError] :
identifier[self] . identifier[indexes] [ identifier[child] . identifier[name] ]=[ identifier[child] ]
keyword[elif] identifier[self] . identifier[element] == identifier[child] . identifier[traversal_parent] :
keyword[try] :
identifier[self] . identifier[traversal_indexes] [ identifier[child] . identifier[name] ]. identifier[append] ( identifier[child] )
keyword[except] identifier[KeyError] :
identifier[self] . identifier[traversal_indexes] [ identifier[child] . identifier[name] ]=[ identifier[child] ]
|
def append(self, child):
"""
Append the given child
:class:`Element <hl7apy.core.Element>`
:param child: an instance of an :class:`Element <hl7apy.core.Element>` subclass
"""
if self._can_add_child(child):
if self.element == child.parent:
self._remove_from_traversal_index(child)
self.list.append(child)
try:
self.indexes[child.name].append(child) # depends on [control=['try'], data=[]]
except KeyError:
self.indexes[child.name] = [child] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif self.element == child.traversal_parent:
try:
self.traversal_indexes[child.name].append(child) # depends on [control=['try'], data=[]]
except KeyError:
self.traversal_indexes[child.name] = [child] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
|
def _get_success(self, stanza):
    """Handle successful response to the roster request.
    """
    payload = stanza.get_payload(RosterPayload)
    if payload is None:
        # No payload in the reply. With roster versioning (and a cached
        # roster) the server will push deltas later; otherwise the reply
        # is malformed and the failure event is emitted.
        if "versioning" in self.server_features and self.roster:
            logger.debug("Server will send roster delta in pushes")
        else:
            logger.warning("Bad roster response (no payload)")
            self._event_queue.put(RosterNotReceivedEvent(self, stanza))
        return
    received = list(payload)
    for entry in received:
        entry.verify_roster_result(True)
    self.roster = Roster(received, payload.version)
    self._event_queue.put(RosterReceivedEvent(self, self.roster))
|
def function[_get_success, parameter[self, stanza]]:
constant[Handle successful response to the roster request.
]
variable[payload] assign[=] call[name[stanza].get_payload, parameter[name[RosterPayload]]]
if compare[name[payload] is constant[None]] begin[:]
if <ast.BoolOp object at 0x7da18f00e410> begin[:]
call[name[logger].debug, parameter[constant[Server will send roster delta in pushes]]]
call[name[self]._event_queue.put, parameter[call[name[RosterReceivedEvent], parameter[name[self], name[self].roster]]]]
|
keyword[def] identifier[_get_success] ( identifier[self] , identifier[stanza] ):
literal[string]
identifier[payload] = identifier[stanza] . identifier[get_payload] ( identifier[RosterPayload] )
keyword[if] identifier[payload] keyword[is] keyword[None] :
keyword[if] literal[string] keyword[in] identifier[self] . identifier[server_features] keyword[and] identifier[self] . identifier[roster] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[else] :
identifier[logger] . identifier[warning] ( literal[string] )
identifier[self] . identifier[_event_queue] . identifier[put] ( identifier[RosterNotReceivedEvent] ( identifier[self] , identifier[stanza] ))
keyword[return]
keyword[else] :
identifier[items] = identifier[list] ( identifier[payload] )
keyword[for] identifier[item] keyword[in] identifier[items] :
identifier[item] . identifier[verify_roster_result] ( keyword[True] )
identifier[self] . identifier[roster] = identifier[Roster] ( identifier[items] , identifier[payload] . identifier[version] )
identifier[self] . identifier[_event_queue] . identifier[put] ( identifier[RosterReceivedEvent] ( identifier[self] , identifier[self] . identifier[roster] ))
|
def _get_success(self, stanza):
"""Handle successful response to the roster request.
"""
payload = stanza.get_payload(RosterPayload)
if payload is None:
if 'versioning' in self.server_features and self.roster:
logger.debug('Server will send roster delta in pushes') # depends on [control=['if'], data=[]]
else:
logger.warning('Bad roster response (no payload)')
self._event_queue.put(RosterNotReceivedEvent(self, stanza))
return # depends on [control=['if'], data=[]]
else:
items = list(payload)
for item in items:
item.verify_roster_result(True) # depends on [control=['for'], data=['item']]
self.roster = Roster(items, payload.version)
self._event_queue.put(RosterReceivedEvent(self, self.roster))
|
def _file_dict(self, fn_):
'''
Take a path and return the contents of the file as a string
'''
if not os.path.isfile(fn_):
err = 'The referenced file, {0} is not available.'.format(fn_)
sys.stderr.write(err + '\n')
sys.exit(42)
with salt.utils.files.fopen(fn_, 'r') as fp_:
data = fp_.read()
return {fn_: data}
|
def function[_file_dict, parameter[self, fn_]]:
constant[
Take a path and return the contents of the file as a string
]
if <ast.UnaryOp object at 0x7da1b1c47670> begin[:]
variable[err] assign[=] call[constant[The referenced file, {0} is not available.].format, parameter[name[fn_]]]
call[name[sys].stderr.write, parameter[binary_operation[name[err] + constant[
]]]]
call[name[sys].exit, parameter[constant[42]]]
with call[name[salt].utils.files.fopen, parameter[name[fn_], constant[r]]] begin[:]
variable[data] assign[=] call[name[fp_].read, parameter[]]
return[dictionary[[<ast.Name object at 0x7da1b215ce50>], [<ast.Name object at 0x7da1b215fe50>]]]
|
keyword[def] identifier[_file_dict] ( identifier[self] , identifier[fn_] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[fn_] ):
identifier[err] = literal[string] . identifier[format] ( identifier[fn_] )
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[err] + literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[fn_] , literal[string] ) keyword[as] identifier[fp_] :
identifier[data] = identifier[fp_] . identifier[read] ()
keyword[return] { identifier[fn_] : identifier[data] }
|
def _file_dict(self, fn_):
"""
Take a path and return the contents of the file as a string
"""
if not os.path.isfile(fn_):
err = 'The referenced file, {0} is not available.'.format(fn_)
sys.stderr.write(err + '\n')
sys.exit(42) # depends on [control=['if'], data=[]]
with salt.utils.files.fopen(fn_, 'r') as fp_:
data = fp_.read() # depends on [control=['with'], data=['fp_']]
return {fn_: data}
|
def delete_states_geo_zone_by_id(cls, states_geo_zone_id, **kwargs):
    """Delete StatesGeoZone
    Delete an instance of StatesGeoZone by its ID.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.delete_states_geo_zone_by_id(states_geo_zone_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param str states_geo_zone_id: ID of statesGeoZone to delete. (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async callers hit the same delegate with identical
    # arguments; the delegate inspects kwargs['async'] itself and returns
    # either the data or the request thread accordingly.
    return cls._delete_states_geo_zone_by_id_with_http_info(states_geo_zone_id, **kwargs)
|
def function[delete_states_geo_zone_by_id, parameter[cls, states_geo_zone_id]]:
constant[Delete StatesGeoZone
Delete an instance of StatesGeoZone by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_states_geo_zone_by_id(states_geo_zone_id, async=True)
>>> result = thread.get()
:param async bool
:param str states_geo_zone_id: ID of statesGeoZone to delete. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async]]] begin[:]
return[call[name[cls]._delete_states_geo_zone_by_id_with_http_info, parameter[name[states_geo_zone_id]]]]
|
keyword[def] identifier[delete_states_geo_zone_by_id] ( identifier[cls] , identifier[states_geo_zone_id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[cls] . identifier[_delete_states_geo_zone_by_id_with_http_info] ( identifier[states_geo_zone_id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[cls] . identifier[_delete_states_geo_zone_by_id_with_http_info] ( identifier[states_geo_zone_id] ,** identifier[kwargs] )
keyword[return] identifier[data]
|
def delete_states_geo_zone_by_id(cls, states_geo_zone_id, **kwargs):
"""Delete StatesGeoZone
Delete an instance of StatesGeoZone by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_states_geo_zone_by_id(states_geo_zone_id, async=True)
>>> result = thread.get()
:param async bool
:param str states_geo_zone_id: ID of statesGeoZone to delete. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._delete_states_geo_zone_by_id_with_http_info(states_geo_zone_id, **kwargs) # depends on [control=['if'], data=[]]
else:
data = cls._delete_states_geo_zone_by_id_with_http_info(states_geo_zone_id, **kwargs)
return data
|
def on_get(resc, req, resp, rid):
    """Look up a single model by id and serialize it onto the response.

    Fires the pre/post request signals around the lookup so listeners can
    observe the request lifecycle.
    """
    signals.pre_req.send(resc.model)
    signals.pre_req_find.send(resc.model)

    record = find(resc.model, rid)
    serialized = to_rest_model(record, includes=req.includes)

    # Expose the record's update timestamp for HTTP caching.
    resp.last_modified = record.updated
    resp.serialize(serialized)

    signals.post_req.send(resc.model)
    signals.post_req_find.send(resc.model)
|
def function[on_get, parameter[resc, req, resp, rid]]:
constant[ Find the model by id & serialize it back ]
call[name[signals].pre_req.send, parameter[name[resc].model]]
call[name[signals].pre_req_find.send, parameter[name[resc].model]]
variable[model] assign[=] call[name[find], parameter[name[resc].model, name[rid]]]
variable[props] assign[=] call[name[to_rest_model], parameter[name[model]]]
name[resp].last_modified assign[=] name[model].updated
call[name[resp].serialize, parameter[name[props]]]
call[name[signals].post_req.send, parameter[name[resc].model]]
call[name[signals].post_req_find.send, parameter[name[resc].model]]
|
keyword[def] identifier[on_get] ( identifier[resc] , identifier[req] , identifier[resp] , identifier[rid] ):
literal[string]
identifier[signals] . identifier[pre_req] . identifier[send] ( identifier[resc] . identifier[model] )
identifier[signals] . identifier[pre_req_find] . identifier[send] ( identifier[resc] . identifier[model] )
identifier[model] = identifier[find] ( identifier[resc] . identifier[model] , identifier[rid] )
identifier[props] = identifier[to_rest_model] ( identifier[model] , identifier[includes] = identifier[req] . identifier[includes] )
identifier[resp] . identifier[last_modified] = identifier[model] . identifier[updated]
identifier[resp] . identifier[serialize] ( identifier[props] )
identifier[signals] . identifier[post_req] . identifier[send] ( identifier[resc] . identifier[model] )
identifier[signals] . identifier[post_req_find] . identifier[send] ( identifier[resc] . identifier[model] )
|
def on_get(resc, req, resp, rid):
""" Find the model by id & serialize it back """
signals.pre_req.send(resc.model)
signals.pre_req_find.send(resc.model)
model = find(resc.model, rid)
props = to_rest_model(model, includes=req.includes)
resp.last_modified = model.updated
resp.serialize(props)
signals.post_req.send(resc.model)
signals.post_req_find.send(resc.model)
|
def read_txt_file(filepath):
    """Read all lines of text from `filepath`.

    Returns the list produced by ``readlines()``; note that each line
    keeps its trailing newline character.

    On Python 3 the file is decoded as UTF-8; on Python 2 it is opened
    with the platform default (legacy behavior preserved).
    """
    # Compare the version tuple, not the version string: the old check
    # ``sys.version > '3'`` was a lexicographic string comparison, which
    # breaks for any major version whose first character sorts below '3'.
    if sys.version_info[0] >= 3:
        with open(filepath, 'r', encoding='utf-8') as txt_file:
            return txt_file.readlines()
    else:
        with open(filepath) as txt_file:
            return txt_file.readlines()
|
def function[read_txt_file, parameter[filepath]]:
constant[read text from `filepath` and remove linebreaks
]
if compare[name[sys].version greater[>] constant[3]] begin[:]
with call[name[open], parameter[name[filepath], constant[r]]] begin[:]
return[call[name[txt_file].readlines, parameter[]]]
|
keyword[def] identifier[read_txt_file] ( identifier[filepath] ):
literal[string]
keyword[if] identifier[sys] . identifier[version] > literal[string] :
keyword[with] identifier[open] ( identifier[filepath] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[txt_file] :
keyword[return] identifier[txt_file] . identifier[readlines] ()
keyword[else] :
keyword[with] identifier[open] ( identifier[filepath] ) keyword[as] identifier[txt_file] :
keyword[return] identifier[txt_file] . identifier[readlines] ()
|
def read_txt_file(filepath):
"""read text from `filepath` and remove linebreaks
"""
if sys.version > '3':
with open(filepath, 'r', encoding='utf-8') as txt_file:
return txt_file.readlines() # depends on [control=['with'], data=['txt_file']] # depends on [control=['if'], data=[]]
else:
with open(filepath) as txt_file:
return txt_file.readlines() # depends on [control=['with'], data=['txt_file']]
|
def _on_arc(self, speed, radius_mm, distance_mm, brake, block, arc_right):
    """
    Drive along a circular arc of radius ``radius_mm`` (measured to the
    midpoint between the wheels) until that midpoint has traveled
    ``distance_mm``. ``arc_right`` selects the turn direction.

    Raises ValueError if ``radius_mm`` is below the drivebase's minimum
    turning radius.
    """
    if radius_mm < self.min_circle_radius_mm:
        raise ValueError("{}: radius_mm {} is less than min_circle_radius_mm {}" .format(
            self, radius_mm, self.min_circle_radius_mm))

    # The circle traced midway between the two wheels must have radius
    # radius_mm; the outer/inner wheels trace circles offset by half the
    # track width on either side.
    half_track = self.wheel_distance_mm / 2
    circle_outer_mm = 2 * math.pi * (radius_mm + half_track)
    circle_middle_mm = 2 * math.pi * radius_mm
    circle_inner_mm = 2 * math.pi * (radius_mm - half_track)

    # The outer wheel runs at 'speed'; the inner wheel is scaled down by
    # the ratio of its (smaller) circle to the outer circle.
    inner_over_outer = float(circle_inner_mm / circle_outer_mm)
    if arc_right:
        left_speed = speed
        right_speed = inner_over_outer * left_speed
    else:
        right_speed = speed
        left_speed = inner_over_outer * right_speed

    log.debug("%s: arc %s, radius %s, distance %s, left-speed %s, right-speed %s, circle_outer_mm %s, circle_middle_mm %s, circle_inner_mm %s" %
        (self, "right" if arc_right else "left",
         radius_mm, distance_mm, left_speed, right_speed,
         circle_outer_mm, circle_middle_mm, circle_inner_mm
        )
    )

    # Travel distance_mm along the middle circle, then scale that same
    # fraction of a revolution up to the outer circle to find how far the
    # outer wheel must roll, expressed in motor degrees.
    circle_middle_percentage = float(distance_mm / circle_middle_mm)
    circle_outer_final_mm = circle_middle_percentage * circle_outer_mm
    outer_wheel_rotations = float(circle_outer_final_mm / self.wheel.circumference_mm)
    outer_wheel_degrees = outer_wheel_rotations * 360

    log.debug("%s: arc %s, circle_middle_percentage %s, circle_outer_final_mm %s, outer_wheel_rotations %s, outer_wheel_degrees %s" %
        (self, "right" if arc_right else "left",
         circle_middle_percentage, circle_outer_final_mm,
         outer_wheel_rotations, outer_wheel_degrees
        )
    )

    MoveTank.on_for_degrees(self, left_speed, right_speed, outer_wheel_degrees, brake, block)
|
def function[_on_arc, parameter[self, speed, radius_mm, distance_mm, brake, block, arc_right]]:
constant[
Drive in a circle with 'radius' for 'distance'
]
if compare[name[radius_mm] less[<] name[self].min_circle_radius_mm] begin[:]
<ast.Raise object at 0x7da1b16e8940>
variable[circle_outer_mm] assign[=] binary_operation[binary_operation[constant[2] * name[math].pi] * binary_operation[name[radius_mm] + binary_operation[name[self].wheel_distance_mm / constant[2]]]]
variable[circle_middle_mm] assign[=] binary_operation[binary_operation[constant[2] * name[math].pi] * name[radius_mm]]
variable[circle_inner_mm] assign[=] binary_operation[binary_operation[constant[2] * name[math].pi] * binary_operation[name[radius_mm] - binary_operation[name[self].wheel_distance_mm / constant[2]]]]
if name[arc_right] begin[:]
variable[left_speed] assign[=] name[speed]
variable[right_speed] assign[=] binary_operation[call[name[float], parameter[binary_operation[name[circle_inner_mm] / name[circle_outer_mm]]]] * name[left_speed]]
call[name[log].debug, parameter[binary_operation[constant[%s: arc %s, radius %s, distance %s, left-speed %s, right-speed %s, circle_outer_mm %s, circle_middle_mm %s, circle_inner_mm %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204567820>, <ast.IfExp object at 0x7da204565510>, <ast.Name object at 0x7da204564970>, <ast.Name object at 0x7da204567bb0>, <ast.Name object at 0x7da204565bd0>, <ast.Name object at 0x7da204565630>, <ast.Name object at 0x7da204565120>, <ast.Name object at 0x7da204564d00>, <ast.Name object at 0x7da2045660b0>]]]]]
variable[circle_middle_percentage] assign[=] call[name[float], parameter[binary_operation[name[distance_mm] / name[circle_middle_mm]]]]
variable[circle_outer_final_mm] assign[=] binary_operation[name[circle_middle_percentage] * name[circle_outer_mm]]
variable[outer_wheel_rotations] assign[=] call[name[float], parameter[binary_operation[name[circle_outer_final_mm] / name[self].wheel.circumference_mm]]]
variable[outer_wheel_degrees] assign[=] binary_operation[name[outer_wheel_rotations] * constant[360]]
call[name[log].debug, parameter[binary_operation[constant[%s: arc %s, circle_middle_percentage %s, circle_outer_final_mm %s, outer_wheel_rotations %s, outer_wheel_degrees %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b16f8910>, <ast.IfExp object at 0x7da1b16fa650>, <ast.Name object at 0x7da1b16f91b0>, <ast.Name object at 0x7da1b16fa9e0>, <ast.Name object at 0x7da1b16fa530>, <ast.Name object at 0x7da1b16f97b0>]]]]]
call[name[MoveTank].on_for_degrees, parameter[name[self], name[left_speed], name[right_speed], name[outer_wheel_degrees], name[brake], name[block]]]
|
keyword[def] identifier[_on_arc] ( identifier[self] , identifier[speed] , identifier[radius_mm] , identifier[distance_mm] , identifier[brake] , identifier[block] , identifier[arc_right] ):
literal[string]
keyword[if] identifier[radius_mm] < identifier[self] . identifier[min_circle_radius_mm] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] (
identifier[self] , identifier[radius_mm] , identifier[self] . identifier[min_circle_radius_mm] ))
identifier[circle_outer_mm] = literal[int] * identifier[math] . identifier[pi] *( identifier[radius_mm] +( identifier[self] . identifier[wheel_distance_mm] / literal[int] ))
identifier[circle_middle_mm] = literal[int] * identifier[math] . identifier[pi] * identifier[radius_mm]
identifier[circle_inner_mm] = literal[int] * identifier[math] . identifier[pi] *( identifier[radius_mm] -( identifier[self] . identifier[wheel_distance_mm] / literal[int] ))
keyword[if] identifier[arc_right] :
identifier[left_speed] = identifier[speed]
identifier[right_speed] = identifier[float] ( identifier[circle_inner_mm] / identifier[circle_outer_mm] )* identifier[left_speed]
keyword[else] :
identifier[right_speed] = identifier[speed]
identifier[left_speed] = identifier[float] ( identifier[circle_inner_mm] / identifier[circle_outer_mm] )* identifier[right_speed]
identifier[log] . identifier[debug] ( literal[string] %
( identifier[self] , literal[string] keyword[if] identifier[arc_right] keyword[else] literal[string] ,
identifier[radius_mm] , identifier[distance_mm] , identifier[left_speed] , identifier[right_speed] ,
identifier[circle_outer_mm] , identifier[circle_middle_mm] , identifier[circle_inner_mm]
)
)
identifier[circle_middle_percentage] = identifier[float] ( identifier[distance_mm] / identifier[circle_middle_mm] )
identifier[circle_outer_final_mm] = identifier[circle_middle_percentage] * identifier[circle_outer_mm]
identifier[outer_wheel_rotations] = identifier[float] ( identifier[circle_outer_final_mm] / identifier[self] . identifier[wheel] . identifier[circumference_mm] )
identifier[outer_wheel_degrees] = identifier[outer_wheel_rotations] * literal[int]
identifier[log] . identifier[debug] ( literal[string] %
( identifier[self] , literal[string] keyword[if] identifier[arc_right] keyword[else] literal[string] ,
identifier[circle_middle_percentage] , identifier[circle_outer_final_mm] ,
identifier[outer_wheel_rotations] , identifier[outer_wheel_degrees]
)
)
identifier[MoveTank] . identifier[on_for_degrees] ( identifier[self] , identifier[left_speed] , identifier[right_speed] , identifier[outer_wheel_degrees] , identifier[brake] , identifier[block] )
|
def _on_arc(self, speed, radius_mm, distance_mm, brake, block, arc_right):
"""
Drive in a circle with 'radius' for 'distance'
"""
if radius_mm < self.min_circle_radius_mm:
raise ValueError('{}: radius_mm {} is less than min_circle_radius_mm {}'.format(self, radius_mm, self.min_circle_radius_mm)) # depends on [control=['if'], data=['radius_mm']]
# The circle formed at the halfway point between the two wheels is the
# circle that must have a radius of radius_mm
circle_outer_mm = 2 * math.pi * (radius_mm + self.wheel_distance_mm / 2)
circle_middle_mm = 2 * math.pi * radius_mm
circle_inner_mm = 2 * math.pi * (radius_mm - self.wheel_distance_mm / 2)
if arc_right:
# The left wheel is making the larger circle and will move at 'speed'
# The right wheel is making a smaller circle so its speed will be a fraction of the left motor's speed
left_speed = speed
right_speed = float(circle_inner_mm / circle_outer_mm) * left_speed # depends on [control=['if'], data=[]]
else:
# The right wheel is making the larger circle and will move at 'speed'
# The left wheel is making a smaller circle so its speed will be a fraction of the right motor's speed
right_speed = speed
left_speed = float(circle_inner_mm / circle_outer_mm) * right_speed
log.debug('%s: arc %s, radius %s, distance %s, left-speed %s, right-speed %s, circle_outer_mm %s, circle_middle_mm %s, circle_inner_mm %s' % (self, 'right' if arc_right else 'left', radius_mm, distance_mm, left_speed, right_speed, circle_outer_mm, circle_middle_mm, circle_inner_mm))
# We know we want the middle circle to be of length distance_mm so
# calculate the percentage of circle_middle_mm we must travel for the
# middle of the robot to travel distance_mm.
circle_middle_percentage = float(distance_mm / circle_middle_mm)
# Now multiple that percentage by circle_outer_mm to calculate how
# many mm the outer wheel should travel.
circle_outer_final_mm = circle_middle_percentage * circle_outer_mm
outer_wheel_rotations = float(circle_outer_final_mm / self.wheel.circumference_mm)
outer_wheel_degrees = outer_wheel_rotations * 360
log.debug('%s: arc %s, circle_middle_percentage %s, circle_outer_final_mm %s, outer_wheel_rotations %s, outer_wheel_degrees %s' % (self, 'right' if arc_right else 'left', circle_middle_percentage, circle_outer_final_mm, outer_wheel_rotations, outer_wheel_degrees))
MoveTank.on_for_degrees(self, left_speed, right_speed, outer_wheel_degrees, brake, block)
|
def _get_args(self, executable, *args):
"""compile all the executable and the arguments, combining with common arguments
to create a full batch of command args"""
args = list(args)
args.insert(0, executable)
if self.username:
args.append("--username={}".format(self.username))
if self.host:
args.append("--host={}".format(self.host))
if self.port:
args.append("--port={}".format(self.port))
args.append(self.dbname)
#args.extend(other_args)
return args
|
def function[_get_args, parameter[self, executable]]:
constant[compile all the executable and the arguments, combining with common arguments
to create a full batch of command args]
variable[args] assign[=] call[name[list], parameter[name[args]]]
call[name[args].insert, parameter[constant[0], name[executable]]]
if name[self].username begin[:]
call[name[args].append, parameter[call[constant[--username={}].format, parameter[name[self].username]]]]
if name[self].host begin[:]
call[name[args].append, parameter[call[constant[--host={}].format, parameter[name[self].host]]]]
if name[self].port begin[:]
call[name[args].append, parameter[call[constant[--port={}].format, parameter[name[self].port]]]]
call[name[args].append, parameter[name[self].dbname]]
return[name[args]]
|
keyword[def] identifier[_get_args] ( identifier[self] , identifier[executable] ,* identifier[args] ):
literal[string]
identifier[args] = identifier[list] ( identifier[args] )
identifier[args] . identifier[insert] ( literal[int] , identifier[executable] )
keyword[if] identifier[self] . identifier[username] :
identifier[args] . identifier[append] ( literal[string] . identifier[format] ( identifier[self] . identifier[username] ))
keyword[if] identifier[self] . identifier[host] :
identifier[args] . identifier[append] ( literal[string] . identifier[format] ( identifier[self] . identifier[host] ))
keyword[if] identifier[self] . identifier[port] :
identifier[args] . identifier[append] ( literal[string] . identifier[format] ( identifier[self] . identifier[port] ))
identifier[args] . identifier[append] ( identifier[self] . identifier[dbname] )
keyword[return] identifier[args]
|
def _get_args(self, executable, *args):
"""compile all the executable and the arguments, combining with common arguments
to create a full batch of command args"""
args = list(args)
args.insert(0, executable)
if self.username:
args.append('--username={}'.format(self.username)) # depends on [control=['if'], data=[]]
if self.host:
args.append('--host={}'.format(self.host)) # depends on [control=['if'], data=[]]
if self.port:
args.append('--port={}'.format(self.port)) # depends on [control=['if'], data=[]]
args.append(self.dbname)
#args.extend(other_args)
return args
|
def rdl_decomposition(T, k=None, reversible=False, norm='standard', mu=None):
    r"""Compute the decomposition of T into left and right eigenvectors.

    Parameters
    ----------
    T : (M, M) ndarray
        Transition matrix
    k : int (optional)
        Number of eigenvector/eigenvalue pairs to return
    norm: {'standard', 'reversible', 'auto'}
        standard: (L'R) = Id, L[:,0] is a probability distribution,
            the stationary distribution mu of T. Right eigenvectors
            R have a 2-norm of 1.
        reversible: R and L are related via L=L[:,0]*R.
        auto: will be reversible if T is reversible, otherwise standard
    reversible : bool, optional
        Indicate that the transition matrix is reversible
    mu : (d,) ndarray, optional
        Stationary distribution of T (only used if reversible=True)

    Returns
    -------
    R : (M, M) ndarray
        Right eigenvectors, normalized with respect to L; column R[:,i]
        satisfies dot(T, R[:,i]) = w[i]*R[:,i]
    D : (M, M) ndarray
        Diagonal matrix of eigenvalues, each repeated according to its
        multiplicity
    L : (M, M) ndarray
        Left eigenvectors, normalized with respect to R; row L[i,:]
        satisfies dot(L[i,:], T) = w[i]*L[i,:]

    Notes
    -----
    If reversible=True the eigenpairs of the similar symmetric matrix
    `\sqrt(\mu_i / \mu_j) p_{ij}` are used to compute the eigenpairs of T.
    """
    # Resolve the 'auto' norm based on whether T is actually reversible.
    if norm == 'auto':
        norm = 'reversible' if is_reversible(T) else 'standard'

    # Dispatch to the reversible or non-reversible implementation.
    if reversible:
        R, D, L = rdl_decomposition_rev(T, norm=norm, mu=mu)
    else:
        R, D, L = rdl_decomposition_nrev(T, norm=norm)

    # Optionally truncate to the leading k eigenpairs.
    if k is not None:
        return R[:, 0:k], D[0:k, 0:k], L[0:k, :]
    return R, D, L
|
def function[rdl_decomposition, parameter[T, k, reversible, norm, mu]]:
constant[Compute the decomposition into left and right eigenvectors.
Parameters
----------
T : (M, M) ndarray
Transition matrix
k : int (optional)
Number of eigenvector/eigenvalue pairs
norm: {'standard', 'reversible', 'auto'}
standard: (L'R) = Id, L[:,0] is a probability distribution,
the stationary distribution mu of T. Right eigenvectors
R have a 2-norm of 1.
reversible: R and L are related via L=L[:,0]*R.
auto: will be reversible if T is reversible, otherwise standard
reversible : bool, optional
Indicate that transition matrix is reversible
mu : (d,) ndarray, optional
Stationary distribution of T
Returns
-------
R : (M, M) ndarray
The normalized (with respect to L) right eigenvectors, such that the
column R[:,i] is the right eigenvector corresponding to the eigenvalue
w[i], dot(T,R[:,i])=w[i]*R[:,i]
D : (M, M) ndarray
A diagonal matrix containing the eigenvalues, each repeated
according to its multiplicity
L : (M, M) ndarray
The normalized (with respect to `R`) left eigenvectors, such that the
row ``L[i, :]`` is the left eigenvector corresponding to the eigenvalue
``w[i]``, ``dot(L[i, :], T)``=``w[i]*L[i, :]``
Notes
-----
If reversible=True the the eigenvalues and eigenvectors of the
similar symmetric matrix `\sqrt(\mu_i / \mu_j) p_{ij}` will be
used to compute the eigenvalues and eigenvectors of T.
The precomputed stationary distribution will only be used if
reversible=True.
]
if compare[name[norm] equal[==] constant[auto]] begin[:]
if call[name[is_reversible], parameter[name[T]]] begin[:]
variable[norm] assign[=] constant[reversible]
if name[reversible] begin[:]
<ast.Tuple object at 0x7da1b25441c0> assign[=] call[name[rdl_decomposition_rev], parameter[name[T]]]
if compare[name[k] is constant[None]] begin[:]
return[tuple[[<ast.Name object at 0x7da1b25454e0>, <ast.Name object at 0x7da1b2545900>, <ast.Name object at 0x7da1b2546020>]]]
|
keyword[def] identifier[rdl_decomposition] ( identifier[T] , identifier[k] = keyword[None] , identifier[reversible] = keyword[False] , identifier[norm] = literal[string] , identifier[mu] = keyword[None] ):
literal[string]
keyword[if] identifier[norm] == literal[string] :
keyword[if] identifier[is_reversible] ( identifier[T] ):
identifier[norm] = literal[string]
keyword[else] :
identifier[norm] = literal[string]
keyword[if] identifier[reversible] :
identifier[R] , identifier[D] , identifier[L] = identifier[rdl_decomposition_rev] ( identifier[T] , identifier[norm] = identifier[norm] , identifier[mu] = identifier[mu] )
keyword[else] :
identifier[R] , identifier[D] , identifier[L] = identifier[rdl_decomposition_nrev] ( identifier[T] , identifier[norm] = identifier[norm] )
keyword[if] identifier[k] keyword[is] keyword[None] :
keyword[return] identifier[R] , identifier[D] , identifier[L]
keyword[else] :
keyword[return] identifier[R] [:, literal[int] : identifier[k] ], identifier[D] [ literal[int] : identifier[k] , literal[int] : identifier[k] ], identifier[L] [ literal[int] : identifier[k] ,:]
|
def rdl_decomposition(T, k=None, reversible=False, norm='standard', mu=None):
"""Compute the decomposition into left and right eigenvectors.
Parameters
----------
T : (M, M) ndarray
Transition matrix
k : int (optional)
Number of eigenvector/eigenvalue pairs
norm: {'standard', 'reversible', 'auto'}
standard: (L'R) = Id, L[:,0] is a probability distribution,
the stationary distribution mu of T. Right eigenvectors
R have a 2-norm of 1.
reversible: R and L are related via L=L[:,0]*R.
auto: will be reversible if T is reversible, otherwise standard
reversible : bool, optional
Indicate that transition matrix is reversible
mu : (d,) ndarray, optional
Stationary distribution of T
Returns
-------
R : (M, M) ndarray
The normalized (with respect to L) right eigenvectors, such that the
column R[:,i] is the right eigenvector corresponding to the eigenvalue
w[i], dot(T,R[:,i])=w[i]*R[:,i]
D : (M, M) ndarray
A diagonal matrix containing the eigenvalues, each repeated
according to its multiplicity
L : (M, M) ndarray
The normalized (with respect to `R`) left eigenvectors, such that the
row ``L[i, :]`` is the left eigenvector corresponding to the eigenvalue
``w[i]``, ``dot(L[i, :], T)``=``w[i]*L[i, :]``
Notes
-----
If reversible=True the the eigenvalues and eigenvectors of the
similar symmetric matrix `\\sqrt(\\mu_i / \\mu_j) p_{ij}` will be
used to compute the eigenvalues and eigenvectors of T.
The precomputed stationary distribution will only be used if
reversible=True.
"""
# auto-set norm
if norm == 'auto':
if is_reversible(T):
norm = 'reversible' # depends on [control=['if'], data=[]]
else:
norm = 'standard' # depends on [control=['if'], data=['norm']]
if reversible:
(R, D, L) = rdl_decomposition_rev(T, norm=norm, mu=mu) # depends on [control=['if'], data=[]]
else:
(R, D, L) = rdl_decomposition_nrev(T, norm=norm)
if k is None:
return (R, D, L) # depends on [control=['if'], data=[]]
else:
return (R[:, 0:k], D[0:k, 0:k], L[0:k, :])
|
def get_members(pkg_name, module_filter=None, member_filter=None):
    """Return all matching members from the modules of a package.

    Parameters:
        pkg_name       name of the package to scan
        module_filter  module-name predicate, ``def (module_name)``
        member_filter  member predicate, ``def member_filter(module_member_object)``

    Returns a dict mapping "<member module>.<member name>" to the member
    object, aggregated across every module that passes ``module_filter``.
    """
    collected = {}
    for module in get_modules(pkg_name, module_filter):
        for member_name, member in getmembers(module, member_filter):
            # Key by the member's own defining module, not the module it
            # was found in (re-exports keep their original home).
            collected["{0}.{1}".format(member.__module__, member_name)] = member
    return collected
|
def function[get_members, parameter[pkg_name, module_filter, member_filter]]:
constant[
返回包中所有符合条件的模块成员。
参数:
pkg_name 包名称
module_filter 模块名过滤器 def (module_name)
member_filter 成员过滤器 def member_filter(module_member_object)
]
variable[modules] assign[=] call[name[get_modules], parameter[name[pkg_name], name[module_filter]]]
variable[ret] assign[=] dictionary[[], []]
for taget[name[m]] in starred[name[modules]] begin[:]
variable[members] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da20c7952d0>]]
call[name[ret].update, parameter[name[members]]]
return[name[ret]]
|
keyword[def] identifier[get_members] ( identifier[pkg_name] , identifier[module_filter] = keyword[None] , identifier[member_filter] = keyword[None] ):
literal[string]
identifier[modules] = identifier[get_modules] ( identifier[pkg_name] , identifier[module_filter] )
identifier[ret] ={}
keyword[for] identifier[m] keyword[in] identifier[modules] :
identifier[members] = identifier[dict] (( literal[string] . identifier[format] ( identifier[v] . identifier[__module__] , identifier[k] ), identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[getmembers] ( identifier[m] , identifier[member_filter] ))
identifier[ret] . identifier[update] ( identifier[members] )
keyword[return] identifier[ret]
|
def get_members(pkg_name, module_filter=None, member_filter=None):
"""
返回包中所有符合条件的模块成员。
参数:
pkg_name 包名称
module_filter 模块名过滤器 def (module_name)
member_filter 成员过滤器 def member_filter(module_member_object)
"""
modules = get_modules(pkg_name, module_filter)
ret = {}
for m in modules:
members = dict((('{0}.{1}'.format(v.__module__, k), v) for (k, v) in getmembers(m, member_filter)))
ret.update(members) # depends on [control=['for'], data=['m']]
return ret
|
def discover(name, timeout=None, minimum_providers=1):
    """
    discovers a service. If timeout is specified, waits for at least minimum_providers service instance to be available.
    Note : we do not want to make the discovery block undefinitely since we never know for sure if a service is running or not
    TODO : improve with future...
    :param name: name of the service
    :param timeout: maximum number of seconds the discover can wait for a discovery matching requirements. if None, doesn't wait.
    :param minimum_providers the number of provider we need to reach before discover() returns if timeout enabled
    :return: a Service object, containing the list of providers. if the minimum number cannot be reached, still returns what is available.
    """
    started = time.time()
    # With no timeout the deadline is 0 seconds, so the loop effectively
    # performs a single (near-immediate) check before giving up.
    deadline = timeout if timeout else 0
    while True:
        expired = time.time() - started > deadline
        if name in services and isinstance(services[name], list):
            # Return once enough providers showed up, or when we ran out
            # of time -- in the latter case with whatever is available.
            if len(services[name]) >= minimum_providers or expired:
                providers = services[name]
                return Service(name, providers) if providers else None
        if expired:
            break
        # Short sleep to allow the services list to refresh between polls.
        time.sleep(0.2)
    return None
|
def function[discover, parameter[name, timeout, minimum_providers]]:
constant[
discovers a service. If timeout is specified, waits for at least minimum_providers service instance to be available.
Note : we do not want to make the discovery block undefinitely since we never know for sure if a service is running or not
TODO : improve with future...
:param name: name of the service
:param timeout: maximum number of seconds the discover can wait for a discovery matching requirements. if None, doesn't wait.
:param minimum_providers the number of provider we need to reach before discover() returns if timeout enabled
:return: a Service object, containing the list of providers. if the minimum number cannot be reached, still returns what is available.
]
variable[start] assign[=] call[name[time].time, parameter[]]
variable[endtime] assign[=] <ast.IfExp object at 0x7da1b2448fa0>
while constant[True] begin[:]
variable[timed_out] assign[=] compare[binary_operation[call[name[time].time, parameter[]] - name[start]] greater[>] name[endtime]]
if <ast.BoolOp object at 0x7da1b244b520> begin[:]
if <ast.BoolOp object at 0x7da1b2448430> begin[:]
variable[providers] assign[=] call[name[services]][name[name]]
if name[providers] begin[:]
return[call[name[Service], parameter[name[name], name[providers]]]]
if name[timed_out] begin[:]
break
call[name[time].sleep, parameter[constant[0.2]]]
return[constant[None]]
|
keyword[def] identifier[discover] ( identifier[name] , identifier[timeout] = keyword[None] , identifier[minimum_providers] = literal[int] ):
literal[string]
identifier[start] = identifier[time] . identifier[time] ()
identifier[endtime] = identifier[timeout] keyword[if] identifier[timeout] keyword[else] literal[int]
keyword[while] keyword[True] :
identifier[timed_out] = identifier[time] . identifier[time] ()- identifier[start] > identifier[endtime]
keyword[if] identifier[name] keyword[in] identifier[services] keyword[and] identifier[isinstance] ( identifier[services] [ identifier[name] ], identifier[list] ):
keyword[if] identifier[len] ( identifier[services] [ identifier[name] ])>= identifier[minimum_providers] keyword[or] identifier[timed_out] :
identifier[providers] = identifier[services] [ identifier[name] ]
keyword[if] identifier[providers] :
keyword[return] identifier[Service] ( identifier[name] , identifier[providers] )
keyword[else] :
keyword[return] keyword[None]
keyword[if] identifier[timed_out] :
keyword[break]
identifier[time] . identifier[sleep] ( literal[int] )
keyword[return] keyword[None]
|
def discover(name, timeout=None, minimum_providers=1):
"""
discovers a service. If timeout is specified, waits for at least minimum_providers service instance to be available.
Note : we do not want to make the discovery block undefinitely since we never know for sure if a service is running or not
TODO : improve with future...
:param name: name of the service
:param timeout: maximum number of seconds the discover can wait for a discovery matching requirements. if None, doesn't wait.
:param minimum_providers the number of provider we need to reach before discover() returns if timeout enabled
:return: a Service object, containing the list of providers. if the minimum number cannot be reached, still returns what is available.
"""
start = time.time()
endtime = timeout if timeout else 0
while True:
timed_out = time.time() - start > endtime
if name in services and isinstance(services[name], list):
if len(services[name]) >= minimum_providers or timed_out:
providers = services[name]
if providers:
return Service(name, providers) # depends on [control=['if'], data=[]]
else:
return None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if timed_out:
break # depends on [control=['if'], data=[]]
# else we keep looping after a short sleep ( to allow time to refresh services list )
time.sleep(0.2) # sleep # depends on [control=['while'], data=[]]
return None
|
def GetHostMemUsedMB(self):
    """Return host memory used in MB via the VMware guest library.

    Wraps ``VMGuestLib_GetHostMemUsedMB``; raises VMGuestLibException
    when the native call does not report success.
    """
    mem_used = c_uint()
    status = vmGuestLib.VMGuestLib_GetHostMemUsedMB(self.handle.value, byref(mem_used))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return mem_used.value
|
def function[GetHostMemUsedMB, parameter[self]]:
constant[Undocumented.]
variable[counter] assign[=] call[name[c_uint], parameter[]]
variable[ret] assign[=] call[name[vmGuestLib].VMGuestLib_GetHostMemUsedMB, parameter[name[self].handle.value, call[name[byref], parameter[name[counter]]]]]
if compare[name[ret] not_equal[!=] name[VMGUESTLIB_ERROR_SUCCESS]] begin[:]
<ast.Raise object at 0x7da20c7949d0>
return[name[counter].value]
|
keyword[def] identifier[GetHostMemUsedMB] ( identifier[self] ):
literal[string]
identifier[counter] = identifier[c_uint] ()
identifier[ret] = identifier[vmGuestLib] . identifier[VMGuestLib_GetHostMemUsedMB] ( identifier[self] . identifier[handle] . identifier[value] , identifier[byref] ( identifier[counter] ))
keyword[if] identifier[ret] != identifier[VMGUESTLIB_ERROR_SUCCESS] : keyword[raise] identifier[VMGuestLibException] ( identifier[ret] )
keyword[return] identifier[counter] . identifier[value]
|
def GetHostMemUsedMB(self):
"""Undocumented."""
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemUsedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret) # depends on [control=['if'], data=['ret']]
return counter.value
|
def check_row(state, index, missing_msg=None, expand_msg=None):
    """Zoom in on a particular row in the query result, by index.

    After zooming in on a row, which is represented as a single-row query result,
    you can use ``has_equal_value()`` to verify whether all columns in the zoomed in solution
    query result have a match in the student query result.

    Args:
        index: index of the row to zoom in on (zero-based indexed).
        missing_msg: if specified, this overrides the automatically generated feedback
            message in case the row is missing in the student query result.
        expand_msg: if specified, this overrides the automatically generated feedback
            message that is prepended to feedback messages that are thrown
            further in the SCT chain.

    Returns:
        A child state whose student and solution results each contain only the
        selected row (every column truncated to a one-element list).

    Raises:
        ValueError: if ``index`` is out of range for the *solution* result —
            this is an SCT authoring error, not a student mistake.

    :Example:
        Suppose we are testing the following SELECT statements

        * solution: ``SELECT artist_id as id, name FROM artists LIMIT 5``
        * student : ``SELECT artist_id, name FROM artists LIMIT 2``

        We can write the following SCTs: ::

            # fails, since row 3 at index 2 is not in the student result
            Ex().check_row(2)

            # passes, since row 2 at index 1 is in the student result
            Ex().check_row(0)
    """
    if missing_msg is None:
        missing_msg = "The system wants to verify row {{index + 1}} of your query result, but couldn't find it. Have another look."
    if expand_msg is None:
        expand_msg = "Have another look at row {{index + 1}} in your query result. "
    msg_kwargs = {"index": index}

    # Fail early (with student-facing feedback) if the query returned nothing.
    has_result(state)

    stu_res = state.student_result
    sol_res = state.solution_result

    # Row count is taken from the first column; all columns share one length.
    n_sol = len(next(iter(sol_res.values())))
    n_stu = len(next(iter(stu_res.values())))

    if index >= n_sol:
        # Out-of-range index means the SCT itself is wrong, so raise a normal
        # exception instead of generating student feedback. ValueError (not a
        # bare BaseException) so ordinary `except Exception` handlers see it.
        raise ValueError(
            "There are only {} rows in the solution query result, and you're trying to fetch the row at index {}".format(
                n_sol, index
            )
        )

    if index >= n_stu:
        # Row exists in the solution but not in the student result: fail the SCT.
        _msg = state.build_message(missing_msg, fmt_kwargs=msg_kwargs)
        state.do_test(_msg)

    return state.to_child(
        append_message={"msg": expand_msg, "kwargs": msg_kwargs},
        student_result={k: [v[index]] for k, v in stu_res.items()},
        solution_result={k: [v[index]] for k, v in sol_res.items()},
    )
|
def function[check_row, parameter[state, index, missing_msg, expand_msg]]:
constant[Zoom in on a particular row in the query result, by index.
After zooming in on a row, which is represented as a single-row query result,
you can use ``has_equal_value()`` to verify whether all columns in the zoomed in solution
query result have a match in the student query result.
Args:
index: index of the row to zoom in on (zero-based indexed).
missing_msg: if specified, this overrides the automatically generated feedback
message in case the row is missing in the student query result.
expand_msg: if specified, this overrides the automatically generated feedback
message that is prepended to feedback messages that are thrown
further in the SCT chain.
:Example:
Suppose we are testing the following SELECT statements
* solution: ``SELECT artist_id as id, name FROM artists LIMIT 5``
* student : ``SELECT artist_id, name FROM artists LIMIT 2``
We can write the following SCTs: ::
# fails, since row 3 at index 2 is not in the student result
Ex().check_row(2)
# passes, since row 2 at index 1 is in the student result
Ex().check_row(0)
]
if compare[name[missing_msg] is constant[None]] begin[:]
variable[missing_msg] assign[=] constant[The system wants to verify row {{index + 1}} of your query result, but couldn't find it. Have another look.]
if compare[name[expand_msg] is constant[None]] begin[:]
variable[expand_msg] assign[=] constant[Have another look at row {{index + 1}} in your query result. ]
variable[msg_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b03daf80>], [<ast.Name object at 0x7da1b03db910>]]
call[name[has_result], parameter[name[state]]]
variable[stu_res] assign[=] name[state].student_result
variable[sol_res] assign[=] name[state].solution_result
variable[n_sol] assign[=] call[name[len], parameter[call[name[next], parameter[call[name[iter], parameter[call[name[sol_res].values, parameter[]]]]]]]]
variable[n_stu] assign[=] call[name[len], parameter[call[name[next], parameter[call[name[iter], parameter[call[name[stu_res].values, parameter[]]]]]]]]
if compare[name[index] greater_or_equal[>=] name[n_sol]] begin[:]
<ast.Raise object at 0x7da1b03da590>
if compare[name[index] greater_or_equal[>=] name[n_stu]] begin[:]
variable[_msg] assign[=] call[name[state].build_message, parameter[name[missing_msg]]]
call[name[state].do_test, parameter[name[_msg]]]
return[call[name[state].to_child, parameter[]]]
|
keyword[def] identifier[check_row] ( identifier[state] , identifier[index] , identifier[missing_msg] = keyword[None] , identifier[expand_msg] = keyword[None] ):
literal[string]
keyword[if] identifier[missing_msg] keyword[is] keyword[None] :
identifier[missing_msg] = literal[string]
keyword[if] identifier[expand_msg] keyword[is] keyword[None] :
identifier[expand_msg] = literal[string]
identifier[msg_kwargs] ={ literal[string] : identifier[index] }
identifier[has_result] ( identifier[state] )
identifier[stu_res] = identifier[state] . identifier[student_result]
identifier[sol_res] = identifier[state] . identifier[solution_result]
identifier[n_sol] = identifier[len] ( identifier[next] ( identifier[iter] ( identifier[sol_res] . identifier[values] ())))
identifier[n_stu] = identifier[len] ( identifier[next] ( identifier[iter] ( identifier[stu_res] . identifier[values] ())))
keyword[if] identifier[index] >= identifier[n_sol] :
keyword[raise] identifier[BaseException] (
literal[string] . identifier[format] (
identifier[n_sol] , identifier[index]
)
)
keyword[if] identifier[index] >= identifier[n_stu] :
identifier[_msg] = identifier[state] . identifier[build_message] ( identifier[missing_msg] , identifier[fmt_kwargs] = identifier[msg_kwargs] )
identifier[state] . identifier[do_test] ( identifier[_msg] )
keyword[return] identifier[state] . identifier[to_child] (
identifier[append_message] ={ literal[string] : identifier[expand_msg] , literal[string] : identifier[msg_kwargs] },
identifier[student_result] ={ identifier[k] :[ identifier[v] [ identifier[index] ]] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[stu_res] . identifier[items] ()},
identifier[solution_result] ={ identifier[k] :[ identifier[v] [ identifier[index] ]] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[sol_res] . identifier[items] ()},
)
|
def check_row(state, index, missing_msg=None, expand_msg=None):
"""Zoom in on a particular row in the query result, by index.
After zooming in on a row, which is represented as a single-row query result,
you can use ``has_equal_value()`` to verify whether all columns in the zoomed in solution
query result have a match in the student query result.
Args:
index: index of the row to zoom in on (zero-based indexed).
missing_msg: if specified, this overrides the automatically generated feedback
message in case the row is missing in the student query result.
expand_msg: if specified, this overrides the automatically generated feedback
message that is prepended to feedback messages that are thrown
further in the SCT chain.
:Example:
Suppose we are testing the following SELECT statements
* solution: ``SELECT artist_id as id, name FROM artists LIMIT 5``
* student : ``SELECT artist_id, name FROM artists LIMIT 2``
We can write the following SCTs: ::
# fails, since row 3 at index 2 is not in the student result
Ex().check_row(2)
# passes, since row 2 at index 1 is in the student result
Ex().check_row(0)
"""
if missing_msg is None:
missing_msg = "The system wants to verify row {{index + 1}} of your query result, but couldn't find it. Have another look." # depends on [control=['if'], data=['missing_msg']]
if expand_msg is None:
expand_msg = 'Have another look at row {{index + 1}} in your query result. ' # depends on [control=['if'], data=['expand_msg']]
msg_kwargs = {'index': index}
# check that query returned something
has_result(state)
stu_res = state.student_result
sol_res = state.solution_result
n_sol = len(next(iter(sol_res.values())))
n_stu = len(next(iter(stu_res.values())))
if index >= n_sol:
raise BaseException("There are only {} rows in the solution query result, and you're trying to fetch the row at index {}".format(n_sol, index)) # depends on [control=['if'], data=['index', 'n_sol']]
if index >= n_stu:
_msg = state.build_message(missing_msg, fmt_kwargs=msg_kwargs)
state.do_test(_msg) # depends on [control=['if'], data=[]]
return state.to_child(append_message={'msg': expand_msg, 'kwargs': msg_kwargs}, student_result={k: [v[index]] for (k, v) in stu_res.items()}, solution_result={k: [v[index]] for (k, v) in sol_res.items()})
|
def teach_students(self):
    """
    Train each model (student) with the labeled data using bootstrap
    aggregating (bagging).
    """
    dataset = self.dataset
    for learner in self.students:
        # Keep drawing bootstrap bags until one covers every label in the
        # dataset; a bag missing a label would starve the learner.
        while True:
            bag = self._labeled_uniform_sample(int(dataset.len_labeled()))
            if bag.get_num_of_labels() == dataset.get_num_of_labels():
                break
            LOGGER.warning('There is student receiving only one label,'
                           're-sample the bag.')
        learner.train(bag)
|
def function[teach_students, parameter[self]]:
constant[
Train each model (student) with the labeled data using bootstrap
aggregating (bagging).
]
variable[dataset] assign[=] name[self].dataset
for taget[name[student]] in starred[name[self].students] begin[:]
variable[bag] assign[=] call[name[self]._labeled_uniform_sample, parameter[call[name[int], parameter[call[name[dataset].len_labeled, parameter[]]]]]]
while compare[call[name[bag].get_num_of_labels, parameter[]] not_equal[!=] call[name[dataset].get_num_of_labels, parameter[]]] begin[:]
variable[bag] assign[=] call[name[self]._labeled_uniform_sample, parameter[call[name[int], parameter[call[name[dataset].len_labeled, parameter[]]]]]]
call[name[LOGGER].warning, parameter[constant[There is student receiving only one label,re-sample the bag.]]]
call[name[student].train, parameter[name[bag]]]
|
keyword[def] identifier[teach_students] ( identifier[self] ):
literal[string]
identifier[dataset] = identifier[self] . identifier[dataset]
keyword[for] identifier[student] keyword[in] identifier[self] . identifier[students] :
identifier[bag] = identifier[self] . identifier[_labeled_uniform_sample] ( identifier[int] ( identifier[dataset] . identifier[len_labeled] ()))
keyword[while] identifier[bag] . identifier[get_num_of_labels] ()!= identifier[dataset] . identifier[get_num_of_labels] ():
identifier[bag] = identifier[self] . identifier[_labeled_uniform_sample] ( identifier[int] ( identifier[dataset] . identifier[len_labeled] ()))
identifier[LOGGER] . identifier[warning] ( literal[string]
literal[string] )
identifier[student] . identifier[train] ( identifier[bag] )
|
def teach_students(self):
"""
Train each model (student) with the labeled data using bootstrap
aggregating (bagging).
"""
dataset = self.dataset
for student in self.students:
bag = self._labeled_uniform_sample(int(dataset.len_labeled()))
while bag.get_num_of_labels() != dataset.get_num_of_labels():
bag = self._labeled_uniform_sample(int(dataset.len_labeled()))
LOGGER.warning('There is student receiving only one label,re-sample the bag.') # depends on [control=['while'], data=[]]
student.train(bag) # depends on [control=['for'], data=['student']]
|
def push_repository(self,
                    repository,
                    docker_executable='docker',
                    shutit_pexpect_child=None,
                    expect=None,
                    note=None,
                    loglevel=logging.INFO):
    """Pushes the repository.

    @param repository:            Repository to push.
    @param docker_executable:     Defaults to 'docker'
    @param expect:                See send()
    @param shutit_pexpect_child:  See send()

    @type repository:             string
    @type docker_executable:      string
    """
    shutit_global.shutit_global_object.yield_to_draw()
    self.handle_note(note)
    # Fall back to the host-side pexpect session and its origin prompt.
    shutit_pexpect_child = shutit_pexpect_child or self.get_shutit_pexpect_session_from_id('host_child').pexpect_child
    expect = expect or self.expect_prompts['ORIGIN_ENV']
    push_cmd = '%s push %s/%s' % (docker_executable, self.repository['user'], repository)
    # Pushes can take a very long time; effectively disable the timeout.
    timeout = 99999
    self.log('Running: ' + push_cmd, level=logging.INFO)
    # Authenticate first, answering the interactive login prompts.
    credentials = {'Username': self.repository['user'],
                   'Password': self.repository['password'],
                   'Email': self.repository['email']}
    self.multisend(docker_executable + ' login',
                   credentials,
                   shutit_pexpect_child=shutit_pexpect_child,
                   expect=expect)
    self.send(push_cmd,
              shutit_pexpect_child=shutit_pexpect_child,
              expect=expect,
              timeout=timeout,
              check_exit=False,
              fail_on_empty_before=False,
              loglevel=loglevel)
    self.handle_note_after(note)
    return True
|
def function[push_repository, parameter[self, repository, docker_executable, shutit_pexpect_child, expect, note, loglevel]]:
constant[Pushes the repository.
@param repository: Repository to push.
@param docker_executable: Defaults to 'docker'
@param expect: See send()
@param shutit_pexpect_child: See send()
@type repository: string
@type docker_executable: string
]
call[name[shutit_global].shutit_global_object.yield_to_draw, parameter[]]
call[name[self].handle_note, parameter[name[note]]]
variable[shutit_pexpect_child] assign[=] <ast.BoolOp object at 0x7da18f00da50>
variable[expect] assign[=] <ast.BoolOp object at 0x7da18f00f550>
variable[send] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[docker_executable] + constant[ push ]] + call[name[self].repository][constant[user]]] + constant[/]] + name[repository]]
variable[timeout] assign[=] constant[99999]
call[name[self].log, parameter[binary_operation[constant[Running: ] + name[send]]]]
call[name[self].multisend, parameter[binary_operation[name[docker_executable] + constant[ login]], dictionary[[<ast.Constant object at 0x7da18f00ce20>, <ast.Constant object at 0x7da18f00d240>, <ast.Constant object at 0x7da18f00d030>], [<ast.Subscript object at 0x7da18f00d6c0>, <ast.Subscript object at 0x7da18f00c640>, <ast.Subscript object at 0x7da18f00efe0>]]]]
call[name[self].send, parameter[name[send]]]
call[name[self].handle_note_after, parameter[name[note]]]
return[constant[True]]
|
keyword[def] identifier[push_repository] ( identifier[self] ,
identifier[repository] ,
identifier[docker_executable] = literal[string] ,
identifier[shutit_pexpect_child] = keyword[None] ,
identifier[expect] = keyword[None] ,
identifier[note] = keyword[None] ,
identifier[loglevel] = identifier[logging] . identifier[INFO] ):
literal[string]
identifier[shutit_global] . identifier[shutit_global_object] . identifier[yield_to_draw] ()
identifier[self] . identifier[handle_note] ( identifier[note] )
identifier[shutit_pexpect_child] = identifier[shutit_pexpect_child] keyword[or] identifier[self] . identifier[get_shutit_pexpect_session_from_id] ( literal[string] ). identifier[pexpect_child]
identifier[expect] = identifier[expect] keyword[or] identifier[self] . identifier[expect_prompts] [ literal[string] ]
identifier[send] = identifier[docker_executable] + literal[string] + identifier[self] . identifier[repository] [ literal[string] ]+ literal[string] + identifier[repository]
identifier[timeout] = literal[int]
identifier[self] . identifier[log] ( literal[string] + identifier[send] , identifier[level] = identifier[logging] . identifier[INFO] )
identifier[self] . identifier[multisend] ( identifier[docker_executable] + literal[string] ,
{ literal[string] : identifier[self] . identifier[repository] [ literal[string] ], literal[string] : identifier[self] . identifier[repository] [ literal[string] ], literal[string] : identifier[self] . identifier[repository] [ literal[string] ]},
identifier[shutit_pexpect_child] = identifier[shutit_pexpect_child] ,
identifier[expect] = identifier[expect] )
identifier[self] . identifier[send] ( identifier[send] ,
identifier[shutit_pexpect_child] = identifier[shutit_pexpect_child] ,
identifier[expect] = identifier[expect] ,
identifier[timeout] = identifier[timeout] ,
identifier[check_exit] = keyword[False] ,
identifier[fail_on_empty_before] = keyword[False] ,
identifier[loglevel] = identifier[loglevel] )
identifier[self] . identifier[handle_note_after] ( identifier[note] )
keyword[return] keyword[True]
|
def push_repository(self, repository, docker_executable='docker', shutit_pexpect_child=None, expect=None, note=None, loglevel=logging.INFO):
"""Pushes the repository.
@param repository: Repository to push.
@param docker_executable: Defaults to 'docker'
@param expect: See send()
@param shutit_pexpect_child: See send()
@type repository: string
@type docker_executable: string
"""
shutit_global.shutit_global_object.yield_to_draw()
self.handle_note(note)
shutit_pexpect_child = shutit_pexpect_child or self.get_shutit_pexpect_session_from_id('host_child').pexpect_child
expect = expect or self.expect_prompts['ORIGIN_ENV']
send = docker_executable + ' push ' + self.repository['user'] + '/' + repository
timeout = 99999
self.log('Running: ' + send, level=logging.INFO)
self.multisend(docker_executable + ' login', {'Username': self.repository['user'], 'Password': self.repository['password'], 'Email': self.repository['email']}, shutit_pexpect_child=shutit_pexpect_child, expect=expect)
self.send(send, shutit_pexpect_child=shutit_pexpect_child, expect=expect, timeout=timeout, check_exit=False, fail_on_empty_before=False, loglevel=loglevel)
self.handle_note_after(note)
return True
|
def read_data(self, variable_instance):
    """
    Read a value for the given variable from the VISA instrument.

    Returns the parsed reading, or None when no instrument is connected.
    """
    if self.inst is None:
        return None
    prop = variable_instance.visavariable.device_property.upper()
    # Optional ':FUNC' property selects the measurement function prefix.
    vp_func = variable_instance.variableproperty_set.filter(name=':FUNC').first()
    measure_function = ':FUNC "%s";' % vp_func.value() if vp_func and vp_func.value() else ''
    trig_delay = 0.1
    if prop == 'PRESENT_VALUE':
        return self.parse_value(self.inst.query(':FETCH?'))
    channel = re.search('(PRESENT_VALUE_CH)([0-9]*)', prop)
    if channel is not None:
        # Close the requested relay channel, set trigger delay, then fetch.
        command = ':route:close (@%s);%s:TRIG:DEL %1.3f;:fetch?' % (
            channel.group(2), measure_function, trig_delay)
        return self.parse_value(self.inst.query(command))
    # Otherwise treat the property itself as a raw SCPI query.
    return self.parse_value(self.inst.query(prop))
|
def function[read_data, parameter[self, variable_instance]]:
constant[
read values from the device
]
if compare[name[self].inst is constant[None]] begin[:]
return[None]
variable[vp_func] assign[=] call[call[name[variable_instance].variableproperty_set.filter, parameter[]].first, parameter[]]
variable[measure_function] assign[=] constant[]
if name[vp_func] begin[:]
if call[name[vp_func].value, parameter[]] begin[:]
variable[measure_function] assign[=] binary_operation[constant[:FUNC "%s";] <ast.Mod object at 0x7da2590d6920> call[name[vp_func].value, parameter[]]]
variable[trig_delay] assign[=] constant[0.1]
if compare[call[name[variable_instance].visavariable.device_property.upper, parameter[]] equal[==] constant[PRESENT_VALUE]] begin[:]
return[call[name[self].parse_value, parameter[call[name[self].inst.query, parameter[constant[:FETCH?]]]]]]
variable[m] assign[=] call[name[re].search, parameter[constant[(PRESENT_VALUE_CH)([0-9]*)], call[name[variable_instance].visavariable.device_property.upper, parameter[]]]]
if name[m] begin[:]
return[call[name[self].parse_value, parameter[call[name[self].inst.query, parameter[binary_operation[constant[:route:close (@%s);%s:TRIG:DEL %1.3f;:fetch?] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da20c76dc60>, <ast.Name object at 0x7da20c76e440>, <ast.Name object at 0x7da20c76c310>]]]]]]]]
return[call[name[self].parse_value, parameter[call[name[self].inst.query, parameter[call[name[variable_instance].visavariable.device_property.upper, parameter[]]]]]]]
|
keyword[def] identifier[read_data] ( identifier[self] , identifier[variable_instance] ):
literal[string]
keyword[if] identifier[self] . identifier[inst] keyword[is] keyword[None] :
keyword[return]
identifier[vp_func] = identifier[variable_instance] . identifier[variableproperty_set] . identifier[filter] ( identifier[name] = literal[string] ). identifier[first] ()
identifier[measure_function] = literal[string]
keyword[if] identifier[vp_func] :
keyword[if] identifier[vp_func] . identifier[value] ():
identifier[measure_function] = literal[string] % identifier[vp_func] . identifier[value] ()
identifier[trig_delay] = literal[int]
keyword[if] identifier[variable_instance] . identifier[visavariable] . identifier[device_property] . identifier[upper] ()== literal[string] :
keyword[return] identifier[self] . identifier[parse_value] ( identifier[self] . identifier[inst] . identifier[query] ( literal[string] ))
identifier[m] = identifier[re] . identifier[search] ( literal[string] , identifier[variable_instance] . identifier[visavariable] . identifier[device_property] . identifier[upper] ())
keyword[if] identifier[m] :
keyword[return] identifier[self] . identifier[parse_value] (
identifier[self] . identifier[inst] . identifier[query] ( literal[string] %( identifier[m] . identifier[group] ( literal[int] ), identifier[measure_function] , identifier[trig_delay] )))
keyword[return] identifier[self] . identifier[parse_value] ( identifier[self] . identifier[inst] . identifier[query] ( identifier[variable_instance] . identifier[visavariable] . identifier[device_property] . identifier[upper] ()))
|
def read_data(self, variable_instance):
"""
read values from the device
"""
if self.inst is None:
return # depends on [control=['if'], data=[]]
vp_func = variable_instance.variableproperty_set.filter(name=':FUNC').first()
measure_function = ''
if vp_func:
if vp_func.value():
measure_function = ':FUNC "%s";' % vp_func.value() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
trig_delay = 0.1
if variable_instance.visavariable.device_property.upper() == 'PRESENT_VALUE':
return self.parse_value(self.inst.query(':FETCH?')) # depends on [control=['if'], data=[]]
m = re.search('(PRESENT_VALUE_CH)([0-9]*)', variable_instance.visavariable.device_property.upper())
if m:
return self.parse_value(self.inst.query(':route:close (@%s);%s:TRIG:DEL %1.3f;:fetch?' % (m.group(2), measure_function, trig_delay))) # depends on [control=['if'], data=[]]
return self.parse_value(self.inst.query(variable_instance.visavariable.device_property.upper()))
|
def _get_policies(self, resource_properties):
"""
Returns a list of policies from the resource properties. This method knows how to interpret and handle
polymorphic nature of the policies property.
Policies can be one of the following:
* Managed policy name: string
* List of managed policy names: list of strings
* IAM Policy document: dict containing Statement key
* List of IAM Policy documents: list of IAM Policy Document
* Policy Template: dict with only one key where key is in list of supported policy template names
* List of Policy Templates: list of Policy Template
:param dict resource_properties: Dictionary of resource properties containing the policies property.
It is assumed that this is already a dictionary and contains policies key.
:return list of PolicyEntry: List of policies, where each item is an instance of named tuple `PolicyEntry`
"""
policies = None
if self._contains_policies(resource_properties):
policies = resource_properties[self.POLICIES_PROPERTY_NAME]
if not policies:
# Policies is None or empty
return []
if not isinstance(policies, list):
# Just a single entry. Make it into a list of convenience
policies = [policies]
result = []
for policy in policies:
policy_type = self._get_type(policy)
entry = PolicyEntry(data=policy, type=policy_type)
result.append(entry)
return result
|
def function[_get_policies, parameter[self, resource_properties]]:
constant[
Returns a list of policies from the resource properties. This method knows how to interpret and handle
polymorphic nature of the policies property.
Policies can be one of the following:
* Managed policy name: string
* List of managed policy names: list of strings
* IAM Policy document: dict containing Statement key
* List of IAM Policy documents: list of IAM Policy Document
* Policy Template: dict with only one key where key is in list of supported policy template names
* List of Policy Templates: list of Policy Template
:param dict resource_properties: Dictionary of resource properties containing the policies property.
It is assumed that this is already a dictionary and contains policies key.
:return list of PolicyEntry: List of policies, where each item is an instance of named tuple `PolicyEntry`
]
variable[policies] assign[=] constant[None]
if call[name[self]._contains_policies, parameter[name[resource_properties]]] begin[:]
variable[policies] assign[=] call[name[resource_properties]][name[self].POLICIES_PROPERTY_NAME]
if <ast.UnaryOp object at 0x7da2054a4070> begin[:]
return[list[[]]]
if <ast.UnaryOp object at 0x7da2054a4ca0> begin[:]
variable[policies] assign[=] list[[<ast.Name object at 0x7da2054a62f0>]]
variable[result] assign[=] list[[]]
for taget[name[policy]] in starred[name[policies]] begin[:]
variable[policy_type] assign[=] call[name[self]._get_type, parameter[name[policy]]]
variable[entry] assign[=] call[name[PolicyEntry], parameter[]]
call[name[result].append, parameter[name[entry]]]
return[name[result]]
|
keyword[def] identifier[_get_policies] ( identifier[self] , identifier[resource_properties] ):
literal[string]
identifier[policies] = keyword[None]
keyword[if] identifier[self] . identifier[_contains_policies] ( identifier[resource_properties] ):
identifier[policies] = identifier[resource_properties] [ identifier[self] . identifier[POLICIES_PROPERTY_NAME] ]
keyword[if] keyword[not] identifier[policies] :
keyword[return] []
keyword[if] keyword[not] identifier[isinstance] ( identifier[policies] , identifier[list] ):
identifier[policies] =[ identifier[policies] ]
identifier[result] =[]
keyword[for] identifier[policy] keyword[in] identifier[policies] :
identifier[policy_type] = identifier[self] . identifier[_get_type] ( identifier[policy] )
identifier[entry] = identifier[PolicyEntry] ( identifier[data] = identifier[policy] , identifier[type] = identifier[policy_type] )
identifier[result] . identifier[append] ( identifier[entry] )
keyword[return] identifier[result]
|
def _get_policies(self, resource_properties):
"""
Returns a list of policies from the resource properties. This method knows how to interpret and handle
polymorphic nature of the policies property.
Policies can be one of the following:
* Managed policy name: string
* List of managed policy names: list of strings
* IAM Policy document: dict containing Statement key
* List of IAM Policy documents: list of IAM Policy Document
* Policy Template: dict with only one key where key is in list of supported policy template names
* List of Policy Templates: list of Policy Template
:param dict resource_properties: Dictionary of resource properties containing the policies property.
It is assumed that this is already a dictionary and contains policies key.
:return list of PolicyEntry: List of policies, where each item is an instance of named tuple `PolicyEntry`
"""
policies = None
if self._contains_policies(resource_properties):
policies = resource_properties[self.POLICIES_PROPERTY_NAME] # depends on [control=['if'], data=[]]
if not policies:
# Policies is None or empty
return [] # depends on [control=['if'], data=[]]
if not isinstance(policies, list):
# Just a single entry. Make it into a list of convenience
policies = [policies] # depends on [control=['if'], data=[]]
result = []
for policy in policies:
policy_type = self._get_type(policy)
entry = PolicyEntry(data=policy, type=policy_type)
result.append(entry) # depends on [control=['for'], data=['policy']]
return result
|
def dft_task(cls, mol, xc="b3lyp", **kwargs):
    """
    A class method for quickly creating DFT tasks with optional
    cosmo parameter .

    Args:
        mol: Input molecule
        xc: Exchange correlation to use.
        \\*\\*kwargs: Any of the other kwargs supported by NwTask. Note the
            theory is always "dft" for a dft task.
    """
    task = NwTask.from_molecule(mol, theory="dft", **kwargs)
    # Record the exchange-correlation functional and spin multiplicity
    # in the theory directives of the freshly built task.
    task.theory_directives["xc"] = xc
    task.theory_directives["mult"] = task.spin_multiplicity
    return task
|
def function[dft_task, parameter[cls, mol, xc]]:
constant[
A class method for quickly creating DFT tasks with optional
cosmo parameter .
Args:
mol: Input molecule
xc: Exchange correlation to use.
\*\*kwargs: Any of the other kwargs supported by NwTask. Note the
theory is always "dft" for a dft task.
]
variable[t] assign[=] call[name[NwTask].from_molecule, parameter[name[mol]]]
call[name[t].theory_directives.update, parameter[dictionary[[<ast.Constant object at 0x7da2045649d0>, <ast.Constant object at 0x7da204566080>], [<ast.Name object at 0x7da2045651e0>, <ast.Attribute object at 0x7da204564a90>]]]]
return[name[t]]
|
keyword[def] identifier[dft_task] ( identifier[cls] , identifier[mol] , identifier[xc] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[t] = identifier[NwTask] . identifier[from_molecule] ( identifier[mol] , identifier[theory] = literal[string] ,** identifier[kwargs] )
identifier[t] . identifier[theory_directives] . identifier[update] ({ literal[string] : identifier[xc] ,
literal[string] : identifier[t] . identifier[spin_multiplicity] })
keyword[return] identifier[t]
|
def dft_task(cls, mol, xc='b3lyp', **kwargs):
"""
A class method for quickly creating DFT tasks with optional
cosmo parameter .
Args:
mol: Input molecule
xc: Exchange correlation to use.
\\*\\*kwargs: Any of the other kwargs supported by NwTask. Note the
theory is always "dft" for a dft task.
"""
t = NwTask.from_molecule(mol, theory='dft', **kwargs)
t.theory_directives.update({'xc': xc, 'mult': t.spin_multiplicity})
return t
|
def remove_col(self, col_num):
    """
    update table dataframe, and remove a column.
    resize grid to display correctly
    """
    # Column labels may carry '**'/'^^' markers in the grid header; strip
    # them to recover the underlying dataframe column name.
    column_name = self.GetColLabelValue(col_num).strip('**').strip('^^')
    self.col_labels.remove(column_name)
    del self.table.dataframe[column_name]
    outcome = self.DeleteCols(pos=col_num, numCols=1, updateLabels=True)
    self.size_grid()
    return outcome
|
def function[remove_col, parameter[self, col_num]]:
constant[
update table dataframe, and remove a column.
resize grid to display correctly
]
variable[label_value] assign[=] call[call[call[name[self].GetColLabelValue, parameter[name[col_num]]].strip, parameter[constant[**]]].strip, parameter[constant[^^]]]
call[name[self].col_labels.remove, parameter[name[label_value]]]
<ast.Delete object at 0x7da18c4ccee0>
variable[result] assign[=] call[name[self].DeleteCols, parameter[]]
call[name[self].size_grid, parameter[]]
return[name[result]]
|
keyword[def] identifier[remove_col] ( identifier[self] , identifier[col_num] ):
literal[string]
identifier[label_value] = identifier[self] . identifier[GetColLabelValue] ( identifier[col_num] ). identifier[strip] ( literal[string] ). identifier[strip] ( literal[string] )
identifier[self] . identifier[col_labels] . identifier[remove] ( identifier[label_value] )
keyword[del] identifier[self] . identifier[table] . identifier[dataframe] [ identifier[label_value] ]
identifier[result] = identifier[self] . identifier[DeleteCols] ( identifier[pos] = identifier[col_num] , identifier[numCols] = literal[int] , identifier[updateLabels] = keyword[True] )
identifier[self] . identifier[size_grid] ()
keyword[return] identifier[result]
|
def remove_col(self, col_num):
"""
update table dataframe, and remove a column.
resize grid to display correctly
"""
label_value = self.GetColLabelValue(col_num).strip('**').strip('^^')
self.col_labels.remove(label_value)
del self.table.dataframe[label_value]
result = self.DeleteCols(pos=col_num, numCols=1, updateLabels=True)
self.size_grid()
return result
|
def escape(pathname):
    """Escape all special characters.
    """
    # Metacharacters never match inside the drive part, so it is split off
    # untouched; any of "*?[" in the remainder is wrapped in square brackets.
    drive, tail = os.path.splitdrive(pathname)
    if isinstance(tail, bytes):
        escaped = magic_check_bytes.sub(br'[\1]', tail)
    else:
        escaped = magic_check.sub(r'[\1]', tail)
    return drive + escaped
|
def function[escape, parameter[pathname]]:
constant[Escape all special characters.
]
<ast.Tuple object at 0x7da1b1cd6fb0> assign[=] call[name[os].path.splitdrive, parameter[name[pathname]]]
if call[name[isinstance], parameter[name[pathname], name[bytes]]] begin[:]
variable[pathname] assign[=] call[name[magic_check_bytes].sub, parameter[constant[b'[\\1]'], name[pathname]]]
return[binary_operation[name[drive] + name[pathname]]]
|
keyword[def] identifier[escape] ( identifier[pathname] ):
literal[string]
identifier[drive] , identifier[pathname] = identifier[os] . identifier[path] . identifier[splitdrive] ( identifier[pathname] )
keyword[if] identifier[isinstance] ( identifier[pathname] , identifier[bytes] ):
identifier[pathname] = identifier[magic_check_bytes] . identifier[sub] ( literal[string] , identifier[pathname] )
keyword[else] :
identifier[pathname] = identifier[magic_check] . identifier[sub] ( literal[string] , identifier[pathname] )
keyword[return] identifier[drive] + identifier[pathname]
|
def escape(pathname):
"""Escape all special characters.
"""
# Escaping is done by wrapping any of "*?[" between square brackets.
# Metacharacters do not work in the drive part and shouldn't be escaped.
(drive, pathname) = os.path.splitdrive(pathname)
if isinstance(pathname, bytes):
pathname = magic_check_bytes.sub(b'[\\1]', pathname) # depends on [control=['if'], data=[]]
else:
pathname = magic_check.sub('[\\1]', pathname)
return drive + pathname
|
def reserve_file(self, relative_path):
    """Reserve an XML file for the slice at <relative_path>.xml.

    The path is rooted at ``self.root_dir`` and intermediate directories
    are created as needed.  Reserving the same path twice -- whether it
    already exists on disk or is already in the bookkeeping list -- is an
    error, as is passing an absolute path.  Not writing anything to the
    reserved file is an error (checked elsewhere).
    """
    # Reservations are always rooted at root_dir, so absolute paths are out.
    if os.path.isabs(relative_path):
        raise ValueError('%s must be a relative path' % relative_path)
    target = os.path.join(self.root_dir, '%s.xml' % relative_path)
    # A reservation must be unique both on disk and in the expected list.
    if os.path.exists(target):
        raise ValueError('%r must not already exist' % target)
    if target in self.expected_xunit_files:
        raise ValueError('%r already reserved' % target)
    parent = os.path.dirname(target)
    if not os.path.isdir(parent):
        os.makedirs(parent)
    self.expected_xunit_files.append(target)
    return target
|
def function[reserve_file, parameter[self, relative_path]]:
constant[reserve a XML file for the slice at <relative_path>.xml
- the relative path will be created for you
- not writing anything to that file is an error
]
if call[name[os].path.isabs, parameter[name[relative_path]]] begin[:]
<ast.Raise object at 0x7da1b0241a80>
variable[dest_path] assign[=] call[name[os].path.join, parameter[name[self].root_dir, binary_operation[constant[%s.xml] <ast.Mod object at 0x7da2590d6920> name[relative_path]]]]
if call[name[os].path.exists, parameter[name[dest_path]]] begin[:]
<ast.Raise object at 0x7da1b0242fe0>
if compare[name[dest_path] in name[self].expected_xunit_files] begin[:]
<ast.Raise object at 0x7da1b016ce20>
variable[dest_dir] assign[=] call[name[os].path.dirname, parameter[name[dest_path]]]
if <ast.UnaryOp object at 0x7da1b016dcc0> begin[:]
call[name[os].makedirs, parameter[name[dest_dir]]]
call[name[self].expected_xunit_files.append, parameter[name[dest_path]]]
return[name[dest_path]]
|
keyword[def] identifier[reserve_file] ( identifier[self] , identifier[relative_path] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[isabs] ( identifier[relative_path] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[relative_path] )
identifier[dest_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[root_dir] , literal[string] % identifier[relative_path] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[dest_path] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[dest_path] )
keyword[if] identifier[dest_path] keyword[in] identifier[self] . identifier[expected_xunit_files] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[dest_path] )
identifier[dest_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[dest_path] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[dest_dir] ):
identifier[os] . identifier[makedirs] ( identifier[dest_dir] )
identifier[self] . identifier[expected_xunit_files] . identifier[append] ( identifier[dest_path] )
keyword[return] identifier[dest_path]
|
def reserve_file(self, relative_path):
"""reserve a XML file for the slice at <relative_path>.xml
- the relative path will be created for you
- not writing anything to that file is an error
"""
if os.path.isabs(relative_path):
raise ValueError('%s must be a relative path' % relative_path) # depends on [control=['if'], data=[]]
dest_path = os.path.join(self.root_dir, '%s.xml' % relative_path)
if os.path.exists(dest_path):
raise ValueError('%r must not already exist' % dest_path) # depends on [control=['if'], data=[]]
if dest_path in self.expected_xunit_files:
raise ValueError('%r already reserved' % dest_path) # depends on [control=['if'], data=['dest_path']]
dest_dir = os.path.dirname(dest_path)
if not os.path.isdir(dest_dir):
os.makedirs(dest_dir) # depends on [control=['if'], data=[]]
self.expected_xunit_files.append(dest_path)
return dest_path
|
def list_nodes(**kwargs):
    '''
    Return basic data on nodes
    '''
    # Project each node's full record down to the basic fields only.
    basic_fields = ('id', 'image', 'size', 'state', 'private_ips', 'public_ips')
    full = list_nodes_full()
    return {
        node: {field: full[node][field] for field in basic_fields}
        for node in full
    }
|
def function[list_nodes, parameter[]]:
constant[
Return basic data on nodes
]
variable[ret] assign[=] dictionary[[], []]
variable[nodes] assign[=] call[name[list_nodes_full], parameter[]]
for taget[name[node]] in starred[name[nodes]] begin[:]
call[name[ret]][name[node]] assign[=] dictionary[[], []]
for taget[name[prop]] in starred[tuple[[<ast.Constant object at 0x7da1b21a9060>, <ast.Constant object at 0x7da1b21a9030>, <ast.Constant object at 0x7da1b21a90c0>, <ast.Constant object at 0x7da1b21a9090>, <ast.Constant object at 0x7da1b21a9210>, <ast.Constant object at 0x7da1b21a91b0>]]] begin[:]
call[call[name[ret]][name[node]]][name[prop]] assign[=] call[call[name[nodes]][name[node]]][name[prop]]
return[name[ret]]
|
keyword[def] identifier[list_nodes] (** identifier[kwargs] ):
literal[string]
identifier[ret] ={}
identifier[nodes] = identifier[list_nodes_full] ()
keyword[for] identifier[node] keyword[in] identifier[nodes] :
identifier[ret] [ identifier[node] ]={}
keyword[for] identifier[prop] keyword[in] literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] :
identifier[ret] [ identifier[node] ][ identifier[prop] ]= identifier[nodes] [ identifier[node] ][ identifier[prop] ]
keyword[return] identifier[ret]
|
def list_nodes(**kwargs):
"""
Return basic data on nodes
"""
ret = {}
nodes = list_nodes_full()
for node in nodes:
ret[node] = {}
for prop in ('id', 'image', 'size', 'state', 'private_ips', 'public_ips'):
ret[node][prop] = nodes[node][prop] # depends on [control=['for'], data=['prop']] # depends on [control=['for'], data=['node']]
return ret
|
def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None:
    """
    rename a dn in the ldap database; see ldap module. doesn't return a
    result if transactions enabled.

    :param dn: distinguished name of the entry to rename.
    :param new_rdn: new relative distinguished name (RDN) for the entry.
    :param new_base_dn: optional DN of a new parent -- presumably moves the
        entry when given; verify against a concrete implementation.
    :raises NotImplementedError: always, in this base implementation --
        presumably meant to be overridden by subclasses (TODO confirm).
    """
    raise NotImplementedError()
|
def function[rename, parameter[self, dn, new_rdn, new_base_dn]]:
constant[
rename a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled.
]
<ast.Raise object at 0x7da20c6a90c0>
|
keyword[def] identifier[rename] ( identifier[self] , identifier[dn] : identifier[str] , identifier[new_rdn] : identifier[str] , identifier[new_base_dn] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> keyword[None] :
literal[string]
keyword[raise] identifier[NotImplementedError] ()
|
def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str]=None) -> None:
"""
rename a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled.
"""
raise NotImplementedError()
|
def plot_bit_for_bit(case, var_name, model_data, bench_data, diff_data):
    """ Create a bit for bit plot

    Draws the model output, the benchmark output and their difference side
    by side and saves the figure under <output_dir>/verification/imgs.

    :param case: test-case name; used in the output file name.
    :param var_name: variable being compared; used for labels and file name.
    :param model_data: array of model output.
    :param bench_data: array of benchmark output; must match model_data's rank.
    :param diff_data: array of differences between model and benchmark.
    :return: plot path relative to the verification output directory, or an
        error string when the dataset ranks differ.
    """
    plot_title = ""
    plot_name = case + "_" + var_name + ".png"
    plot_path = os.path.join(os.path.join(livvkit.output_dir, "verification", "imgs"))
    functions.mkdir_p(plot_path)
    m_ndim = np.ndim(model_data)
    b_ndim = np.ndim(bench_data)
    if m_ndim != b_ndim:
        return "Dataset dimensions didn't match!"
    # Reduce 3D/4D data to a 2D slice for plotting: the last index of the
    # leading axis (presumably time -- TODO confirm) and, for 4D, the first
    # index of the next axis.
    if m_ndim == 3:
        model_data = model_data[-1]
        bench_data = bench_data[-1]
        diff_data = diff_data[-1]
        plot_title = "Showing "+var_name+"[-1,:,:]"
    elif m_ndim == 4:
        model_data = model_data[-1][0]
        bench_data = bench_data[-1][0]
        diff_data = diff_data[-1][0]
        plot_title = "Showing "+var_name+"[-1,0,:,:]"
    plt.figure(figsize=(12, 3), dpi=80)
    plt.clf()
    # Calculate min and max to scale the colorbars; model and benchmark
    # panels share one color range so they are visually comparable.
    _max = np.amax([np.amax(model_data), np.amax(bench_data)])
    _min = np.amin([np.amin(model_data), np.amin(bench_data)])
    # Plot the model output
    plt.subplot(1, 3, 1)
    plt.xlabel("Model Data")
    plt.ylabel(var_name)
    plt.xticks([])
    plt.yticks([])
    plt.imshow(model_data, vmin=_min, vmax=_max, interpolation='nearest', cmap=colormaps.viridis)
    plt.colorbar()
    # Plot the benchmark data
    plt.subplot(1, 3, 2)
    plt.xlabel("Benchmark Data")
    plt.xticks([])
    plt.yticks([])
    plt.imshow(bench_data, vmin=_min, vmax=_max, interpolation='nearest', cmap=colormaps.viridis)
    plt.colorbar()
    # Plot the difference (auto-scaled: differences are usually tiny
    # relative to the shared model/benchmark range).
    plt.subplot(1, 3, 3)
    plt.xlabel("Difference")
    plt.xticks([])
    plt.yticks([])
    plt.imshow(diff_data, interpolation='nearest', cmap=colormaps.viridis)
    plt.colorbar()
    plt.tight_layout(rect=(0, 0, 0.95, 0.9))
    plt.suptitle(plot_title)
    plot_file = os.path.sep.join([plot_path, plot_name])
    if livvkit.publish:
        plt.savefig(os.path.splitext(plot_file)[0]+'.eps', dpi=600)
    plt.savefig(plot_file)
    plt.close()
    return os.path.join(os.path.relpath(plot_path,
                                        os.path.join(livvkit.output_dir, "verification")),
                        plot_name)
|
def function[plot_bit_for_bit, parameter[case, var_name, model_data, bench_data, diff_data]]:
constant[ Create a bit for bit plot ]
variable[plot_title] assign[=] constant[]
variable[plot_name] assign[=] binary_operation[binary_operation[binary_operation[name[case] + constant[_]] + name[var_name]] + constant[.png]]
variable[plot_path] assign[=] call[name[os].path.join, parameter[call[name[os].path.join, parameter[name[livvkit].output_dir, constant[verification], constant[imgs]]]]]
call[name[functions].mkdir_p, parameter[name[plot_path]]]
variable[m_ndim] assign[=] call[name[np].ndim, parameter[name[model_data]]]
variable[b_ndim] assign[=] call[name[np].ndim, parameter[name[bench_data]]]
if compare[name[m_ndim] not_equal[!=] name[b_ndim]] begin[:]
return[constant[Dataset dimensions didn't match!]]
if compare[name[m_ndim] equal[==] constant[3]] begin[:]
variable[model_data] assign[=] call[name[model_data]][<ast.UnaryOp object at 0x7da1b0a2ece0>]
variable[bench_data] assign[=] call[name[bench_data]][<ast.UnaryOp object at 0x7da1b0a2c3d0>]
variable[diff_data] assign[=] call[name[diff_data]][<ast.UnaryOp object at 0x7da1b0a2c040>]
variable[plot_title] assign[=] binary_operation[binary_operation[constant[Showing ] + name[var_name]] + constant[[-1,:,:]]]
call[name[plt].figure, parameter[]]
call[name[plt].clf, parameter[]]
variable[_max] assign[=] call[name[np].amax, parameter[list[[<ast.Call object at 0x7da1b0a2e380>, <ast.Call object at 0x7da1b0a2d180>]]]]
variable[_min] assign[=] call[name[np].amin, parameter[list[[<ast.Call object at 0x7da1b0a2dcf0>, <ast.Call object at 0x7da1b0a2c8e0>]]]]
call[name[plt].subplot, parameter[constant[1], constant[3], constant[1]]]
call[name[plt].xlabel, parameter[constant[Model Data]]]
call[name[plt].ylabel, parameter[name[var_name]]]
call[name[plt].xticks, parameter[list[[]]]]
call[name[plt].yticks, parameter[list[[]]]]
call[name[plt].imshow, parameter[name[model_data]]]
call[name[plt].colorbar, parameter[]]
call[name[plt].subplot, parameter[constant[1], constant[3], constant[2]]]
call[name[plt].xlabel, parameter[constant[Benchmark Data]]]
call[name[plt].xticks, parameter[list[[]]]]
call[name[plt].yticks, parameter[list[[]]]]
call[name[plt].imshow, parameter[name[bench_data]]]
call[name[plt].colorbar, parameter[]]
call[name[plt].subplot, parameter[constant[1], constant[3], constant[3]]]
call[name[plt].xlabel, parameter[constant[Difference]]]
call[name[plt].xticks, parameter[list[[]]]]
call[name[plt].yticks, parameter[list[[]]]]
call[name[plt].imshow, parameter[name[diff_data]]]
call[name[plt].colorbar, parameter[]]
call[name[plt].tight_layout, parameter[]]
call[name[plt].suptitle, parameter[name[plot_title]]]
variable[plot_file] assign[=] call[name[os].path.sep.join, parameter[list[[<ast.Name object at 0x7da1b0b7db70>, <ast.Name object at 0x7da1b0b7d150>]]]]
if name[livvkit].publish begin[:]
call[name[plt].savefig, parameter[binary_operation[call[call[name[os].path.splitext, parameter[name[plot_file]]]][constant[0]] + constant[.eps]]]]
call[name[plt].savefig, parameter[name[plot_file]]]
call[name[plt].close, parameter[]]
return[call[name[os].path.join, parameter[call[name[os].path.relpath, parameter[name[plot_path], call[name[os].path.join, parameter[name[livvkit].output_dir, constant[verification]]]]], name[plot_name]]]]
|
keyword[def] identifier[plot_bit_for_bit] ( identifier[case] , identifier[var_name] , identifier[model_data] , identifier[bench_data] , identifier[diff_data] ):
literal[string]
identifier[plot_title] = literal[string]
identifier[plot_name] = identifier[case] + literal[string] + identifier[var_name] + literal[string]
identifier[plot_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[join] ( identifier[livvkit] . identifier[output_dir] , literal[string] , literal[string] ))
identifier[functions] . identifier[mkdir_p] ( identifier[plot_path] )
identifier[m_ndim] = identifier[np] . identifier[ndim] ( identifier[model_data] )
identifier[b_ndim] = identifier[np] . identifier[ndim] ( identifier[bench_data] )
keyword[if] identifier[m_ndim] != identifier[b_ndim] :
keyword[return] literal[string]
keyword[if] identifier[m_ndim] == literal[int] :
identifier[model_data] = identifier[model_data] [- literal[int] ]
identifier[bench_data] = identifier[bench_data] [- literal[int] ]
identifier[diff_data] = identifier[diff_data] [- literal[int] ]
identifier[plot_title] = literal[string] + identifier[var_name] + literal[string]
keyword[elif] identifier[m_ndim] == literal[int] :
identifier[model_data] = identifier[model_data] [- literal[int] ][ literal[int] ]
identifier[bench_data] = identifier[bench_data] [- literal[int] ][ literal[int] ]
identifier[diff_data] = identifier[diff_data] [- literal[int] ][ literal[int] ]
identifier[plot_title] = literal[string] + identifier[var_name] + literal[string]
identifier[plt] . identifier[figure] ( identifier[figsize] =( literal[int] , literal[int] ), identifier[dpi] = literal[int] )
identifier[plt] . identifier[clf] ()
identifier[_max] = identifier[np] . identifier[amax] ([ identifier[np] . identifier[amax] ( identifier[model_data] ), identifier[np] . identifier[amax] ( identifier[bench_data] )])
identifier[_min] = identifier[np] . identifier[amin] ([ identifier[np] . identifier[amin] ( identifier[model_data] ), identifier[np] . identifier[amin] ( identifier[bench_data] )])
identifier[plt] . identifier[subplot] ( literal[int] , literal[int] , literal[int] )
identifier[plt] . identifier[xlabel] ( literal[string] )
identifier[plt] . identifier[ylabel] ( identifier[var_name] )
identifier[plt] . identifier[xticks] ([])
identifier[plt] . identifier[yticks] ([])
identifier[plt] . identifier[imshow] ( identifier[model_data] , identifier[vmin] = identifier[_min] , identifier[vmax] = identifier[_max] , identifier[interpolation] = literal[string] , identifier[cmap] = identifier[colormaps] . identifier[viridis] )
identifier[plt] . identifier[colorbar] ()
identifier[plt] . identifier[subplot] ( literal[int] , literal[int] , literal[int] )
identifier[plt] . identifier[xlabel] ( literal[string] )
identifier[plt] . identifier[xticks] ([])
identifier[plt] . identifier[yticks] ([])
identifier[plt] . identifier[imshow] ( identifier[bench_data] , identifier[vmin] = identifier[_min] , identifier[vmax] = identifier[_max] , identifier[interpolation] = literal[string] , identifier[cmap] = identifier[colormaps] . identifier[viridis] )
identifier[plt] . identifier[colorbar] ()
identifier[plt] . identifier[subplot] ( literal[int] , literal[int] , literal[int] )
identifier[plt] . identifier[xlabel] ( literal[string] )
identifier[plt] . identifier[xticks] ([])
identifier[plt] . identifier[yticks] ([])
identifier[plt] . identifier[imshow] ( identifier[diff_data] , identifier[interpolation] = literal[string] , identifier[cmap] = identifier[colormaps] . identifier[viridis] )
identifier[plt] . identifier[colorbar] ()
identifier[plt] . identifier[tight_layout] ( identifier[rect] =( literal[int] , literal[int] , literal[int] , literal[int] ))
identifier[plt] . identifier[suptitle] ( identifier[plot_title] )
identifier[plot_file] = identifier[os] . identifier[path] . identifier[sep] . identifier[join] ([ identifier[plot_path] , identifier[plot_name] ])
keyword[if] identifier[livvkit] . identifier[publish] :
identifier[plt] . identifier[savefig] ( identifier[os] . identifier[path] . identifier[splitext] ( identifier[plot_file] )[ literal[int] ]+ literal[string] , identifier[dpi] = literal[int] )
identifier[plt] . identifier[savefig] ( identifier[plot_file] )
identifier[plt] . identifier[close] ()
keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[relpath] ( identifier[plot_path] ,
identifier[os] . identifier[path] . identifier[join] ( identifier[livvkit] . identifier[output_dir] , literal[string] )),
identifier[plot_name] )
|
def plot_bit_for_bit(case, var_name, model_data, bench_data, diff_data):
""" Create a bit for bit plot """
plot_title = ''
plot_name = case + '_' + var_name + '.png'
plot_path = os.path.join(os.path.join(livvkit.output_dir, 'verification', 'imgs'))
functions.mkdir_p(plot_path)
m_ndim = np.ndim(model_data)
b_ndim = np.ndim(bench_data)
if m_ndim != b_ndim:
return "Dataset dimensions didn't match!" # depends on [control=['if'], data=[]]
if m_ndim == 3:
model_data = model_data[-1]
bench_data = bench_data[-1]
diff_data = diff_data[-1]
plot_title = 'Showing ' + var_name + '[-1,:,:]' # depends on [control=['if'], data=[]]
elif m_ndim == 4:
model_data = model_data[-1][0]
bench_data = bench_data[-1][0]
diff_data = diff_data[-1][0]
plot_title = 'Showing ' + var_name + '[-1,0,:,:]' # depends on [control=['if'], data=[]]
plt.figure(figsize=(12, 3), dpi=80)
plt.clf()
# Calculate min and max to scale the colorbars
_max = np.amax([np.amax(model_data), np.amax(bench_data)])
_min = np.amin([np.amin(model_data), np.amin(bench_data)])
# Plot the model output
plt.subplot(1, 3, 1)
plt.xlabel('Model Data')
plt.ylabel(var_name)
plt.xticks([])
plt.yticks([])
plt.imshow(model_data, vmin=_min, vmax=_max, interpolation='nearest', cmap=colormaps.viridis)
plt.colorbar()
# Plot the benchmark data
plt.subplot(1, 3, 2)
plt.xlabel('Benchmark Data')
plt.xticks([])
plt.yticks([])
plt.imshow(bench_data, vmin=_min, vmax=_max, interpolation='nearest', cmap=colormaps.viridis)
plt.colorbar()
# Plot the difference
plt.subplot(1, 3, 3)
plt.xlabel('Difference')
plt.xticks([])
plt.yticks([])
plt.imshow(diff_data, interpolation='nearest', cmap=colormaps.viridis)
plt.colorbar()
plt.tight_layout(rect=(0, 0, 0.95, 0.9))
plt.suptitle(plot_title)
plot_file = os.path.sep.join([plot_path, plot_name])
if livvkit.publish:
plt.savefig(os.path.splitext(plot_file)[0] + '.eps', dpi=600) # depends on [control=['if'], data=[]]
plt.savefig(plot_file)
plt.close()
return os.path.join(os.path.relpath(plot_path, os.path.join(livvkit.output_dir, 'verification')), plot_name)
|
def _get_api_urls(self, api_urls=None):
"""
Completes a dict with the CRUD urls of the API.
:param api_urls: A dict with the urls {'<FUNCTION>':'<URL>',...}
:return: A dict with the CRUD urls of the base API.
"""
view_name = self.__class__.__name__
api_urls = api_urls or {}
api_urls["read"] = url_for(view_name + ".api_read")
api_urls["delete"] = url_for(view_name + ".api_delete", pk="")
api_urls["create"] = url_for(view_name + ".api_create")
api_urls["update"] = url_for(view_name + ".api_update", pk="")
return api_urls
|
def function[_get_api_urls, parameter[self, api_urls]]:
constant[
Completes a dict with the CRUD urls of the API.
:param api_urls: A dict with the urls {'<FUNCTION>':'<URL>',...}
:return: A dict with the CRUD urls of the base API.
]
variable[view_name] assign[=] name[self].__class__.__name__
variable[api_urls] assign[=] <ast.BoolOp object at 0x7da207f01300>
call[name[api_urls]][constant[read]] assign[=] call[name[url_for], parameter[binary_operation[name[view_name] + constant[.api_read]]]]
call[name[api_urls]][constant[delete]] assign[=] call[name[url_for], parameter[binary_operation[name[view_name] + constant[.api_delete]]]]
call[name[api_urls]][constant[create]] assign[=] call[name[url_for], parameter[binary_operation[name[view_name] + constant[.api_create]]]]
call[name[api_urls]][constant[update]] assign[=] call[name[url_for], parameter[binary_operation[name[view_name] + constant[.api_update]]]]
return[name[api_urls]]
|
keyword[def] identifier[_get_api_urls] ( identifier[self] , identifier[api_urls] = keyword[None] ):
literal[string]
identifier[view_name] = identifier[self] . identifier[__class__] . identifier[__name__]
identifier[api_urls] = identifier[api_urls] keyword[or] {}
identifier[api_urls] [ literal[string] ]= identifier[url_for] ( identifier[view_name] + literal[string] )
identifier[api_urls] [ literal[string] ]= identifier[url_for] ( identifier[view_name] + literal[string] , identifier[pk] = literal[string] )
identifier[api_urls] [ literal[string] ]= identifier[url_for] ( identifier[view_name] + literal[string] )
identifier[api_urls] [ literal[string] ]= identifier[url_for] ( identifier[view_name] + literal[string] , identifier[pk] = literal[string] )
keyword[return] identifier[api_urls]
|
def _get_api_urls(self, api_urls=None):
"""
Completes a dict with the CRUD urls of the API.
:param api_urls: A dict with the urls {'<FUNCTION>':'<URL>',...}
:return: A dict with the CRUD urls of the base API.
"""
view_name = self.__class__.__name__
api_urls = api_urls or {}
api_urls['read'] = url_for(view_name + '.api_read')
api_urls['delete'] = url_for(view_name + '.api_delete', pk='')
api_urls['create'] = url_for(view_name + '.api_create')
api_urls['update'] = url_for(view_name + '.api_update', pk='')
return api_urls
|
def on_step_end(self, step, logs):
    """ Update statistics of episode after each step """
    ep = logs['episode']
    # Append each logged value to the matching per-episode series.
    for series, key in ((self.observations, 'observation'),
                        (self.rewards, 'reward'),
                        (self.actions, 'action'),
                        (self.metrics, 'metrics')):
        series[ep].append(logs[key])
    self.step += 1
|
def function[on_step_end, parameter[self, step, logs]]:
constant[ Update statistics of episode after each step ]
variable[episode] assign[=] call[name[logs]][constant[episode]]
call[call[name[self].observations][name[episode]].append, parameter[call[name[logs]][constant[observation]]]]
call[call[name[self].rewards][name[episode]].append, parameter[call[name[logs]][constant[reward]]]]
call[call[name[self].actions][name[episode]].append, parameter[call[name[logs]][constant[action]]]]
call[call[name[self].metrics][name[episode]].append, parameter[call[name[logs]][constant[metrics]]]]
<ast.AugAssign object at 0x7da1b1f182b0>
|
keyword[def] identifier[on_step_end] ( identifier[self] , identifier[step] , identifier[logs] ):
literal[string]
identifier[episode] = identifier[logs] [ literal[string] ]
identifier[self] . identifier[observations] [ identifier[episode] ]. identifier[append] ( identifier[logs] [ literal[string] ])
identifier[self] . identifier[rewards] [ identifier[episode] ]. identifier[append] ( identifier[logs] [ literal[string] ])
identifier[self] . identifier[actions] [ identifier[episode] ]. identifier[append] ( identifier[logs] [ literal[string] ])
identifier[self] . identifier[metrics] [ identifier[episode] ]. identifier[append] ( identifier[logs] [ literal[string] ])
identifier[self] . identifier[step] += literal[int]
|
def on_step_end(self, step, logs):
""" Update statistics of episode after each step """
episode = logs['episode']
self.observations[episode].append(logs['observation'])
self.rewards[episode].append(logs['reward'])
self.actions[episode].append(logs['action'])
self.metrics[episode].append(logs['metrics'])
self.step += 1
|
def themeble(name, themes=None, global_context=None):
""" Decorator for registering objects (i.e. functions, classes) for
different themes.
Params:
* name - type of string. New global name for object
* themes - for this themes ``obj`` will be have alias with given name
* global_context - current decorator's global context
Example:
.. code:: python
# my_app.forms.py
@themeble(name='Form', themes=('dark_theme',))
class DarkThemeForm(object):
''' Some kind of logic for dark_theme
'''
name = 'DarkThemeForm'
@themeble(name='Form')
class DefaultForm(object):
''' Default logic for all themes
'''
name = 'Default form'
Now if settings.CURRENT_THEME == 'dark_theme':
.. code:: python
# my_app.views.py
from my_app.forms import Form
assert Form.name == 'DarkThemeForm'
"""
def wrap(obj):
context = global_context or inspect.stack()[1][0].f_globals
if name in context and not getattr(context[name], '__themeble', False):
raise RuntimeError(
'Name {} already exists in this context!'.format(name))
if ((themes and settings.CURRENT_THEME in themes) or
(themes is None and name not in context)):
context[name] = obj
obj.__themeble = True
return obj
return wrap
|
def function[themeble, parameter[name, themes, global_context]]:
constant[ Decorator for registering objects (i.e. functions, classes) for
different themes.
Params:
* name - type of string. New global name for object
* themes - for this themes ``obj`` will be have alias with given name
* global_context - current decorator's global context
Example:
.. code:: python
# my_app.forms.py
@themeble(name='Form', themes=('dark_theme',))
class DarkThemeForm(object):
''' Some kind of logic for dark_theme
'''
name = 'DarkThemeForm'
@themeble(name='Form')
class DefaultForm(object):
''' Default logic for all themes
'''
name = 'Default form'
Now if settings.CURRENT_THEME == 'dark_theme':
.. code:: python
# my_app.views.py
from my_app.forms import Form
assert Form.name == 'DarkThemeForm'
]
def function[wrap, parameter[obj]]:
variable[context] assign[=] <ast.BoolOp object at 0x7da1b13b82e0>
if <ast.BoolOp object at 0x7da1b13b9330> begin[:]
<ast.Raise object at 0x7da1b13b8f70>
if <ast.BoolOp object at 0x7da1b13ba200> begin[:]
call[name[context]][name[name]] assign[=] name[obj]
name[obj].__themeble assign[=] constant[True]
return[name[obj]]
return[name[wrap]]
|
keyword[def] identifier[themeble] ( identifier[name] , identifier[themes] = keyword[None] , identifier[global_context] = keyword[None] ):
literal[string]
keyword[def] identifier[wrap] ( identifier[obj] ):
identifier[context] = identifier[global_context] keyword[or] identifier[inspect] . identifier[stack] ()[ literal[int] ][ literal[int] ]. identifier[f_globals]
keyword[if] identifier[name] keyword[in] identifier[context] keyword[and] keyword[not] identifier[getattr] ( identifier[context] [ identifier[name] ], literal[string] , keyword[False] ):
keyword[raise] identifier[RuntimeError] (
literal[string] . identifier[format] ( identifier[name] ))
keyword[if] (( identifier[themes] keyword[and] identifier[settings] . identifier[CURRENT_THEME] keyword[in] identifier[themes] ) keyword[or]
( identifier[themes] keyword[is] keyword[None] keyword[and] identifier[name] keyword[not] keyword[in] identifier[context] )):
identifier[context] [ identifier[name] ]= identifier[obj]
identifier[obj] . identifier[__themeble] = keyword[True]
keyword[return] identifier[obj]
keyword[return] identifier[wrap]
|
def themeble(name, themes=None, global_context=None):
""" Decorator for registering objects (i.e. functions, classes) for
different themes.
Params:
* name - type of string. New global name for object
* themes - for this themes ``obj`` will be have alias with given name
* global_context - current decorator's global context
Example:
.. code:: python
# my_app.forms.py
@themeble(name='Form', themes=('dark_theme',))
class DarkThemeForm(object):
''' Some kind of logic for dark_theme
'''
name = 'DarkThemeForm'
@themeble(name='Form')
class DefaultForm(object):
''' Default logic for all themes
'''
name = 'Default form'
Now if settings.CURRENT_THEME == 'dark_theme':
.. code:: python
# my_app.views.py
from my_app.forms import Form
assert Form.name == 'DarkThemeForm'
"""
def wrap(obj):
context = global_context or inspect.stack()[1][0].f_globals
if name in context and (not getattr(context[name], '__themeble', False)):
raise RuntimeError('Name {} already exists in this context!'.format(name)) # depends on [control=['if'], data=[]]
if themes and settings.CURRENT_THEME in themes or (themes is None and name not in context):
context[name] = obj
obj.__themeble = True # depends on [control=['if'], data=[]]
return obj
return wrap
|
def md2pypi(filename):
    '''
    Load .md (markdown) file and sanitize it for PyPI.
    Remove unsupported github tags:
    - code-block directive
    - travis ci build badges

    The content is rewritten to reStructuredText in a sequence of regex
    passes.  The passes are order-dependent (see the note on reference
    links below), so do not reorder them.

    :param filename: path of the markdown file to convert.
    :return: the converted reStructuredText content as a string.
    '''
    content = io.open(filename).read()
    # Fenced code blocks -> ".. code-block::" directives; the "markdown"
    # language tag is dropped from the directive.
    for match in RE_MD_CODE_BLOCK.finditer(content):
        rst_block = '\n'.join(
            ['.. code-block:: {language}'.format(**match.groupdict()).replace('markdown', ''), ''] +
            ['    {0}'.format(l) for l in match.group('lines').split('\n')] +
            ['']
        )
        content = content.replace(match.group(0), rst_block)
    # Collect "[key]: url" reference definitions first, then rewrite them
    # and the simple link forms into their RST equivalents.
    refs = dict(RE_LINK_REF.findall(content))
    content = RE_LINK_REF.sub('.. _\g<key>: \g<url>', content)
    content = RE_SELF_LINK.sub('`\g<1>`_', content)
    content = RE_LINK_TO_URL.sub('`\g<text> <\g<url>>`_', content)
    # Badges: both the badge image and its click target are looked up in
    # the collected reference definitions.
    for match in RE_BADGE.finditer(content):
        params = match.groupdict()
        params['badge'] = refs[match.group('badge')]
        params['target'] = refs[match.group('target')]
        content = content.replace(match.group(0), RST_BADGE.format(**params))
    # Inline images; relative urls are rewritten to the raw file on the
    # repository's master branch.
    for match in RE_IMAGE.finditer(content):
        url = match.group('url')
        if not url.startswith('http'):
            url = '/'.join((GITHUB_REPOSITORY, 'raw/master', url))
        rst_block = '\n'.join([
            '.. image:: {0}'.format(url),
            '    :alt: {0}'.format(match.group('text'))
        ])
        content = content.replace(match.group(0), rst_block)
    # Must occur after badges -- otherwise badge references would be
    # consumed here as plain reference links first.
    for match in RE_LINK_TO_REF.finditer(content):
        content = content.replace(match.group(0), '`{text} <{url}>`_'.format(
            text=match.group('text'),
            url=refs[match.group('ref')]
        ))
    # "#"-style headings -> underlined RST titles; the underline character
    # is chosen by heading depth from RST_TITLE_LEVELS.
    for match in RE_TITLE.finditer(content):
        underchar = RST_TITLE_LEVELS[len(match.group('level')) - 1]
        title = match.group('title')
        underline = underchar * len(title)
        full_title = '\n'.join((title, underline))
        content = content.replace(match.group(0), full_title)
    content = RE_CODE.sub('``\g<1>``', content)
    return content
|
def function[md2pypi, parameter[filename]]:
constant[
Load .md (markdown) file and sanitize it for PyPI.
Remove unsupported github tags:
- code-block directive
- travis ci build badges
]
variable[content] assign[=] call[call[name[io].open, parameter[name[filename]]].read, parameter[]]
for taget[name[match]] in starred[call[name[RE_MD_CODE_BLOCK].finditer, parameter[name[content]]]] begin[:]
variable[rst_block] assign[=] call[constant[
].join, parameter[binary_operation[binary_operation[list[[<ast.Call object at 0x7da18fe93490>, <ast.Constant object at 0x7da18fe90fa0>]] + <ast.ListComp object at 0x7da18fe92ce0>] + list[[<ast.Constant object at 0x7da18fe93f70>]]]]]
variable[content] assign[=] call[name[content].replace, parameter[call[name[match].group, parameter[constant[0]]], name[rst_block]]]
variable[refs] assign[=] call[name[dict], parameter[call[name[RE_LINK_REF].findall, parameter[name[content]]]]]
variable[content] assign[=] call[name[RE_LINK_REF].sub, parameter[constant[.. _\g<key>: \g<url>], name[content]]]
variable[content] assign[=] call[name[RE_SELF_LINK].sub, parameter[constant[`\g<1>`_], name[content]]]
variable[content] assign[=] call[name[RE_LINK_TO_URL].sub, parameter[constant[`\g<text> <\g<url>>`_], name[content]]]
for taget[name[match]] in starred[call[name[RE_BADGE].finditer, parameter[name[content]]]] begin[:]
variable[params] assign[=] call[name[match].groupdict, parameter[]]
call[name[params]][constant[badge]] assign[=] call[name[refs]][call[name[match].group, parameter[constant[badge]]]]
call[name[params]][constant[target]] assign[=] call[name[refs]][call[name[match].group, parameter[constant[target]]]]
variable[content] assign[=] call[name[content].replace, parameter[call[name[match].group, parameter[constant[0]]], call[name[RST_BADGE].format, parameter[]]]]
for taget[name[match]] in starred[call[name[RE_IMAGE].finditer, parameter[name[content]]]] begin[:]
variable[url] assign[=] call[name[match].group, parameter[constant[url]]]
if <ast.UnaryOp object at 0x7da20c794790> begin[:]
variable[url] assign[=] call[constant[/].join, parameter[tuple[[<ast.Name object at 0x7da20c7957e0>, <ast.Constant object at 0x7da20c794d60>, <ast.Name object at 0x7da20c794310>]]]]
variable[rst_block] assign[=] call[constant[
].join, parameter[list[[<ast.Call object at 0x7da1b0a4ead0>, <ast.Call object at 0x7da1b0a4d840>]]]]
variable[content] assign[=] call[name[content].replace, parameter[call[name[match].group, parameter[constant[0]]], name[rst_block]]]
for taget[name[match]] in starred[call[name[RE_LINK_TO_REF].finditer, parameter[name[content]]]] begin[:]
variable[content] assign[=] call[name[content].replace, parameter[call[name[match].group, parameter[constant[0]]], call[constant[`{text} <{url}>`_].format, parameter[]]]]
for taget[name[match]] in starred[call[name[RE_TITLE].finditer, parameter[name[content]]]] begin[:]
variable[underchar] assign[=] call[name[RST_TITLE_LEVELS]][binary_operation[call[name[len], parameter[call[name[match].group, parameter[constant[level]]]]] - constant[1]]]
variable[title] assign[=] call[name[match].group, parameter[constant[title]]]
variable[underline] assign[=] binary_operation[name[underchar] * call[name[len], parameter[name[title]]]]
variable[full_title] assign[=] call[constant[
].join, parameter[tuple[[<ast.Name object at 0x7da1b0a4ef20>, <ast.Name object at 0x7da1b0a4d450>]]]]
variable[content] assign[=] call[name[content].replace, parameter[call[name[match].group, parameter[constant[0]]], name[full_title]]]
variable[content] assign[=] call[name[RE_CODE].sub, parameter[constant[``\g<1>``], name[content]]]
return[name[content]]
|
keyword[def] identifier[md2pypi] ( identifier[filename] ):
literal[string]
identifier[content] = identifier[io] . identifier[open] ( identifier[filename] ). identifier[read] ()
keyword[for] identifier[match] keyword[in] identifier[RE_MD_CODE_BLOCK] . identifier[finditer] ( identifier[content] ):
identifier[rst_block] = literal[string] . identifier[join] (
[ literal[string] . identifier[format] (** identifier[match] . identifier[groupdict] ()). identifier[replace] ( literal[string] , literal[string] ), literal[string] ]+
[ literal[string] . identifier[format] ( identifier[l] ) keyword[for] identifier[l] keyword[in] identifier[match] . identifier[group] ( literal[string] ). identifier[split] ( literal[string] )]+
[ literal[string] ]
)
identifier[content] = identifier[content] . identifier[replace] ( identifier[match] . identifier[group] ( literal[int] ), identifier[rst_block] )
identifier[refs] = identifier[dict] ( identifier[RE_LINK_REF] . identifier[findall] ( identifier[content] ))
identifier[content] = identifier[RE_LINK_REF] . identifier[sub] ( literal[string] , identifier[content] )
identifier[content] = identifier[RE_SELF_LINK] . identifier[sub] ( literal[string] , identifier[content] )
identifier[content] = identifier[RE_LINK_TO_URL] . identifier[sub] ( literal[string] , identifier[content] )
keyword[for] identifier[match] keyword[in] identifier[RE_BADGE] . identifier[finditer] ( identifier[content] ):
identifier[params] = identifier[match] . identifier[groupdict] ()
identifier[params] [ literal[string] ]= identifier[refs] [ identifier[match] . identifier[group] ( literal[string] )]
identifier[params] [ literal[string] ]= identifier[refs] [ identifier[match] . identifier[group] ( literal[string] )]
identifier[content] = identifier[content] . identifier[replace] ( identifier[match] . identifier[group] ( literal[int] ), identifier[RST_BADGE] . identifier[format] (** identifier[params] ))
keyword[for] identifier[match] keyword[in] identifier[RE_IMAGE] . identifier[finditer] ( identifier[content] ):
identifier[url] = identifier[match] . identifier[group] ( literal[string] )
keyword[if] keyword[not] identifier[url] . identifier[startswith] ( literal[string] ):
identifier[url] = literal[string] . identifier[join] (( identifier[GITHUB_REPOSITORY] , literal[string] , identifier[url] ))
identifier[rst_block] = literal[string] . identifier[join] ([
literal[string] . identifier[format] ( identifier[url] ),
literal[string] . identifier[format] ( identifier[match] . identifier[group] ( literal[string] ))
])
identifier[content] = identifier[content] . identifier[replace] ( identifier[match] . identifier[group] ( literal[int] ), identifier[rst_block] )
keyword[for] identifier[match] keyword[in] identifier[RE_LINK_TO_REF] . identifier[finditer] ( identifier[content] ):
identifier[content] = identifier[content] . identifier[replace] ( identifier[match] . identifier[group] ( literal[int] ), literal[string] . identifier[format] (
identifier[text] = identifier[match] . identifier[group] ( literal[string] ),
identifier[url] = identifier[refs] [ identifier[match] . identifier[group] ( literal[string] )]
))
keyword[for] identifier[match] keyword[in] identifier[RE_TITLE] . identifier[finditer] ( identifier[content] ):
identifier[underchar] = identifier[RST_TITLE_LEVELS] [ identifier[len] ( identifier[match] . identifier[group] ( literal[string] ))- literal[int] ]
identifier[title] = identifier[match] . identifier[group] ( literal[string] )
identifier[underline] = identifier[underchar] * identifier[len] ( identifier[title] )
identifier[full_title] = literal[string] . identifier[join] (( identifier[title] , identifier[underline] ))
identifier[content] = identifier[content] . identifier[replace] ( identifier[match] . identifier[group] ( literal[int] ), identifier[full_title] )
identifier[content] = identifier[RE_CODE] . identifier[sub] ( literal[string] , identifier[content] )
keyword[return] identifier[content]
|
def md2pypi(filename):
"""
Load .md (markdown) file and sanitize it for PyPI.
Remove unsupported github tags:
- code-block directive
- travis ci build badges
"""
content = io.open(filename).read()
for match in RE_MD_CODE_BLOCK.finditer(content):
rst_block = '\n'.join(['.. code-block:: {language}'.format(**match.groupdict()).replace('markdown', ''), ''] + [' {0}'.format(l) for l in match.group('lines').split('\n')] + [''])
content = content.replace(match.group(0), rst_block) # depends on [control=['for'], data=['match']]
refs = dict(RE_LINK_REF.findall(content))
content = RE_LINK_REF.sub('.. _\\g<key>: \\g<url>', content)
content = RE_SELF_LINK.sub('`\\g<1>`_', content)
content = RE_LINK_TO_URL.sub('`\\g<text> <\\g<url>>`_', content)
for match in RE_BADGE.finditer(content):
params = match.groupdict()
params['badge'] = refs[match.group('badge')]
params['target'] = refs[match.group('target')]
content = content.replace(match.group(0), RST_BADGE.format(**params)) # depends on [control=['for'], data=['match']]
for match in RE_IMAGE.finditer(content):
url = match.group('url')
if not url.startswith('http'):
url = '/'.join((GITHUB_REPOSITORY, 'raw/master', url)) # depends on [control=['if'], data=[]]
rst_block = '\n'.join(['.. image:: {0}'.format(url), ' :alt: {0}'.format(match.group('text'))])
content = content.replace(match.group(0), rst_block) # depends on [control=['for'], data=['match']]
# Must occur after badges
for match in RE_LINK_TO_REF.finditer(content):
content = content.replace(match.group(0), '`{text} <{url}>`_'.format(text=match.group('text'), url=refs[match.group('ref')])) # depends on [control=['for'], data=['match']]
for match in RE_TITLE.finditer(content):
underchar = RST_TITLE_LEVELS[len(match.group('level')) - 1]
title = match.group('title')
underline = underchar * len(title)
full_title = '\n'.join((title, underline))
content = content.replace(match.group(0), full_title) # depends on [control=['for'], data=['match']]
content = RE_CODE.sub('``\\g<1>``', content)
return content
|
def export(self):
    """
    Returns a dictionary with all album information.
    Use the :meth:`from_export` method to recreate the
    :class:`Album` object.
    """
    exported = {
        'id': self.id,
        'name': self.name,
        'artist': self._artist_name,
        'artist_id': self._artist_id,
        'cover': self._cover_url,
    }
    return exported
|
def function[export, parameter[self]]:
constant[
Returns a dictionary with all album information.
Use the :meth:`from_export` method to recreate the
:class:`Album` object.
]
return[dictionary[[<ast.Constant object at 0x7da1b28be740>, <ast.Constant object at 0x7da1b28be7d0>, <ast.Constant object at 0x7da1b28be950>, <ast.Constant object at 0x7da1b28bf220>, <ast.Constant object at 0x7da1b28bf340>], [<ast.Attribute object at 0x7da1b28bf2b0>, <ast.Attribute object at 0x7da1b28bf280>, <ast.Attribute object at 0x7da1b28bf3a0>, <ast.Attribute object at 0x7da1b28bf3d0>, <ast.Attribute object at 0x7da1b28bde40>]]]
|
keyword[def] identifier[export] ( identifier[self] ):
literal[string]
keyword[return] { literal[string] : identifier[self] . identifier[id] , literal[string] : identifier[self] . identifier[name] , literal[string] : identifier[self] . identifier[_artist_name] , literal[string] : identifier[self] . identifier[_artist_id] , literal[string] : identifier[self] . identifier[_cover_url] }
|
def export(self):
"""
Returns a dictionary with all album information.
Use the :meth:`from_export` method to recreate the
:class:`Album` object.
"""
return {'id': self.id, 'name': self.name, 'artist': self._artist_name, 'artist_id': self._artist_id, 'cover': self._cover_url}
|
def post_task(task_data, task_uri='/tasks'):
    """Create Spinnaker Task.

    Args:
        task_data (str or dict): Task JSON definition; a mapping is
            serialized, a string is sent as-is.
        task_uri (str): Endpoint to POST the Task to, relative to the API.

    Returns:
        str: Spinnaker Task ID.

    Raises:
        AssertionError: Error response from Spinnaker.
    """
    url = '{}/{}'.format(API_URL, task_uri.lstrip('/'))

    # Accept either a pre-serialized JSON string or a plain mapping.
    task_json = task_data if isinstance(task_data, str) else json.dumps(task_data)

    resp = requests.post(url, data=task_json, headers=HEADERS, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
    resp_json = resp.json()
    LOG.debug(resp_json)

    assert resp.ok, 'Spinnaker communication error: {0}'.format(resp.text)
    return resp_json['ref']
|
def function[post_task, parameter[task_data, task_uri]]:
constant[Create Spinnaker Task.
Args:
task_data (str): Task JSON definition.
Returns:
str: Spinnaker Task ID.
Raises:
AssertionError: Error response from Spinnaker.
]
variable[url] assign[=] call[constant[{}/{}].format, parameter[name[API_URL], call[name[task_uri].lstrip, parameter[constant[/]]]]]
if call[name[isinstance], parameter[name[task_data], name[str]]] begin[:]
variable[task_json] assign[=] name[task_data]
variable[resp] assign[=] call[name[requests].post, parameter[name[url]]]
variable[resp_json] assign[=] call[name[resp].json, parameter[]]
call[name[LOG].debug, parameter[name[resp_json]]]
assert[name[resp].ok]
return[call[name[resp_json]][constant[ref]]]
|
keyword[def] identifier[post_task] ( identifier[task_data] , identifier[task_uri] = literal[string] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[API_URL] , identifier[task_uri] . identifier[lstrip] ( literal[string] ))
keyword[if] identifier[isinstance] ( identifier[task_data] , identifier[str] ):
identifier[task_json] = identifier[task_data]
keyword[else] :
identifier[task_json] = identifier[json] . identifier[dumps] ( identifier[task_data] )
identifier[resp] = identifier[requests] . identifier[post] ( identifier[url] , identifier[data] = identifier[task_json] , identifier[headers] = identifier[HEADERS] , identifier[verify] = identifier[GATE_CA_BUNDLE] , identifier[cert] = identifier[GATE_CLIENT_CERT] )
identifier[resp_json] = identifier[resp] . identifier[json] ()
identifier[LOG] . identifier[debug] ( identifier[resp_json] )
keyword[assert] identifier[resp] . identifier[ok] , literal[string] . identifier[format] ( identifier[resp] . identifier[text] )
keyword[return] identifier[resp_json] [ literal[string] ]
|
def post_task(task_data, task_uri='/tasks'):
"""Create Spinnaker Task.
Args:
task_data (str): Task JSON definition.
Returns:
str: Spinnaker Task ID.
Raises:
AssertionError: Error response from Spinnaker.
"""
url = '{}/{}'.format(API_URL, task_uri.lstrip('/'))
if isinstance(task_data, str):
task_json = task_data # depends on [control=['if'], data=[]]
else:
task_json = json.dumps(task_data)
resp = requests.post(url, data=task_json, headers=HEADERS, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
resp_json = resp.json()
LOG.debug(resp_json)
assert resp.ok, 'Spinnaker communication error: {0}'.format(resp.text)
return resp_json['ref']
|
def confirm_email(self, confirmation_key):
    """
    Confirm an email address by checking a ``confirmation_key``.
    A valid ``confirmation_key`` will set the newly wanted e-mail
    address as the current e-mail address. Returns the user after
    success or ``False`` when the confirmation key is
    invalid. Also sends the ``confirmation_complete`` signal.
    :param confirmation_key:
        String containing the secret SHA1 that is used for verification.
    :return:
        The verified :class:`User` or ``False`` if not successful.
    """
    # Reject anything that does not even look like a SHA1 digest.
    if not SHA1_RE.search(confirmation_key):
        return False

    try:
        userena = self.get(email_confirmation_key=confirmation_key,
                           email_unconfirmed__isnull=False)
    except self.model.DoesNotExist:
        return False

    user = userena.user
    old_email = user.email

    # Promote the pending address and clear the confirmation state.
    user.email = userena.email_unconfirmed
    userena.email_unconfirmed = ''
    userena.email_confirmation_key = ''
    userena.save(using=self._db)
    user.save(using=self._db)

    # Send the confirmation_complete signal
    userena_signals.confirmation_complete.send(sender=None,
                                               user=user,
                                               old_email=old_email)
    return user
|
def function[confirm_email, parameter[self, confirmation_key]]:
constant[
Confirm an email address by checking a ``confirmation_key``.
A valid ``confirmation_key`` will set the newly wanted e-mail
address as the current e-mail address. Returns the user after
success or ``False`` when the confirmation key is
invalid. Also sends the ``confirmation_complete`` signal.
:param confirmation_key:
String containing the secret SHA1 that is used for verification.
:return:
The verified :class:`User` or ``False`` if not successful.
]
if call[name[SHA1_RE].search, parameter[name[confirmation_key]]] begin[:]
<ast.Try object at 0x7da18f721060>
return[constant[False]]
|
keyword[def] identifier[confirm_email] ( identifier[self] , identifier[confirmation_key] ):
literal[string]
keyword[if] identifier[SHA1_RE] . identifier[search] ( identifier[confirmation_key] ):
keyword[try] :
identifier[userena] = identifier[self] . identifier[get] ( identifier[email_confirmation_key] = identifier[confirmation_key] ,
identifier[email_unconfirmed__isnull] = keyword[False] )
keyword[except] identifier[self] . identifier[model] . identifier[DoesNotExist] :
keyword[return] keyword[False]
keyword[else] :
identifier[user] = identifier[userena] . identifier[user]
identifier[old_email] = identifier[user] . identifier[email]
identifier[user] . identifier[email] = identifier[userena] . identifier[email_unconfirmed]
identifier[userena] . identifier[email_unconfirmed] , identifier[userena] . identifier[email_confirmation_key] = literal[string] , literal[string]
identifier[userena] . identifier[save] ( identifier[using] = identifier[self] . identifier[_db] )
identifier[user] . identifier[save] ( identifier[using] = identifier[self] . identifier[_db] )
identifier[userena_signals] . identifier[confirmation_complete] . identifier[send] ( identifier[sender] = keyword[None] ,
identifier[user] = identifier[user] ,
identifier[old_email] = identifier[old_email] )
keyword[return] identifier[user]
keyword[return] keyword[False]
|
def confirm_email(self, confirmation_key):
"""
Confirm an email address by checking a ``confirmation_key``.
A valid ``confirmation_key`` will set the newly wanted e-mail
address as the current e-mail address. Returns the user after
success or ``False`` when the confirmation key is
invalid. Also sends the ``confirmation_complete`` signal.
:param confirmation_key:
String containing the secret SHA1 that is used for verification.
:return:
The verified :class:`User` or ``False`` if not successful.
"""
if SHA1_RE.search(confirmation_key):
try:
userena = self.get(email_confirmation_key=confirmation_key, email_unconfirmed__isnull=False) # depends on [control=['try'], data=[]]
except self.model.DoesNotExist:
return False # depends on [control=['except'], data=[]]
else:
user = userena.user
old_email = user.email
user.email = userena.email_unconfirmed
(userena.email_unconfirmed, userena.email_confirmation_key) = ('', '')
userena.save(using=self._db)
user.save(using=self._db)
# Send the confirmation_complete signal
userena_signals.confirmation_complete.send(sender=None, user=user, old_email=old_email)
return user # depends on [control=['if'], data=[]]
return False
|
def module_path(self, filepath):
    """Convert a file path to an importable dotted module path.

    Given a filepath like /base/path/to/module.py this will convert it to
    path.to.module so it can be imported.  The module root is the first
    ancestor directory that contains an ``__init__.py``; everything before
    it is discarded from the dotted path.

    :param filepath: path to a ``.py`` file, using ``/`` or ``\\`` separators.
    :return: dotted module path as a string.
    """
    # BUGFIX: the pattern must be a raw string -- '[\\/]' reaches the regex
    # engine as [\/], which matches only '/', so Windows-style backslash
    # paths were never split.  r'[\\/]' matches both separators.
    possible_modbits = re.split(r'[\\/]', filepath.strip('\\/'))
    basename = possible_modbits[-1]
    prefixes = possible_modbits[0:-1]
    modpath = []
    # find the first directory that has an __init__.py
    for i in range(len(prefixes)):
        path_args = ["/"]
        path_args.extend(prefixes[0:i + 1])
        path_args.append('__init__.py')
        prefix_module = os.path.join(*path_args)
        if os.path.isfile(prefix_module):
            modpath = prefixes[i:]
            break
    modpath.append(basename)
    # convert the remaining file path to a python module path that can be
    # imported (also strips a trailing ".py" / ".__init__.py", any case)
    module_name = '.'.join(modpath)
    module_name = re.sub(r'(?:\.__init__)?\.py$', '', module_name, flags=re.I)
    logger.debug("Module path {} found in filepath {}".format(module_name, filepath))
    return module_name
|
def function[module_path, parameter[self, filepath]]:
constant[given a filepath like /base/path/to/module.py this will convert it to
path.to.module so it can be imported]
variable[possible_modbits] assign[=] call[name[re].split, parameter[constant[[\/]], call[name[filepath].strip, parameter[constant[\/]]]]]
variable[basename] assign[=] call[name[possible_modbits]][<ast.UnaryOp object at 0x7da18eb57910>]
variable[prefixes] assign[=] call[name[possible_modbits]][<ast.Slice object at 0x7da18eb57f10>]
variable[modpath] assign[=] list[[]]
variable[discarded] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[prefixes]]]]]] begin[:]
variable[path_args] assign[=] list[[<ast.Constant object at 0x7da18eb56020>]]
call[name[path_args].extend, parameter[call[name[prefixes]][<ast.Slice object at 0x7da18eb54f10>]]]
call[name[path_args].append, parameter[constant[__init__.py]]]
variable[prefix_module] assign[=] call[name[os].path.join, parameter[<ast.Starred object at 0x7da18eb553f0>]]
if call[name[os].path.isfile, parameter[name[prefix_module]]] begin[:]
variable[modpath] assign[=] call[name[prefixes]][<ast.Slice object at 0x7da18eb57be0>]
break
call[name[modpath].append, parameter[name[basename]]]
variable[module_name] assign[=] call[constant[.].join, parameter[name[modpath]]]
variable[module_name] assign[=] call[name[re].sub, parameter[constant[(?:\.__init__)?\.py$], constant[], name[module_name]]]
call[name[logger].debug, parameter[call[constant[Module path {} found in filepath {}].format, parameter[name[module_name], name[filepath]]]]]
return[name[module_name]]
|
keyword[def] identifier[module_path] ( identifier[self] , identifier[filepath] ):
literal[string]
identifier[possible_modbits] = identifier[re] . identifier[split] ( literal[string] , identifier[filepath] . identifier[strip] ( literal[string] ))
identifier[basename] = identifier[possible_modbits] [- literal[int] ]
identifier[prefixes] = identifier[possible_modbits] [ literal[int] :- literal[int] ]
identifier[modpath] =[]
identifier[discarded] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[prefixes] )):
identifier[path_args] =[ literal[string] ]
identifier[path_args] . identifier[extend] ( identifier[prefixes] [ literal[int] : identifier[i] + literal[int] ])
identifier[path_args] . identifier[append] ( literal[string] )
identifier[prefix_module] = identifier[os] . identifier[path] . identifier[join] (* identifier[path_args] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[prefix_module] ):
identifier[modpath] = identifier[prefixes] [ identifier[i] :]
keyword[break]
keyword[else] :
identifier[discarded] = identifier[path_args] [ literal[int] :- literal[int] ]
identifier[modpath] . identifier[append] ( identifier[basename] )
identifier[module_name] = literal[string] . identifier[join] ( identifier[modpath] )
identifier[module_name] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[module_name] , identifier[flags] = identifier[re] . identifier[I] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[module_name] , identifier[filepath] ))
keyword[return] identifier[module_name]
|
def module_path(self, filepath):
"""given a filepath like /base/path/to/module.py this will convert it to
path.to.module so it can be imported"""
possible_modbits = re.split('[\\/]', filepath.strip('\\/'))
basename = possible_modbits[-1]
prefixes = possible_modbits[0:-1]
modpath = []
discarded = []
# find the first directory that has an __init__.py
for i in range(len(prefixes)):
path_args = ['/']
path_args.extend(prefixes[0:i + 1])
path_args.append('__init__.py')
prefix_module = os.path.join(*path_args)
#logger.debug("Checking prefix modulepath: {}".format(prefix_module))
if os.path.isfile(prefix_module):
#logger.debug("Found start of modulepath: {}".format(prefixes[i]))
modpath = prefixes[i:]
break # depends on [control=['if'], data=[]]
else:
discarded = path_args[0:-1] # depends on [control=['for'], data=['i']]
modpath.append(basename)
# convert the remaining file path to a python module path that can be imported
module_name = '.'.join(modpath)
module_name = re.sub('(?:\\.__init__)?\\.py$', '', module_name, flags=re.I)
logger.debug('Module path {} found in filepath {}'.format(module_name, filepath))
return module_name
|
def delete(method, hmc, uri, uri_parms, logon_required):
    """Operation: Delete <resource>.

    Looks up the resource addressed by ``uri`` on the HMC and removes it
    from its manager.  Raises :exc:`InvalidResourceError` when no resource
    exists at that URI.
    """
    try:
        target = hmc.lookup_by_uri(uri)
    except KeyError:
        raise InvalidResourceError(method, uri)
    target.manager.remove(target.oid)
|
def function[delete, parameter[method, hmc, uri, uri_parms, logon_required]]:
constant[Operation: Delete <resource>.]
<ast.Try object at 0x7da1b0592b30>
call[name[resource].manager.remove, parameter[name[resource].oid]]
|
keyword[def] identifier[delete] ( identifier[method] , identifier[hmc] , identifier[uri] , identifier[uri_parms] , identifier[logon_required] ):
literal[string]
keyword[try] :
identifier[resource] = identifier[hmc] . identifier[lookup_by_uri] ( identifier[uri] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[InvalidResourceError] ( identifier[method] , identifier[uri] )
identifier[resource] . identifier[manager] . identifier[remove] ( identifier[resource] . identifier[oid] )
|
def delete(method, hmc, uri, uri_parms, logon_required):
"""Operation: Delete <resource>."""
try:
resource = hmc.lookup_by_uri(uri) # depends on [control=['try'], data=[]]
except KeyError:
raise InvalidResourceError(method, uri) # depends on [control=['except'], data=[]]
resource.manager.remove(resource.oid)
|
def Write(self, grr_message):
    """Write the message into the transaction log.

    The serialized message replaces the previous log contents.  Failures
    are logged but never raised: the transaction log is best-effort and
    must not crash the caller.

    Args:
      grr_message: A message object exposing a ``SerializeToString`` method.
    """
    grr_message = grr_message.SerializeToString()
    try:
        with io.open(self.logfile, "wb") as fd:
            fd.write(grr_message)
    except (IOError, OSError):
        # Check if we're missing directories and try to create them.
        if os.path.isdir(os.path.dirname(self.logfile)):
            # The directory exists, so the write itself failed (e.g. a
            # permission problem).  Previously this case was swallowed
            # silently; log it so the failure is diagnosable.
            logging.exception("Couldn't write nanny transaction log to %s",
                              self.logfile)
        else:
            try:
                os.makedirs(os.path.dirname(self.logfile))
                with io.open(self.logfile, "wb") as fd:
                    fd.write(grr_message)
            except (IOError, OSError):
                logging.exception("Couldn't write nanny transaction log to %s",
                                  self.logfile)
|
def function[Write, parameter[self, grr_message]]:
constant[Write the message into the transaction log.]
variable[grr_message] assign[=] call[name[grr_message].SerializeToString, parameter[]]
<ast.Try object at 0x7da1b1c0f310>
|
keyword[def] identifier[Write] ( identifier[self] , identifier[grr_message] ):
literal[string]
identifier[grr_message] = identifier[grr_message] . identifier[SerializeToString] ()
keyword[try] :
keyword[with] identifier[io] . identifier[open] ( identifier[self] . identifier[logfile] , literal[string] ) keyword[as] identifier[fd] :
identifier[fd] . identifier[write] ( identifier[grr_message] )
keyword[except] ( identifier[IOError] , identifier[OSError] ):
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[self] . identifier[logfile] )):
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[self] . identifier[logfile] ))
keyword[with] identifier[io] . identifier[open] ( identifier[self] . identifier[logfile] , literal[string] ) keyword[as] identifier[fd] :
identifier[fd] . identifier[write] ( identifier[grr_message] )
keyword[except] ( identifier[IOError] , identifier[OSError] ):
identifier[logging] . identifier[exception] ( literal[string] ,
identifier[self] . identifier[logfile] )
|
def Write(self, grr_message):
"""Write the message into the transaction log."""
grr_message = grr_message.SerializeToString()
try:
with io.open(self.logfile, 'wb') as fd:
fd.write(grr_message) # depends on [control=['with'], data=['fd']] # depends on [control=['try'], data=[]]
except (IOError, OSError):
# Check if we're missing directories and try to create them.
if not os.path.isdir(os.path.dirname(self.logfile)):
try:
os.makedirs(os.path.dirname(self.logfile))
with io.open(self.logfile, 'wb') as fd:
fd.write(grr_message) # depends on [control=['with'], data=['fd']] # depends on [control=['try'], data=[]]
except (IOError, OSError):
logging.exception("Couldn't write nanny transaction log to %s", self.logfile) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
|
def _dist(self, x, y):
    """Return ``self.dist(x, y)``.

    On a uniform but non-uniformly-weighted space, values on boundary
    cells are rescaled (using the partition's boundary cell fractions)
    before delegating to the base-class distance.
    """
    if not self.is_uniform or self.is_uniformly_weighted:
        # No boundary correction needed; defer to the parent class.
        return super(DiscreteLp, self)._dist(x, y)

    bdry_fracs = self.partition.boundary_cell_fractions
    func_list = _scaling_func_list(bdry_fracs, exponent=self.exponent)
    scaled = [
        self.element(apply_on_boundary(vec, func=func_list, only_once=False))
        for vec in (x, y)
    ]
    return super(DiscreteLp, self)._dist(scaled[0], scaled[1])
|
def function[_dist, parameter[self, x, y]]:
constant[Return ``self.dist(x, y)``.]
if <ast.BoolOp object at 0x7da1b20b6e00> begin[:]
variable[bdry_fracs] assign[=] name[self].partition.boundary_cell_fractions
variable[func_list] assign[=] call[name[_scaling_func_list], parameter[name[bdry_fracs]]]
variable[arrs] assign[=] <ast.ListComp object at 0x7da1b1e90970>
return[call[call[name[super], parameter[name[DiscreteLp], name[self]]]._dist, parameter[call[name[self].element, parameter[call[name[arrs]][constant[0]]]], call[name[self].element, parameter[call[name[arrs]][constant[1]]]]]]]
|
keyword[def] identifier[_dist] ( identifier[self] , identifier[x] , identifier[y] ):
literal[string]
keyword[if] identifier[self] . identifier[is_uniform] keyword[and] keyword[not] identifier[self] . identifier[is_uniformly_weighted] :
identifier[bdry_fracs] = identifier[self] . identifier[partition] . identifier[boundary_cell_fractions]
identifier[func_list] = identifier[_scaling_func_list] ( identifier[bdry_fracs] , identifier[exponent] = identifier[self] . identifier[exponent] )
identifier[arrs] =[ identifier[apply_on_boundary] ( identifier[vec] , identifier[func] = identifier[func_list] , identifier[only_once] = keyword[False] )
keyword[for] identifier[vec] keyword[in] ( identifier[x] , identifier[y] )]
keyword[return] identifier[super] ( identifier[DiscreteLp] , identifier[self] ). identifier[_dist] (
identifier[self] . identifier[element] ( identifier[arrs] [ literal[int] ]), identifier[self] . identifier[element] ( identifier[arrs] [ literal[int] ]))
keyword[else] :
keyword[return] identifier[super] ( identifier[DiscreteLp] , identifier[self] ). identifier[_dist] ( identifier[x] , identifier[y] )
|
def _dist(self, x, y):
"""Return ``self.dist(x, y)``."""
if self.is_uniform and (not self.is_uniformly_weighted):
bdry_fracs = self.partition.boundary_cell_fractions
func_list = _scaling_func_list(bdry_fracs, exponent=self.exponent)
arrs = [apply_on_boundary(vec, func=func_list, only_once=False) for vec in (x, y)]
return super(DiscreteLp, self)._dist(self.element(arrs[0]), self.element(arrs[1])) # depends on [control=['if'], data=[]]
else:
return super(DiscreteLp, self)._dist(x, y)
|
def xpartial(func, *xargs, **xkwargs):
    """
    Like :func:`functools.partial`, but can take an :class:`XObject`
    placeholder that will be replaced with the first positional argument
    when the partially applied function is called.
    Useful when the function's positional arguments' order doesn't fit your
    situation, e.g.:
    >>> reverse_range = xpartial(range, X, 0, -1)
    >>> reverse_range(5)
    [5, 4, 3, 2, 1]
    It can also be used to transform the positional argument to a keyword
    argument, which can come in handy inside a *pipe*::
        xpartial(objects.get, id=X)
    Also the XObjects are evaluated, which can be used for some sort of
    destructuring of the argument::
        xpartial(somefunc, name=X.name, number=X.contacts['number'])
    Lastly, unlike :func:`functools.partial`, this creates a regular function
    which will bind to classes (like the ``curry`` function from
    ``django.utils.functional``).
    """
    # Decided once at partial-application time: does any positional or
    # keyword argument contain an XObject placeholder?
    any_x = any(isinstance(a, XObject) for a in xargs + tuple(xkwargs.values()))
    # Evaluate an XObject against `value` (via the ~ unwrap operator),
    # or pass a plain argument through untouched.
    use = lambda x, value: (~x)(value) if isinstance(x, XObject) else x
    # Only copy the wrapper attributes that actually exist on `func`
    # (builtins may lack __module__/__qualname__ etc.).
    @wraps(func, assigned=filter(partial(hasattr, func), WRAPPER_ASSIGNMENTS))
    def xpartially_applied(*func_args, **func_kwargs):
        if any_x:
            # Placeholders are substituted with the *first* positional
            # argument of this call; it is consumed and not passed on.
            if not func_args:
                raise ValueError('Function "%s" partially applied with an '
                    'X placeholder but called with no positional arguments.'
                    % get_name(func))
            first = func_args[0]
            rest = func_args[1:]
            args = tuple(use(x, first) for x in xargs) + rest
            kwargs = dict((k, use(x, first)) for k, x in dict_items(xkwargs))
            # Call-time keyword arguments override partially-applied ones.
            kwargs.update(func_kwargs)
        else:
            # No placeholders: behave exactly like functools.partial.
            args = xargs + func_args
            kwargs = dict(xkwargs, **func_kwargs)
        return func(*args, **kwargs)
    # Lazy repr-style name, e.g. "range(X, 0, -1)".
    name = lambda: '%s(%s)' % (get_name(func), repr_args(*xargs, **xkwargs))
    return set_name(name, xpartially_applied)
|
def function[xpartial, parameter[func]]:
constant[
Like :func:`functools.partial`, but can take an :class:`XObject`
placeholder that will be replaced with the first positional argument
when the partially applied function is called.
Useful when the function's positional arguments' order doesn't fit your
situation, e.g.:
>>> reverse_range = xpartial(range, X, 0, -1)
>>> reverse_range(5)
[5, 4, 3, 2, 1]
It can also be used to transform the positional argument to a keyword
argument, which can come in handy inside a *pipe*::
xpartial(objects.get, id=X)
Also the XObjects are evaluated, which can be used for some sort of
destructuring of the argument::
xpartial(somefunc, name=X.name, number=X.contacts['number'])
Lastly, unlike :func:`functools.partial`, this creates a regular function
which will bind to classes (like the ``curry`` function from
``django.utils.functional``).
]
variable[any_x] assign[=] call[name[any], parameter[<ast.GeneratorExp object at 0x7da18f00e680>]]
variable[use] assign[=] <ast.Lambda object at 0x7da18f00c2b0>
def function[xpartially_applied, parameter[]]:
if name[any_x] begin[:]
if <ast.UnaryOp object at 0x7da18f00d180> begin[:]
<ast.Raise object at 0x7da18f00e2c0>
variable[first] assign[=] call[name[func_args]][constant[0]]
variable[rest] assign[=] call[name[func_args]][<ast.Slice object at 0x7da20e9b04f0>]
variable[args] assign[=] binary_operation[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da20e9b2860>]] + name[rest]]
variable[kwargs] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da20e9b08b0>]]
call[name[kwargs].update, parameter[name[func_kwargs]]]
return[call[name[func], parameter[<ast.Starred object at 0x7da20e9b39a0>]]]
variable[name] assign[=] <ast.Lambda object at 0x7da2054a6b90>
return[call[name[set_name], parameter[name[name], name[xpartially_applied]]]]
|
keyword[def] identifier[xpartial] ( identifier[func] ,* identifier[xargs] ,** identifier[xkwargs] ):
literal[string]
identifier[any_x] = identifier[any] ( identifier[isinstance] ( identifier[a] , identifier[XObject] ) keyword[for] identifier[a] keyword[in] identifier[xargs] + identifier[tuple] ( identifier[xkwargs] . identifier[values] ()))
identifier[use] = keyword[lambda] identifier[x] , identifier[value] :(~ identifier[x] )( identifier[value] ) keyword[if] identifier[isinstance] ( identifier[x] , identifier[XObject] ) keyword[else] identifier[x]
@ identifier[wraps] ( identifier[func] , identifier[assigned] = identifier[filter] ( identifier[partial] ( identifier[hasattr] , identifier[func] ), identifier[WRAPPER_ASSIGNMENTS] ))
keyword[def] identifier[xpartially_applied] (* identifier[func_args] ,** identifier[func_kwargs] ):
keyword[if] identifier[any_x] :
keyword[if] keyword[not] identifier[func_args] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string]
% identifier[get_name] ( identifier[func] ))
identifier[first] = identifier[func_args] [ literal[int] ]
identifier[rest] = identifier[func_args] [ literal[int] :]
identifier[args] = identifier[tuple] ( identifier[use] ( identifier[x] , identifier[first] ) keyword[for] identifier[x] keyword[in] identifier[xargs] )+ identifier[rest]
identifier[kwargs] = identifier[dict] (( identifier[k] , identifier[use] ( identifier[x] , identifier[first] )) keyword[for] identifier[k] , identifier[x] keyword[in] identifier[dict_items] ( identifier[xkwargs] ))
identifier[kwargs] . identifier[update] ( identifier[func_kwargs] )
keyword[else] :
identifier[args] = identifier[xargs] + identifier[func_args]
identifier[kwargs] = identifier[dict] ( identifier[xkwargs] ,** identifier[func_kwargs] )
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
identifier[name] = keyword[lambda] : literal[string] %( identifier[get_name] ( identifier[func] ), identifier[repr_args] (* identifier[xargs] ,** identifier[xkwargs] ))
keyword[return] identifier[set_name] ( identifier[name] , identifier[xpartially_applied] )
|
def xpartial(func, *xargs, **xkwargs):
"""
Like :func:`functools.partial`, but can take an :class:`XObject`
placeholder that will be replaced with the first positional argument
when the partially applied function is called.
Useful when the function's positional arguments' order doesn't fit your
situation, e.g.:
>>> reverse_range = xpartial(range, X, 0, -1)
>>> reverse_range(5)
[5, 4, 3, 2, 1]
It can also be used to transform the positional argument to a keyword
argument, which can come in handy inside a *pipe*::
xpartial(objects.get, id=X)
Also the XObjects are evaluated, which can be used for some sort of
destructuring of the argument::
xpartial(somefunc, name=X.name, number=X.contacts['number'])
Lastly, unlike :func:`functools.partial`, this creates a regular function
which will bind to classes (like the ``curry`` function from
``django.utils.functional``).
"""
any_x = any((isinstance(a, XObject) for a in xargs + tuple(xkwargs.values())))
use = lambda x, value: (~x)(value) if isinstance(x, XObject) else x
@wraps(func, assigned=filter(partial(hasattr, func), WRAPPER_ASSIGNMENTS))
def xpartially_applied(*func_args, **func_kwargs):
if any_x:
if not func_args:
raise ValueError('Function "%s" partially applied with an X placeholder but called with no positional arguments.' % get_name(func)) # depends on [control=['if'], data=[]]
first = func_args[0]
rest = func_args[1:]
args = tuple((use(x, first) for x in xargs)) + rest
kwargs = dict(((k, use(x, first)) for (k, x) in dict_items(xkwargs)))
kwargs.update(func_kwargs) # depends on [control=['if'], data=[]]
else:
args = xargs + func_args
kwargs = dict(xkwargs, **func_kwargs)
return func(*args, **kwargs)
name = lambda : '%s(%s)' % (get_name(func), repr_args(*xargs, **xkwargs))
return set_name(name, xpartially_applied)
|
def generic_adjust(colors, light):
    """Generic color adjustment for themers.

    :param colors: mutable 16-entry palette of color strings; mutated
        in place.
    :param light: if true, build a light-background palette, otherwise a
        dark one.
    :returns: the adjusted ``colors`` list.
    """
    if light:
        # Mute and darken every palette entry.  The original loop only
        # rebound the loop variable, so the adjusted color was silently
        # discarded; write the result back into the list instead.
        for index, color in enumerate(colors):
            color = util.saturate_color(color, 0.60)
            colors[index] = util.darken_color(color, 0.5)

        # Background, foreground and their bold variants.
        colors[0] = util.lighten_color(colors[0], 0.95)
        colors[7] = util.darken_color(colors[0], 0.75)
        colors[8] = util.darken_color(colors[0], 0.25)
        colors[15] = colors[7]

    else:
        colors[0] = util.darken_color(colors[0], 0.80)
        colors[7] = util.lighten_color(colors[0], 0.75)
        colors[8] = util.lighten_color(colors[0], 0.25)
        colors[15] = colors[7]

    return colors
|
def function[generic_adjust, parameter[colors, light]]:
constant[Generic color adjustment for themers.]
if name[light] begin[:]
for taget[name[color]] in starred[name[colors]] begin[:]
variable[color] assign[=] call[name[util].saturate_color, parameter[name[color], constant[0.6]]]
variable[color] assign[=] call[name[util].darken_color, parameter[name[color], constant[0.5]]]
call[name[colors]][constant[0]] assign[=] call[name[util].lighten_color, parameter[call[name[colors]][constant[0]], constant[0.95]]]
call[name[colors]][constant[7]] assign[=] call[name[util].darken_color, parameter[call[name[colors]][constant[0]], constant[0.75]]]
call[name[colors]][constant[8]] assign[=] call[name[util].darken_color, parameter[call[name[colors]][constant[0]], constant[0.25]]]
call[name[colors]][constant[15]] assign[=] call[name[colors]][constant[7]]
return[name[colors]]
|
keyword[def] identifier[generic_adjust] ( identifier[colors] , identifier[light] ):
literal[string]
keyword[if] identifier[light] :
keyword[for] identifier[color] keyword[in] identifier[colors] :
identifier[color] = identifier[util] . identifier[saturate_color] ( identifier[color] , literal[int] )
identifier[color] = identifier[util] . identifier[darken_color] ( identifier[color] , literal[int] )
identifier[colors] [ literal[int] ]= identifier[util] . identifier[lighten_color] ( identifier[colors] [ literal[int] ], literal[int] )
identifier[colors] [ literal[int] ]= identifier[util] . identifier[darken_color] ( identifier[colors] [ literal[int] ], literal[int] )
identifier[colors] [ literal[int] ]= identifier[util] . identifier[darken_color] ( identifier[colors] [ literal[int] ], literal[int] )
identifier[colors] [ literal[int] ]= identifier[colors] [ literal[int] ]
keyword[else] :
identifier[colors] [ literal[int] ]= identifier[util] . identifier[darken_color] ( identifier[colors] [ literal[int] ], literal[int] )
identifier[colors] [ literal[int] ]= identifier[util] . identifier[lighten_color] ( identifier[colors] [ literal[int] ], literal[int] )
identifier[colors] [ literal[int] ]= identifier[util] . identifier[lighten_color] ( identifier[colors] [ literal[int] ], literal[int] )
identifier[colors] [ literal[int] ]= identifier[colors] [ literal[int] ]
keyword[return] identifier[colors]
|
def generic_adjust(colors, light):
"""Generic color adjustment for themers."""
if light:
for color in colors:
color = util.saturate_color(color, 0.6)
color = util.darken_color(color, 0.5) # depends on [control=['for'], data=['color']]
colors[0] = util.lighten_color(colors[0], 0.95)
colors[7] = util.darken_color(colors[0], 0.75)
colors[8] = util.darken_color(colors[0], 0.25)
colors[15] = colors[7] # depends on [control=['if'], data=[]]
else:
colors[0] = util.darken_color(colors[0], 0.8)
colors[7] = util.lighten_color(colors[0], 0.75)
colors[8] = util.lighten_color(colors[0], 0.25)
colors[15] = colors[7]
return colors
|
def parse_hh_mm_ss(self):
    """Parses raw time
    :return: Time parsed
    """
    # Choose the strptime format from the number of ":" separators:
    # two -> hh:mm:ss, one -> mm:ss, none -> seconds only.
    colon_count = self.raw.count(":")
    if colon_count == 2:
        time_format = "%H:%M:%S"
    elif colon_count == 1:
        time_format = "%M:%S"
    else:
        time_format = "%S"
    return datetime.strptime(str(self.raw).strip(), time_format).time()
|
def function[parse_hh_mm_ss, parameter[self]]:
constant[Parses raw time
:return: Time parsed
]
variable[split_count] assign[=] call[name[self].raw.count, parameter[constant[:]]]
if compare[name[split_count] equal[==] constant[2]] begin[:]
return[call[call[name[datetime].strptime, parameter[call[call[name[str], parameter[name[self].raw]].strip, parameter[]], constant[%H:%M:%S]]].time, parameter[]]]
return[call[call[name[datetime].strptime, parameter[call[call[name[str], parameter[name[self].raw]].strip, parameter[]], constant[%S]]].time, parameter[]]]
|
keyword[def] identifier[parse_hh_mm_ss] ( identifier[self] ):
literal[string]
identifier[split_count] = identifier[self] . identifier[raw] . identifier[count] ( literal[string] )
keyword[if] identifier[split_count] == literal[int] :
keyword[return] identifier[datetime] . identifier[strptime] ( identifier[str] ( identifier[self] . identifier[raw] ). identifier[strip] (), literal[string] ). identifier[time] ()
keyword[elif] identifier[split_count] == literal[int] :
keyword[return] identifier[datetime] . identifier[strptime] ( identifier[str] ( identifier[self] . identifier[raw] ). identifier[strip] (), literal[string] ). identifier[time] ()
keyword[return] identifier[datetime] . identifier[strptime] ( identifier[str] ( identifier[self] . identifier[raw] ). identifier[strip] (), literal[string] ). identifier[time] ()
|
def parse_hh_mm_ss(self):
"""Parses raw time
:return: Time parsed
"""
split_count = self.raw.count(':')
if split_count == 2: # hh:mm:ss
return datetime.strptime(str(self.raw).strip(), '%H:%M:%S').time() # depends on [control=['if'], data=[]]
elif split_count == 1: # mm:ss
return datetime.strptime(str(self.raw).strip(), '%M:%S').time() # depends on [control=['if'], data=[]]
return datetime.strptime(str(self.raw).strip(), '%S').time()
|
def document_fromstring(html, guess_charset=True, parser=None):
    """Parse a whole HTML document and return its root element.

    Falls back to the module-level ``html_parser`` when no parser is
    supplied; ``guess_charset`` is forwarded as ``useChardet``.
    """
    # Reject non-string input early with the historical error message.
    if not isinstance(html, _strings):
        raise TypeError('string required')
    chosen_parser = html_parser if parser is None else parser
    tree = chosen_parser.parse(html, useChardet=guess_charset)
    return tree.getroot()
|
def function[document_fromstring, parameter[html, guess_charset, parser]]:
constant[Parse a whole document into a string.]
if <ast.UnaryOp object at 0x7da18f8116c0> begin[:]
<ast.Raise object at 0x7da18f8132b0>
if compare[name[parser] is constant[None]] begin[:]
variable[parser] assign[=] name[html_parser]
return[call[call[name[parser].parse, parameter[name[html]]].getroot, parameter[]]]
|
keyword[def] identifier[document_fromstring] ( identifier[html] , identifier[guess_charset] = keyword[True] , identifier[parser] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[html] , identifier[_strings] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[parser] keyword[is] keyword[None] :
identifier[parser] = identifier[html_parser]
keyword[return] identifier[parser] . identifier[parse] ( identifier[html] , identifier[useChardet] = identifier[guess_charset] ). identifier[getroot] ()
|
def document_fromstring(html, guess_charset=True, parser=None):
"""Parse a whole document into a string."""
if not isinstance(html, _strings):
raise TypeError('string required') # depends on [control=['if'], data=[]]
if parser is None:
parser = html_parser # depends on [control=['if'], data=['parser']]
return parser.parse(html, useChardet=guess_charset).getroot()
|
def login(request, template_name=None, extra_context=None, **kwargs):
    """Logs a user in using the :class:`~openstack_auth.forms.Login` form.

    :param request: the incoming HTTP request.
    :param template_name: optional template override; defaults to
        ``auth/login.html`` (``auth/_login.html`` for AJAX requests).
    :param extra_context: optional extra template context dict.
    :param kwargs: forwarded to ``django_auth_views.login``.
    :returns: the rendered login response, or a redirect for WebSSO /
        already-authenticated users.
    """
    # If the user enabled websso and the default redirect
    # redirect to the default websso url
    # NOTE: is_websso_enabled must be *called* here; the bare function
    # object is always truthy, which made this check a no-op (the called
    # form is used everywhere else in this view).
    if (request.method == 'GET' and utils.is_websso_enabled() and
            utils.is_websso_default_redirect()):
        protocol = utils.get_websso_default_redirect_protocol()
        region = utils.get_websso_default_redirect_region()
        origin = utils.build_absolute_uri(request, '/auth/websso/')
        url = ('%s/auth/OS-FEDERATION/websso/%s?origin=%s' %
               (region, protocol, origin))
        return shortcuts.redirect(url)
    # If the user enabled websso and selects default protocol
    # from the dropdown, We need to redirect user to the websso url
    if request.method == 'POST':
        auth_type = request.POST.get('auth_type', 'credentials')
        if utils.is_websso_enabled() and auth_type != 'credentials':
            region_id = request.POST.get('region')
            auth_url = getattr(settings, 'WEBSSO_KEYSTONE_URL',
                               forms.get_region_endpoint(region_id))
            url = utils.get_websso_url(request, auth_url, auth_type)
            return shortcuts.redirect(url)
    if not request.is_ajax():
        # If the user is already authenticated, redirect them to the
        # dashboard straight away, unless the 'next' parameter is set as it
        # usually indicates requesting access to a page that requires different
        # permissions.
        if (request.user.is_authenticated and
                auth.REDIRECT_FIELD_NAME not in request.GET and
                auth.REDIRECT_FIELD_NAME not in request.POST):
            return shortcuts.redirect(settings.LOGIN_REDIRECT_URL)
    # Get our initial region for the form.
    initial = {}
    current_region = request.session.get('region_endpoint', None)
    requested_region = request.GET.get('region', None)
    regions = dict(getattr(settings, "AVAILABLE_REGIONS", []))
    if requested_region in regions and requested_region != current_region:
        initial.update({'region': requested_region})
    if request.method == "POST":
        form = functional.curry(forms.Login)
    else:
        form = functional.curry(forms.Login, initial=initial)
    if extra_context is None:
        extra_context = {'redirect_field_name': auth.REDIRECT_FIELD_NAME}
    extra_context['csrf_failure'] = request.GET.get('csrf_failure')
    choices = getattr(settings, 'WEBSSO_CHOICES', ())
    # Only show the SSO dropdown when more than one auth choice exists.
    extra_context['show_sso_opts'] = (utils.is_websso_enabled() and
                                      len(choices) > 1)
    if not template_name:
        if request.is_ajax():
            template_name = 'auth/_login.html'
            extra_context['hide'] = True
        else:
            template_name = 'auth/login.html'
    res = django_auth_views.login(request,
                                  template_name=template_name,
                                  authentication_form=form,
                                  extra_context=extra_context,
                                  **kwargs)
    # Save the region in the cookie, this is used as the default
    # selected region next time the Login form loads.
    if request.method == "POST":
        utils.set_response_cookie(res, 'login_region',
                                  request.POST.get('region', ''))
        utils.set_response_cookie(res, 'login_domain',
                                  request.POST.get('domain', ''))
    # Set the session data here because django's session key rotation
    # will erase it if we set it earlier.
    if request.user.is_authenticated:
        auth_user.set_session_from_user(request, request.user)
        regions = dict(forms.Login.get_region_choices())
        region = request.user.endpoint
        login_region = request.POST.get('region')
        region_name = regions.get(login_region)
        request.session['region_endpoint'] = region
        request.session['region_name'] = region_name
        expiration_time = request.user.time_until_expiration()
        threshold_days = getattr(
            settings, 'PASSWORD_EXPIRES_WARNING_THRESHOLD_DAYS', -1)
        # Warn when the password expires within the configured threshold.
        if expiration_time is not None and \
                expiration_time.days <= threshold_days:
            expiration_time = str(expiration_time).rsplit(':', 1)[0]
            msg = (_('Please consider changing your password, it will expire'
                     ' in %s minutes') %
                   expiration_time).replace(':', ' Hours and ')
            messages.warning(request, msg)
    return res
|
def function[login, parameter[request, template_name, extra_context]]:
constant[Logs a user in using the :class:`~openstack_auth.forms.Login` form.]
if <ast.BoolOp object at 0x7da1b1916950> begin[:]
variable[protocol] assign[=] call[name[utils].get_websso_default_redirect_protocol, parameter[]]
variable[region] assign[=] call[name[utils].get_websso_default_redirect_region, parameter[]]
variable[origin] assign[=] call[name[utils].build_absolute_uri, parameter[name[request], constant[/auth/websso/]]]
variable[url] assign[=] binary_operation[constant[%s/auth/OS-FEDERATION/websso/%s?origin=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1916a10>, <ast.Name object at 0x7da1b1914130>, <ast.Name object at 0x7da1b19173d0>]]]
return[call[name[shortcuts].redirect, parameter[name[url]]]]
if compare[name[request].method equal[==] constant[POST]] begin[:]
variable[auth_type] assign[=] call[name[request].POST.get, parameter[constant[auth_type], constant[credentials]]]
if <ast.BoolOp object at 0x7da1b1915cf0> begin[:]
variable[region_id] assign[=] call[name[request].POST.get, parameter[constant[region]]]
variable[auth_url] assign[=] call[name[getattr], parameter[name[settings], constant[WEBSSO_KEYSTONE_URL], call[name[forms].get_region_endpoint, parameter[name[region_id]]]]]
variable[url] assign[=] call[name[utils].get_websso_url, parameter[name[request], name[auth_url], name[auth_type]]]
return[call[name[shortcuts].redirect, parameter[name[url]]]]
if <ast.UnaryOp object at 0x7da1b1916f20> begin[:]
if <ast.BoolOp object at 0x7da1b1916ef0> begin[:]
return[call[name[shortcuts].redirect, parameter[name[settings].LOGIN_REDIRECT_URL]]]
variable[initial] assign[=] dictionary[[], []]
variable[current_region] assign[=] call[name[request].session.get, parameter[constant[region_endpoint], constant[None]]]
variable[requested_region] assign[=] call[name[request].GET.get, parameter[constant[region], constant[None]]]
variable[regions] assign[=] call[name[dict], parameter[call[name[getattr], parameter[name[settings], constant[AVAILABLE_REGIONS], list[[]]]]]]
if <ast.BoolOp object at 0x7da1b1916c50> begin[:]
call[name[initial].update, parameter[dictionary[[<ast.Constant object at 0x7da1b19161a0>], [<ast.Name object at 0x7da1b1916110>]]]]
if compare[name[request].method equal[==] constant[POST]] begin[:]
variable[form] assign[=] call[name[functional].curry, parameter[name[forms].Login]]
if compare[name[extra_context] is constant[None]] begin[:]
variable[extra_context] assign[=] dictionary[[<ast.Constant object at 0x7da1b1916170>], [<ast.Attribute object at 0x7da1b1914460>]]
call[name[extra_context]][constant[csrf_failure]] assign[=] call[name[request].GET.get, parameter[constant[csrf_failure]]]
variable[choices] assign[=] call[name[getattr], parameter[name[settings], constant[WEBSSO_CHOICES], tuple[[]]]]
call[name[extra_context]][constant[show_sso_opts]] assign[=] <ast.BoolOp object at 0x7da18ede6c50>
if <ast.UnaryOp object at 0x7da1b18bd120> begin[:]
if call[name[request].is_ajax, parameter[]] begin[:]
variable[template_name] assign[=] constant[auth/_login.html]
call[name[extra_context]][constant[hide]] assign[=] constant[True]
variable[res] assign[=] call[name[django_auth_views].login, parameter[name[request]]]
if compare[name[request].method equal[==] constant[POST]] begin[:]
call[name[utils].set_response_cookie, parameter[name[res], constant[login_region], call[name[request].POST.get, parameter[constant[region], constant[]]]]]
call[name[utils].set_response_cookie, parameter[name[res], constant[login_domain], call[name[request].POST.get, parameter[constant[domain], constant[]]]]]
if name[request].user.is_authenticated begin[:]
call[name[auth_user].set_session_from_user, parameter[name[request], name[request].user]]
variable[regions] assign[=] call[name[dict], parameter[call[name[forms].Login.get_region_choices, parameter[]]]]
variable[region] assign[=] name[request].user.endpoint
variable[login_region] assign[=] call[name[request].POST.get, parameter[constant[region]]]
variable[region_name] assign[=] call[name[regions].get, parameter[name[login_region]]]
call[name[request].session][constant[region_endpoint]] assign[=] name[region]
call[name[request].session][constant[region_name]] assign[=] name[region_name]
variable[expiration_time] assign[=] call[name[request].user.time_until_expiration, parameter[]]
variable[threshold_days] assign[=] call[name[getattr], parameter[name[settings], constant[PASSWORD_EXPIRES_WARNING_THRESHOLD_DAYS], <ast.UnaryOp object at 0x7da1b19ee110>]]
if <ast.BoolOp object at 0x7da1b19ef490> begin[:]
variable[expiration_time] assign[=] call[call[call[name[str], parameter[name[expiration_time]]].rsplit, parameter[constant[:], constant[1]]]][constant[0]]
variable[msg] assign[=] call[binary_operation[call[name[_], parameter[constant[Please consider changing your password, it will expire in %s minutes]]] <ast.Mod object at 0x7da2590d6920> name[expiration_time]].replace, parameter[constant[:], constant[ Hours and ]]]
call[name[messages].warning, parameter[name[request], name[msg]]]
return[name[res]]
|
keyword[def] identifier[login] ( identifier[request] , identifier[template_name] = keyword[None] , identifier[extra_context] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] ( identifier[request] . identifier[method] == literal[string] keyword[and] identifier[utils] . identifier[is_websso_enabled] keyword[and]
identifier[utils] . identifier[is_websso_default_redirect] ()):
identifier[protocol] = identifier[utils] . identifier[get_websso_default_redirect_protocol] ()
identifier[region] = identifier[utils] . identifier[get_websso_default_redirect_region] ()
identifier[origin] = identifier[utils] . identifier[build_absolute_uri] ( identifier[request] , literal[string] )
identifier[url] =( literal[string] %
( identifier[region] , identifier[protocol] , identifier[origin] ))
keyword[return] identifier[shortcuts] . identifier[redirect] ( identifier[url] )
keyword[if] identifier[request] . identifier[method] == literal[string] :
identifier[auth_type] = identifier[request] . identifier[POST] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[utils] . identifier[is_websso_enabled] () keyword[and] identifier[auth_type] != literal[string] :
identifier[region_id] = identifier[request] . identifier[POST] . identifier[get] ( literal[string] )
identifier[auth_url] = identifier[getattr] ( identifier[settings] , literal[string] ,
identifier[forms] . identifier[get_region_endpoint] ( identifier[region_id] ))
identifier[url] = identifier[utils] . identifier[get_websso_url] ( identifier[request] , identifier[auth_url] , identifier[auth_type] )
keyword[return] identifier[shortcuts] . identifier[redirect] ( identifier[url] )
keyword[if] keyword[not] identifier[request] . identifier[is_ajax] ():
keyword[if] ( identifier[request] . identifier[user] . identifier[is_authenticated] keyword[and]
identifier[auth] . identifier[REDIRECT_FIELD_NAME] keyword[not] keyword[in] identifier[request] . identifier[GET] keyword[and]
identifier[auth] . identifier[REDIRECT_FIELD_NAME] keyword[not] keyword[in] identifier[request] . identifier[POST] ):
keyword[return] identifier[shortcuts] . identifier[redirect] ( identifier[settings] . identifier[LOGIN_REDIRECT_URL] )
identifier[initial] ={}
identifier[current_region] = identifier[request] . identifier[session] . identifier[get] ( literal[string] , keyword[None] )
identifier[requested_region] = identifier[request] . identifier[GET] . identifier[get] ( literal[string] , keyword[None] )
identifier[regions] = identifier[dict] ( identifier[getattr] ( identifier[settings] , literal[string] ,[]))
keyword[if] identifier[requested_region] keyword[in] identifier[regions] keyword[and] identifier[requested_region] != identifier[current_region] :
identifier[initial] . identifier[update] ({ literal[string] : identifier[requested_region] })
keyword[if] identifier[request] . identifier[method] == literal[string] :
identifier[form] = identifier[functional] . identifier[curry] ( identifier[forms] . identifier[Login] )
keyword[else] :
identifier[form] = identifier[functional] . identifier[curry] ( identifier[forms] . identifier[Login] , identifier[initial] = identifier[initial] )
keyword[if] identifier[extra_context] keyword[is] keyword[None] :
identifier[extra_context] ={ literal[string] : identifier[auth] . identifier[REDIRECT_FIELD_NAME] }
identifier[extra_context] [ literal[string] ]= identifier[request] . identifier[GET] . identifier[get] ( literal[string] )
identifier[choices] = identifier[getattr] ( identifier[settings] , literal[string] ,())
identifier[extra_context] [ literal[string] ]=( identifier[utils] . identifier[is_websso_enabled] () keyword[and]
identifier[len] ( identifier[choices] )> literal[int] )
keyword[if] keyword[not] identifier[template_name] :
keyword[if] identifier[request] . identifier[is_ajax] ():
identifier[template_name] = literal[string]
identifier[extra_context] [ literal[string] ]= keyword[True]
keyword[else] :
identifier[template_name] = literal[string]
identifier[res] = identifier[django_auth_views] . identifier[login] ( identifier[request] ,
identifier[template_name] = identifier[template_name] ,
identifier[authentication_form] = identifier[form] ,
identifier[extra_context] = identifier[extra_context] ,
** identifier[kwargs] )
keyword[if] identifier[request] . identifier[method] == literal[string] :
identifier[utils] . identifier[set_response_cookie] ( identifier[res] , literal[string] ,
identifier[request] . identifier[POST] . identifier[get] ( literal[string] , literal[string] ))
identifier[utils] . identifier[set_response_cookie] ( identifier[res] , literal[string] ,
identifier[request] . identifier[POST] . identifier[get] ( literal[string] , literal[string] ))
keyword[if] identifier[request] . identifier[user] . identifier[is_authenticated] :
identifier[auth_user] . identifier[set_session_from_user] ( identifier[request] , identifier[request] . identifier[user] )
identifier[regions] = identifier[dict] ( identifier[forms] . identifier[Login] . identifier[get_region_choices] ())
identifier[region] = identifier[request] . identifier[user] . identifier[endpoint]
identifier[login_region] = identifier[request] . identifier[POST] . identifier[get] ( literal[string] )
identifier[region_name] = identifier[regions] . identifier[get] ( identifier[login_region] )
identifier[request] . identifier[session] [ literal[string] ]= identifier[region]
identifier[request] . identifier[session] [ literal[string] ]= identifier[region_name]
identifier[expiration_time] = identifier[request] . identifier[user] . identifier[time_until_expiration] ()
identifier[threshold_days] = identifier[getattr] (
identifier[settings] , literal[string] ,- literal[int] )
keyword[if] identifier[expiration_time] keyword[is] keyword[not] keyword[None] keyword[and] identifier[expiration_time] . identifier[days] <= identifier[threshold_days] :
identifier[expiration_time] = identifier[str] ( identifier[expiration_time] ). identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]
identifier[msg] =( identifier[_] ( literal[string]
literal[string] )%
identifier[expiration_time] ). identifier[replace] ( literal[string] , literal[string] )
identifier[messages] . identifier[warning] ( identifier[request] , identifier[msg] )
keyword[return] identifier[res]
|
def login(request, template_name=None, extra_context=None, **kwargs):
"""Logs a user in using the :class:`~openstack_auth.forms.Login` form."""
# If the user enabled websso and the default redirect
# redirect to the default websso url
if request.method == 'GET' and utils.is_websso_enabled and utils.is_websso_default_redirect():
protocol = utils.get_websso_default_redirect_protocol()
region = utils.get_websso_default_redirect_region()
origin = utils.build_absolute_uri(request, '/auth/websso/')
url = '%s/auth/OS-FEDERATION/websso/%s?origin=%s' % (region, protocol, origin)
return shortcuts.redirect(url) # depends on [control=['if'], data=[]]
# If the user enabled websso and selects default protocol
# from the dropdown, We need to redirect user to the websso url
if request.method == 'POST':
auth_type = request.POST.get('auth_type', 'credentials')
if utils.is_websso_enabled() and auth_type != 'credentials':
region_id = request.POST.get('region')
auth_url = getattr(settings, 'WEBSSO_KEYSTONE_URL', forms.get_region_endpoint(region_id))
url = utils.get_websso_url(request, auth_url, auth_type)
return shortcuts.redirect(url) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not request.is_ajax():
# If the user is already authenticated, redirect them to the
# dashboard straight away, unless the 'next' parameter is set as it
# usually indicates requesting access to a page that requires different
# permissions.
if request.user.is_authenticated and auth.REDIRECT_FIELD_NAME not in request.GET and (auth.REDIRECT_FIELD_NAME not in request.POST):
return shortcuts.redirect(settings.LOGIN_REDIRECT_URL) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Get our initial region for the form.
initial = {}
current_region = request.session.get('region_endpoint', None)
requested_region = request.GET.get('region', None)
regions = dict(getattr(settings, 'AVAILABLE_REGIONS', []))
if requested_region in regions and requested_region != current_region:
initial.update({'region': requested_region}) # depends on [control=['if'], data=[]]
if request.method == 'POST':
form = functional.curry(forms.Login) # depends on [control=['if'], data=[]]
else:
form = functional.curry(forms.Login, initial=initial)
if extra_context is None:
extra_context = {'redirect_field_name': auth.REDIRECT_FIELD_NAME} # depends on [control=['if'], data=['extra_context']]
extra_context['csrf_failure'] = request.GET.get('csrf_failure')
choices = getattr(settings, 'WEBSSO_CHOICES', ())
extra_context['show_sso_opts'] = utils.is_websso_enabled() and len(choices) > 1
if not template_name:
if request.is_ajax():
template_name = 'auth/_login.html'
extra_context['hide'] = True # depends on [control=['if'], data=[]]
else:
template_name = 'auth/login.html' # depends on [control=['if'], data=[]]
res = django_auth_views.login(request, template_name=template_name, authentication_form=form, extra_context=extra_context, **kwargs)
# Save the region in the cookie, this is used as the default
# selected region next time the Login form loads.
if request.method == 'POST':
utils.set_response_cookie(res, 'login_region', request.POST.get('region', ''))
utils.set_response_cookie(res, 'login_domain', request.POST.get('domain', '')) # depends on [control=['if'], data=[]]
# Set the session data here because django's session key rotation
# will erase it if we set it earlier.
if request.user.is_authenticated:
auth_user.set_session_from_user(request, request.user)
regions = dict(forms.Login.get_region_choices())
region = request.user.endpoint
login_region = request.POST.get('region')
region_name = regions.get(login_region)
request.session['region_endpoint'] = region
request.session['region_name'] = region_name
expiration_time = request.user.time_until_expiration()
threshold_days = getattr(settings, 'PASSWORD_EXPIRES_WARNING_THRESHOLD_DAYS', -1)
if expiration_time is not None and expiration_time.days <= threshold_days:
expiration_time = str(expiration_time).rsplit(':', 1)[0]
msg = (_('Please consider changing your password, it will expire in %s minutes') % expiration_time).replace(':', ' Hours and ')
messages.warning(request, msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return res
|
def SetValue(self, row, col, value, refresh=True):
        """Store *value* as the code of cell (row, col) in the current table.

        Line breaks that were inserted to work around long-line limits are
        merged away first.  The cell is only updated when the merged code
        differs from the code currently stored there.
        """
        # Re-join code that was split across lines because of line length.
        merged_code = value.replace("\n", "")
        cell_key = (row, col, self.grid.current_table)
        previous_code = self.grid.code_array(cell_key)
        if previous_code is None:
            previous_code = ""
        if merged_code != previous_code:
            self.grid.actions.set_code(cell_key, merged_code)
|
def function[SetValue, parameter[self, row, col, value, refresh]]:
constant[Set the value of a cell, merge line breaks]
variable[value] assign[=] call[constant[].join, parameter[call[name[value].split, parameter[constant[
]]]]]
variable[key] assign[=] tuple[[<ast.Name object at 0x7da1b1518220>, <ast.Name object at 0x7da1b151b910>, <ast.Attribute object at 0x7da1b151b1f0>]]
variable[old_code] assign[=] call[name[self].grid.code_array, parameter[name[key]]]
if compare[name[old_code] is constant[None]] begin[:]
variable[old_code] assign[=] constant[]
if compare[name[value] not_equal[!=] name[old_code]] begin[:]
call[name[self].grid.actions.set_code, parameter[name[key], name[value]]]
|
keyword[def] identifier[SetValue] ( identifier[self] , identifier[row] , identifier[col] , identifier[value] , identifier[refresh] = keyword[True] ):
literal[string]
identifier[value] = literal[string] . identifier[join] ( identifier[value] . identifier[split] ( literal[string] ))
identifier[key] = identifier[row] , identifier[col] , identifier[self] . identifier[grid] . identifier[current_table]
identifier[old_code] = identifier[self] . identifier[grid] . identifier[code_array] ( identifier[key] )
keyword[if] identifier[old_code] keyword[is] keyword[None] :
identifier[old_code] = literal[string]
keyword[if] identifier[value] != identifier[old_code] :
identifier[self] . identifier[grid] . identifier[actions] . identifier[set_code] ( identifier[key] , identifier[value] )
|
def SetValue(self, row, col, value, refresh=True):
"""Set the value of a cell, merge line breaks"""
# Join code that has been split because of long line issue
value = ''.join(value.split('\n'))
key = (row, col, self.grid.current_table)
old_code = self.grid.code_array(key)
if old_code is None:
old_code = '' # depends on [control=['if'], data=['old_code']]
if value != old_code:
self.grid.actions.set_code(key, value) # depends on [control=['if'], data=['value']]
|
def pkg_data_filename(resource_name, filename=None):
    """Return the path of a data file installed along with the package.

    ``resource_name`` is resolved relative to the ``tripleohelper``
    package.  When ``filename`` is given it is joined onto the resolved
    resource path; otherwise the resource path itself is returned.
    """
    base_path = pkg_resources.resource_filename(
        tripleohelper.__name__, resource_name)
    if filename is None:
        return base_path
    return os.path.join(base_path, filename)
|
def function[pkg_data_filename, parameter[resource_name, filename]]:
constant[Returns the path of a file installed along the package
]
variable[resource_filename] assign[=] call[name[pkg_resources].resource_filename, parameter[name[tripleohelper].__name__, name[resource_name]]]
if compare[name[filename] is_not constant[None]] begin[:]
variable[resource_filename] assign[=] call[name[os].path.join, parameter[name[resource_filename], name[filename]]]
return[name[resource_filename]]
|
keyword[def] identifier[pkg_data_filename] ( identifier[resource_name] , identifier[filename] = keyword[None] ):
literal[string]
identifier[resource_filename] = identifier[pkg_resources] . identifier[resource_filename] (
identifier[tripleohelper] . identifier[__name__] ,
identifier[resource_name]
)
keyword[if] identifier[filename] keyword[is] keyword[not] keyword[None] :
identifier[resource_filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[resource_filename] , identifier[filename] )
keyword[return] identifier[resource_filename]
|
def pkg_data_filename(resource_name, filename=None):
"""Returns the path of a file installed along the package
"""
resource_filename = pkg_resources.resource_filename(tripleohelper.__name__, resource_name)
if filename is not None:
resource_filename = os.path.join(resource_filename, filename) # depends on [control=['if'], data=['filename']]
return resource_filename
|
def search_modeltypes(self, *models: ModelTypesArg,
                      name: str = 'modeltypes') -> 'Selection':
        """Return a |Selection| object containing only the elements
        currently handling models of the given types.
        >>> from hydpy.core.examples import prepare_full_example_2
        >>> hp, pub, _ = prepare_full_example_2()
        You can pass both |Model| objects and names and, as a keyword
        argument, the name of the newly created |Selection| object:
        >>> test = pub.selections.complete.copy('test')
        >>> from hydpy import prepare_model
        >>> hland_v1 = prepare_model('hland_v1')
        >>> test.search_modeltypes(hland_v1)
        Selection("modeltypes",
                  nodes=(),
                  elements=("land_dill", "land_lahn_1", "land_lahn_2",
                            "land_lahn_3"))
        >>> test.search_modeltypes(
        ...     hland_v1, 'hstream_v1', 'lland_v1', name='MODELTYPES')
        Selection("MODELTYPES",
                  nodes=(),
                  elements=("land_dill", "land_lahn_1", "land_lahn_2",
                            "land_lahn_3", "stream_dill_lahn_2",
                            "stream_lahn_1_lahn_2", "stream_lahn_2_lahn_3"))
        Wrong model specifications result in errors like the following:
        >>> test.search_modeltypes('wrong')
        Traceback (most recent call last):
        ...
        ModuleNotFoundError: While trying to determine the elements of \
selection `test` handling the model defined by the argument(s) `wrong` \
of type(s) `str`, the following error occurred: \
No module named 'hydpy.models.wrong'
        Method |Selection.select_modeltypes| restricts the current selection to
        the one determined with the method the |Selection.search_modeltypes|:
        >>> test.select_modeltypes(hland_v1)
        Selection("test",
                  nodes=(),
                  elements=("land_dill", "land_lahn_1", "land_lahn_2",
                            "land_lahn_3"))
        On the contrary, the method |Selection.deselect_upstream| restricts
        the current selection to all devices not determined by method the
        |Selection.search_upstream|:
        >>> pub.selections.complete.deselect_modeltypes(hland_v1)
        Selection("complete",
                  nodes=(),
                  elements=("stream_dill_lahn_2", "stream_lahn_1_lahn_2",
                            "stream_lahn_2_lahn_3"))
        """
        try:
            # Resolve every argument to a concrete model class; plain names
            # are first turned into |Model| instances.
            model_types = []
            for candidate in models:
                if not isinstance(candidate, modeltools.Model):
                    candidate = importtools.prepare_model(candidate)
                model_types.append(type(candidate))
            model_types = tuple(model_types)
            # Collect all elements whose model is an instance of any of the
            # requested types.
            selection = Selection(name)
            for element in self.elements:
                if isinstance(element.model, model_types):
                    selection.elements += element
            return selection
        except BaseException:
            arg_values = objecttools.enumeration(models)
            arg_types = objecttools.enumeration(
                objecttools.classname(model) for model in models)
            objecttools.augment_excmessage(
                f'While trying to determine the elements of selection '
                f'`{self.name}` handling the model defined by the '
                f'argument(s) `{arg_values}` of type(s) `{arg_types}`')
|
def function[search_modeltypes, parameter[self]]:
constant[Return a |Selection| object containing only the elements
currently handling models of the given types.
>>> from hydpy.core.examples import prepare_full_example_2
>>> hp, pub, _ = prepare_full_example_2()
You can pass both |Model| objects and names and, as a keyword
argument, the name of the newly created |Selection| object:
>>> test = pub.selections.complete.copy('test')
>>> from hydpy import prepare_model
>>> hland_v1 = prepare_model('hland_v1')
>>> test.search_modeltypes(hland_v1)
Selection("modeltypes",
nodes=(),
elements=("land_dill", "land_lahn_1", "land_lahn_2",
"land_lahn_3"))
>>> test.search_modeltypes(
... hland_v1, 'hstream_v1', 'lland_v1', name='MODELTYPES')
Selection("MODELTYPES",
nodes=(),
elements=("land_dill", "land_lahn_1", "land_lahn_2",
"land_lahn_3", "stream_dill_lahn_2",
"stream_lahn_1_lahn_2", "stream_lahn_2_lahn_3"))
Wrong model specifications result in errors like the following:
>>> test.search_modeltypes('wrong')
Traceback (most recent call last):
...
ModuleNotFoundError: While trying to determine the elements of selection `test` handling the model defined by the argument(s) `wrong` of type(s) `str`, the following error occurred: No module named 'hydpy.models.wrong'
Method |Selection.select_modeltypes| restricts the current selection to
the one determined with the method the |Selection.search_modeltypes|:
>>> test.select_modeltypes(hland_v1)
Selection("test",
nodes=(),
elements=("land_dill", "land_lahn_1", "land_lahn_2",
"land_lahn_3"))
On the contrary, the method |Selection.deselect_upstream| restricts
the current selection to all devices not determined by method the
|Selection.search_upstream|:
>>> pub.selections.complete.deselect_modeltypes(hland_v1)
Selection("complete",
nodes=(),
elements=("stream_dill_lahn_2", "stream_lahn_1_lahn_2",
"stream_lahn_2_lahn_3"))
]
<ast.Try object at 0x7da2044c06d0>
|
keyword[def] identifier[search_modeltypes] ( identifier[self] ,* identifier[models] : identifier[ModelTypesArg] ,
identifier[name] : identifier[str] = literal[string] )-> literal[string] :
literal[string]
keyword[try] :
identifier[typelist] =[]
keyword[for] identifier[model] keyword[in] identifier[models] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[model] , identifier[modeltools] . identifier[Model] ):
identifier[model] = identifier[importtools] . identifier[prepare_model] ( identifier[model] )
identifier[typelist] . identifier[append] ( identifier[type] ( identifier[model] ))
identifier[typetuple] = identifier[tuple] ( identifier[typelist] )
identifier[selection] = identifier[Selection] ( identifier[name] )
keyword[for] identifier[element] keyword[in] identifier[self] . identifier[elements] :
keyword[if] identifier[isinstance] ( identifier[element] . identifier[model] , identifier[typetuple] ):
identifier[selection] . identifier[elements] += identifier[element]
keyword[return] identifier[selection]
keyword[except] identifier[BaseException] :
identifier[values] = identifier[objecttools] . identifier[enumeration] ( identifier[models] )
identifier[classes] = identifier[objecttools] . identifier[enumeration] (
identifier[objecttools] . identifier[classname] ( identifier[model] ) keyword[for] identifier[model] keyword[in] identifier[models] )
identifier[objecttools] . identifier[augment_excmessage] (
literal[string]
literal[string]
literal[string] )
|
def search_modeltypes(self, *models: ModelTypesArg, name: str='modeltypes') -> 'Selection':
"""Return a |Selection| object containing only the elements
currently handling models of the given types.
>>> from hydpy.core.examples import prepare_full_example_2
>>> hp, pub, _ = prepare_full_example_2()
You can pass both |Model| objects and names and, as a keyword
argument, the name of the newly created |Selection| object:
>>> test = pub.selections.complete.copy('test')
>>> from hydpy import prepare_model
>>> hland_v1 = prepare_model('hland_v1')
>>> test.search_modeltypes(hland_v1)
Selection("modeltypes",
nodes=(),
elements=("land_dill", "land_lahn_1", "land_lahn_2",
"land_lahn_3"))
>>> test.search_modeltypes(
... hland_v1, 'hstream_v1', 'lland_v1', name='MODELTYPES')
Selection("MODELTYPES",
nodes=(),
elements=("land_dill", "land_lahn_1", "land_lahn_2",
"land_lahn_3", "stream_dill_lahn_2",
"stream_lahn_1_lahn_2", "stream_lahn_2_lahn_3"))
Wrong model specifications result in errors like the following:
>>> test.search_modeltypes('wrong')
Traceback (most recent call last):
...
ModuleNotFoundError: While trying to determine the elements of selection `test` handling the model defined by the argument(s) `wrong` of type(s) `str`, the following error occurred: No module named 'hydpy.models.wrong'
Method |Selection.select_modeltypes| restricts the current selection to
the one determined with the method the |Selection.search_modeltypes|:
>>> test.select_modeltypes(hland_v1)
Selection("test",
nodes=(),
elements=("land_dill", "land_lahn_1", "land_lahn_2",
"land_lahn_3"))
On the contrary, the method |Selection.deselect_upstream| restricts
the current selection to all devices not determined by method the
|Selection.search_upstream|:
>>> pub.selections.complete.deselect_modeltypes(hland_v1)
Selection("complete",
nodes=(),
elements=("stream_dill_lahn_2", "stream_lahn_1_lahn_2",
"stream_lahn_2_lahn_3"))
"""
try:
typelist = []
for model in models:
if not isinstance(model, modeltools.Model):
model = importtools.prepare_model(model) # depends on [control=['if'], data=[]]
typelist.append(type(model)) # depends on [control=['for'], data=['model']]
typetuple = tuple(typelist)
selection = Selection(name)
for element in self.elements:
if isinstance(element.model, typetuple):
selection.elements += element # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['element']]
return selection # depends on [control=['try'], data=[]]
except BaseException:
values = objecttools.enumeration(models)
classes = objecttools.enumeration((objecttools.classname(model) for model in models))
objecttools.augment_excmessage(f'While trying to determine the elements of selection `{self.name}` handling the model defined by the argument(s) `{values}` of type(s) `{classes}`') # depends on [control=['except'], data=[]]
|
def find_and_apply_best_mask(matrix, version, is_micro, proposed_mask=None):
    """\
    Applies all mask patterns against the provided QR Code matrix (modifying
    it in place) and returns the index of the best data mask pattern.
    ISO/IEC 18004:2015(E) -- 7.8.2 Data mask patterns (page 50)
    ISO/IEC 18004:2015(E) -- 7.8.3 Evaluation of data masking results (page 53)
    ISO/IEC 18004:2015(E) -- 7.8.3.1 Evaluation of QR Code symbols (page 53/54)
    ISO/IEC 18004:2015(E) -- 7.8.3.2 Evaluation of Micro QR Code symbols (page 54/55)
    :param matrix: A matrix.  It is masked in place with the selected (or
        proposed) data mask pattern before this function returns.
    :param int version: A version (Micro) QR Code version constant.
    :param bool is_micro: Indicates if the matrix represents a Micro QR Code
    :param proposed_mask: Optional int to indicate the preferred mask.  When
        given, the evaluation step is skipped and that pattern is applied
        unconditionally.
    :rtype: int
    :return: The index of the applied data mask pattern.
    """
    matrix_size = len(matrix)
    # ISO/IEC 18004:2015 -- 7.8.3.1 Evaluation of QR Code symbols (page 53/54)
    # The data mask pattern which results in the lowest penalty score shall
    # be selected for the symbol.
    is_better = lt
    best_score = _MAX_PENALTY_SCORE
    eval_mask = evaluate_mask
    if is_micro:
        # ISO/IEC 18004:2015(E) - 7.8.3.2 Evaluation of Micro QR Code symbols (page 54/55)
        # The data mask pattern which results in the highest score shall be
        # selected for the symbol.
        is_better = gt
        best_score = -1
        eval_mask = evaluate_micro_mask
    # Matrix to check if a module belongs to the encoding region
    # or function patterns
    function_matrix = make_matrix(version)
    add_finder_patterns(function_matrix, is_micro)
    add_alignment_patterns(function_matrix, version)
    if not is_micro:
        # Mark this module as a function pattern: only cells equal to 0x2
        # count as encoding region (see is_encoding_region below).
        function_matrix[-8][8] = 0x1
    def is_encoding_region(i, j):
        # Masking is restricted to encoding-region modules (value 0x2).
        return function_matrix[i][j] == 0x2
    mask_patterns = get_data_mask_functions(is_micro)
    # If the user supplied a mask pattern, the evaluation step is skipped
    if proposed_mask is not None:
        apply_mask(matrix, mask_patterns[proposed_mask], matrix_size,
                   is_encoding_region)
        return proposed_mask
    # NOTE: assumes mask_patterns is non-empty; otherwise best_pattern below
    # would be unbound.
    for mask_number, mask_pattern in enumerate(mask_patterns):
        apply_mask(matrix, mask_pattern, matrix_size, is_encoding_region)
        # NOTE: DO NOT add format / version info in advance of evaluation
        # See ISO/IEC 18004:2015(E) -- 7.8. Data masking (page 50)
        score = eval_mask(matrix, matrix_size)
        if is_better(score, best_score):
            best_score = score
            best_pattern = mask_number
        # Undo mask
        apply_mask(matrix, mask_pattern, matrix_size, is_encoding_region)
    # The matrix is unmasked at this point (each pattern was applied and then
    # undone above), so applying the winning pattern once masks it correctly.
    apply_mask(matrix, mask_patterns[best_pattern], matrix_size, is_encoding_region)
    return best_pattern
|
def function[find_and_apply_best_mask, parameter[matrix, version, is_micro, proposed_mask]]:
constant[ Applies all mask patterns against the provided QR Code matrix and returns
the best matrix and best pattern.
ISO/IEC 18004:2015(E) -- 7.8.2 Data mask patterns (page 50)
ISO/IEC 18004:2015(E) -- 7.8.3 Evaluation of data masking results (page 53)
ISO/IEC 18004:2015(E) -- 7.8.3.1 Evaluation of QR Code symbols (page 53/54)
ISO/IEC 18004:2015(E) -- 7.8.3.2 Evaluation of Micro QR Code symbols (page 54/55)
:param matrix: A matrix.
:param int version: A version (Micro) QR Code version constant.
:param bool is_micro: Indicates if the matrix represents a Micro QR Code
:param proposed_mask: Optional int to indicate the preferred mask.
:rtype: tuple
:return: A tuple of the best matrix and best data mask pattern index.
]
variable[matrix_size] assign[=] call[name[len], parameter[name[matrix]]]
variable[is_better] assign[=] name[lt]
variable[best_score] assign[=] name[_MAX_PENALTY_SCORE]
variable[eval_mask] assign[=] name[evaluate_mask]
if name[is_micro] begin[:]
variable[is_better] assign[=] name[gt]
variable[best_score] assign[=] <ast.UnaryOp object at 0x7da18bccabc0>
variable[eval_mask] assign[=] name[evaluate_micro_mask]
variable[function_matrix] assign[=] call[name[make_matrix], parameter[name[version]]]
call[name[add_finder_patterns], parameter[name[function_matrix], name[is_micro]]]
call[name[add_alignment_patterns], parameter[name[function_matrix], name[version]]]
if <ast.UnaryOp object at 0x7da18bcc9390> begin[:]
call[call[name[function_matrix]][<ast.UnaryOp object at 0x7da1b0cb1de0>]][constant[8]] assign[=] constant[1]
def function[is_encoding_region, parameter[i, j]]:
return[compare[call[call[name[function_matrix]][name[i]]][name[j]] equal[==] constant[2]]]
variable[mask_patterns] assign[=] call[name[get_data_mask_functions], parameter[name[is_micro]]]
if compare[name[proposed_mask] is_not constant[None]] begin[:]
call[name[apply_mask], parameter[name[matrix], call[name[mask_patterns]][name[proposed_mask]], name[matrix_size], name[is_encoding_region]]]
return[name[proposed_mask]]
for taget[tuple[[<ast.Name object at 0x7da1b0cb0610>, <ast.Name object at 0x7da1b0cb34c0>]]] in starred[call[name[enumerate], parameter[name[mask_patterns]]]] begin[:]
call[name[apply_mask], parameter[name[matrix], name[mask_pattern], name[matrix_size], name[is_encoding_region]]]
variable[score] assign[=] call[name[eval_mask], parameter[name[matrix], name[matrix_size]]]
if call[name[is_better], parameter[name[score], name[best_score]]] begin[:]
variable[best_score] assign[=] name[score]
variable[best_pattern] assign[=] name[mask_number]
call[name[apply_mask], parameter[name[matrix], name[mask_pattern], name[matrix_size], name[is_encoding_region]]]
call[name[apply_mask], parameter[name[matrix], call[name[mask_patterns]][name[best_pattern]], name[matrix_size], name[is_encoding_region]]]
return[name[best_pattern]]
|
keyword[def] identifier[find_and_apply_best_mask] ( identifier[matrix] , identifier[version] , identifier[is_micro] , identifier[proposed_mask] = keyword[None] ):
literal[string]
identifier[matrix_size] = identifier[len] ( identifier[matrix] )
identifier[is_better] = identifier[lt]
identifier[best_score] = identifier[_MAX_PENALTY_SCORE]
identifier[eval_mask] = identifier[evaluate_mask]
keyword[if] identifier[is_micro] :
identifier[is_better] = identifier[gt]
identifier[best_score] =- literal[int]
identifier[eval_mask] = identifier[evaluate_micro_mask]
identifier[function_matrix] = identifier[make_matrix] ( identifier[version] )
identifier[add_finder_patterns] ( identifier[function_matrix] , identifier[is_micro] )
identifier[add_alignment_patterns] ( identifier[function_matrix] , identifier[version] )
keyword[if] keyword[not] identifier[is_micro] :
identifier[function_matrix] [- literal[int] ][ literal[int] ]= literal[int]
keyword[def] identifier[is_encoding_region] ( identifier[i] , identifier[j] ):
keyword[return] identifier[function_matrix] [ identifier[i] ][ identifier[j] ]== literal[int]
identifier[mask_patterns] = identifier[get_data_mask_functions] ( identifier[is_micro] )
keyword[if] identifier[proposed_mask] keyword[is] keyword[not] keyword[None] :
identifier[apply_mask] ( identifier[matrix] , identifier[mask_patterns] [ identifier[proposed_mask] ], identifier[matrix_size] ,
identifier[is_encoding_region] )
keyword[return] identifier[proposed_mask]
keyword[for] identifier[mask_number] , identifier[mask_pattern] keyword[in] identifier[enumerate] ( identifier[mask_patterns] ):
identifier[apply_mask] ( identifier[matrix] , identifier[mask_pattern] , identifier[matrix_size] , identifier[is_encoding_region] )
identifier[score] = identifier[eval_mask] ( identifier[matrix] , identifier[matrix_size] )
keyword[if] identifier[is_better] ( identifier[score] , identifier[best_score] ):
identifier[best_score] = identifier[score]
identifier[best_pattern] = identifier[mask_number]
identifier[apply_mask] ( identifier[matrix] , identifier[mask_pattern] , identifier[matrix_size] , identifier[is_encoding_region] )
identifier[apply_mask] ( identifier[matrix] , identifier[mask_patterns] [ identifier[best_pattern] ], identifier[matrix_size] , identifier[is_encoding_region] )
keyword[return] identifier[best_pattern]
|
def find_and_apply_best_mask(matrix, version, is_micro, proposed_mask=None):
""" Applies all mask patterns against the provided QR Code matrix and returns
the best matrix and best pattern.
ISO/IEC 18004:2015(E) -- 7.8.2 Data mask patterns (page 50)
ISO/IEC 18004:2015(E) -- 7.8.3 Evaluation of data masking results (page 53)
ISO/IEC 18004:2015(E) -- 7.8.3.1 Evaluation of QR Code symbols (page 53/54)
ISO/IEC 18004:2015(E) -- 7.8.3.2 Evaluation of Micro QR Code symbols (page 54/55)
:param matrix: A matrix.
:param int version: A version (Micro) QR Code version constant.
:param bool is_micro: Indicates if the matrix represents a Micro QR Code
:param proposed_mask: Optional int to indicate the preferred mask.
:rtype: tuple
:return: A tuple of the best matrix and best data mask pattern index.
"""
matrix_size = len(matrix)
# ISO/IEC 18004:2015 -- 7.8.3.1 Evaluation of QR Code symbols (page 53/54)
# The data mask pattern which results in the lowest penalty score shall
# be selected for the symbol.
is_better = lt
best_score = _MAX_PENALTY_SCORE
eval_mask = evaluate_mask
if is_micro:
# ISO/IEC 18004:2015(E) - 7.8.3.2 Evaluation of Micro QR Code symbols (page 54/55)
# The data mask pattern which results in the highest score shall be
# selected for the symbol.
is_better = gt
best_score = -1
eval_mask = evaluate_micro_mask # depends on [control=['if'], data=[]]
# Matrix to check if a module belongs to the encoding region
# or function patterns
function_matrix = make_matrix(version)
add_finder_patterns(function_matrix, is_micro)
add_alignment_patterns(function_matrix, version)
if not is_micro:
function_matrix[-8][8] = 1 # depends on [control=['if'], data=[]]
def is_encoding_region(i, j):
return function_matrix[i][j] == 2
mask_patterns = get_data_mask_functions(is_micro)
# If the user supplied a mask pattern, the evaluation step is skipped
if proposed_mask is not None:
apply_mask(matrix, mask_patterns[proposed_mask], matrix_size, is_encoding_region)
return proposed_mask # depends on [control=['if'], data=['proposed_mask']]
for (mask_number, mask_pattern) in enumerate(mask_patterns):
apply_mask(matrix, mask_pattern, matrix_size, is_encoding_region)
# NOTE: DO NOT add format / version info in advance of evaluation
# See ISO/IEC 18004:2015(E) -- 7.8. Data masking (page 50)
score = eval_mask(matrix, matrix_size)
if is_better(score, best_score):
best_score = score
best_pattern = mask_number # depends on [control=['if'], data=[]]
# Undo mask
apply_mask(matrix, mask_pattern, matrix_size, is_encoding_region) # depends on [control=['for'], data=[]]
apply_mask(matrix, mask_patterns[best_pattern], matrix_size, is_encoding_region)
return best_pattern
|
def stop(self):
        """Deactivate this logger by detaching its most recent handler.

        Does nothing when the logger is not currently active.
        """
        if self.active:
            self.removeHandler(self.handlers[-1])
            self.active = False
|
def function[stop, parameter[self]]:
constant[ Stop logging with this logger.
]
if <ast.UnaryOp object at 0x7da1b1435ba0> begin[:]
return[None]
call[name[self].removeHandler, parameter[call[name[self].handlers][<ast.UnaryOp object at 0x7da1b1437b80>]]]
name[self].active assign[=] constant[False]
return[None]
|
keyword[def] identifier[stop] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[active] :
keyword[return]
identifier[self] . identifier[removeHandler] ( identifier[self] . identifier[handlers] [- literal[int] ])
identifier[self] . identifier[active] = keyword[False]
keyword[return]
|
def stop(self):
""" Stop logging with this logger.
"""
if not self.active:
return # depends on [control=['if'], data=[]]
self.removeHandler(self.handlers[-1])
self.active = False
return
|
def plot_contour_matrix(arrays,
                        fields,
                        filename,
                        weights=None,
                        sample_names=None,
                        sample_lines=None,
                        sample_colors=None,
                        color_map=None,
                        num_bins=20,
                        num_contours=3,
                        cell_width=2,
                        cell_height=2,
                        cell_margin_x=0.05,
                        cell_margin_y=0.05,
                        dpi=100,
                        padding=0,
                        animate_field=None,
                        animate_steps=10,
                        animate_delay=20,
                        animate_loop=0):
    """
    Create a matrix of contour plots showing all possible 2D projections of a
    multivariate dataset. You may optionally animate the contours as a cut on
    one of the fields is increased. ImageMagick must be installed to produce
    animations.
    Parameters
    ----------
    arrays : list of arrays of shape [n_samples, n_fields]
        A list of 2D NumPy arrays for each sample. All arrays must have the
        same number of columns.
    fields : list of strings
        A list of the field names.
    filename : string
        The output filename. If animatation is enabled
        ``animate_field is not None`` then ``filename`` must have the .gif
        extension.
    weights : list of arrays, optional (default=None)
        List of 1D NumPy arrays of sample weights corresponding to the arrays
        in ``arrays``.
    sample_names : list of strings, optional (default=None)
        A list of the sample names for the legend. If None, then no legend will
        be shown.
    sample_lines : list of strings, optional (default=None)
        A list of matplotlib line styles for each sample. If None then line
        styles will cycle through 'dashed', 'solid', 'dashdot', and 'dotted'.
        Elements of this list may also be a list of line styles which will be
        cycled through for the contour lines of the corresponding sample.
    sample_colors : list of matplotlib colors, optional (default=None)
        The color of the contours for each sample. If None, then colors will be
        selected according to regular intervals along the ``color_map``.
    color_map : a matplotlib color map, optional (default=None)
        If ``sample_colors is None`` then select colors according to regular
        intervals along this matplotlib color map. If ``color_map`` is None,
        then the spectral color map is used.
    num_bins : int, optional (default=20)
        The number of bins along both axes of the 2D histograms.
    num_contours : int, optional (default=3)
        The number of contour line to show for each sample.
    cell_width : float, optional (default=2)
        The width, in inches, of each subplot in the matrix.
    cell_height : float, optional (default=2)
        The height, in inches, of each subplot in the matrix.
    cell_margin_x : float, optional (default=0.05)
        The horizontal margin between adjacent subplots, as a fraction
        of the subplot size.
    cell_margin_y : float, optional (default=0.05)
        The vertical margin between adjacent subplots, as a fraction
        of the subplot size.
    dpi : int, optional (default=100)
        The number of pixels per inch.
    padding : float, optional (default=0)
        The padding, as a fraction of the range of the value along each axes to
        guarantee around each sample's contour plot.
    animate_field : string, optional (default=None)
        The field to animate a cut along. By default no animation is produced.
        If ``animate_field is not None`` then ``filename`` must end in the .gif
        extension and an animated GIF is produced.
    animate_steps : int, optional (default=10)
        The number of frames in the animation, corresponding to the number of
        regularly spaced cut values to show along the range of the
        ``animate_field``.
    animate_delay : int, optional (default=20)
        The duration that each frame is shown in the animation as a multiple of
        1 / 100 of a second.
    animate_loop : int, optional (default=0)
        The number of times to loop the animation. If zero, then loop forever.
    Notes
    -----
    NumPy and matplotlib are required
    """
    import numpy as np
    from .. import root2matplotlib as r2m
    import matplotlib.pyplot as plt
    from matplotlib.ticker import MaxNLocator
    from matplotlib import cm
    from matplotlib.lines import Line2D
    # we must have at least two fields (columns)
    num_fields = len(fields)
    if num_fields < 2:
        raise ValueError(
            "record arrays must have at least two fields")
    # check that all arrays have the same number of columns
    for array in arrays:
        if array.shape[1] != num_fields:
            raise ValueError(
                "number of array columns does not match number of fields")
    if sample_colors is None:
        if color_map is None:
            # NOTE(review): cm.spectral was removed in matplotlib >= 2.2;
            # confirm the supported matplotlib version still provides it.
            color_map = cm.spectral
        # pick evenly spaced colors, skipping both extremes of the map
        steps = np.linspace(0, 1, len(arrays) + 2)[1:-1]
        sample_colors = [color_map(s) for s in steps]
    # determine range of each field
    low = np.vstack([a.min(axis=0) for a in arrays]).min(axis=0)
    high = np.vstack([a.max(axis=0) for a in arrays]).max(axis=0)
    # widen each field's range by ``padding`` (a fraction of the range) on
    # both sides
    width = np.abs(high - low)
    width *= padding
    low -= width
    high += width
    def single_frame(arrays, filename, label=None):
        """Draw one full contour-matrix figure for ``arrays`` and save it to
        ``filename``; ``label`` (if given) is annotated in the top-left
        subplot."""
        # create the canvas and divide into matrix
        fig, axes = plt.subplots(
            nrows=num_fields,
            ncols=num_fields,
            figsize=(cell_width * num_fields, cell_height * num_fields))
        fig.subplots_adjust(hspace=cell_margin_y, wspace=cell_margin_x)
        for ax in axes.flat:
            # only show the left and bottom axes ticks and labels
            if ax.is_last_row() and not ax.is_last_col():
                ax.xaxis.set_visible(True)
                ax.xaxis.set_ticks_position('bottom')
                ax.xaxis.set_major_locator(MaxNLocator(4, prune='both'))
                for tick in ax.xaxis.get_major_ticks():
                    tick.label.set_rotation('vertical')
            else:
                ax.xaxis.set_visible(False)
            if ax.is_first_col() and not ax.is_first_row():
                ax.yaxis.set_visible(True)
                ax.yaxis.set_ticks_position('left')
                ax.yaxis.set_major_locator(MaxNLocator(4, prune='both'))
            else:
                ax.yaxis.set_visible(False)
        # turn off axes frames in upper triangular matrix
        for ix, iy in zip(*np.triu_indices_from(axes, k=0)):
            axes[ix, iy].axis('off')
        # contour levels at regular fractions of the normalized maximum
        levels = np.linspace(0, 1, num_contours + 2)[1:-1]
        # plot the data
        for iy, ix in zip(*np.tril_indices_from(axes, k=-1)):
            ymin = float(low[iy])
            ymax = float(high[iy])
            xmin = float(low[ix])
            xmax = float(high[ix])
            for isample, a in enumerate(arrays):
                hist = Hist2D(
                    num_bins, xmin, xmax,
                    num_bins, ymin, ymax)
                if weights is not None:
                    hist.fill_array(a[:, [ix, iy]], weights[isample])
                else:
                    hist.fill_array(a[:, [ix, iy]])
                # normalize so maximum is 1.0
                _max = hist.GetMaximum()
                if _max != 0:
                    hist /= _max
                r2m.contour(hist,
                            axes=axes[iy, ix],
                            levels=levels,
                            linestyles=sample_lines[isample] if sample_lines else LINES,
                            colors=sample_colors[isample])
        # label the diagonal subplots
        for i, field in enumerate(fields):
            axes[i, i].annotate(field,
                                (0.1, 0.2),
                                rotation=45,
                                xycoords='axes fraction',
                                ha='left', va='center')
        # make proxy artists for legend
        lines = []
        for color in sample_colors:
            lines.append(Line2D([0, 0], [0, 0], color=color))
        if sample_names is not None:
            # draw the legend
            leg = fig.legend(lines, sample_names, loc=(0.65, 0.8))
            leg.set_frame_on(False)
        if label is not None:
            axes[0, 0].annotate(label, (0, 1),
                                ha='left', va='top',
                                xycoords='axes fraction')
        fig.savefig(filename, bbox_inches='tight', dpi=dpi)
        plt.close(fig)
    if animate_field is not None:
        _, ext = os.path.splitext(filename)
        if ext != '.gif':
            raise ValueError(
                "animation is only supported for .gif files")
        field_idx = fields.index(animate_field)
        # cut values regularly spaced over the field's range; the last edge is
        # dropped so the final frame still contains events
        cuts = np.linspace(
            low[field_idx],
            high[field_idx],
            animate_steps + 1)[:-1]
        gif = GIF()
        temp_dir = tempfile.mkdtemp()
        for i, cut in enumerate(cuts):
            frame_filename = os.path.join(temp_dir, 'frame_{0:d}.png'.format(i))
            label = '{0} > {1:.2f}'.format(animate_field, cut)
            log.info("creating frame for {0} ...".format(label))
            # keep only rows passing the cut on the animated field
            new_arrays = []
            for array in arrays:
                new_arrays.append(array[array[:, field_idx] > cut])
            single_frame(new_arrays,
                         filename=frame_filename,
                         label=label)
            gif.add_frame(frame_filename)
        gif.write(filename, delay=animate_delay, loop=animate_loop)
        shutil.rmtree(temp_dir)
    else:
        single_frame(arrays, filename=filename)
|
def function[plot_contour_matrix, parameter[arrays, fields, filename, weights, sample_names, sample_lines, sample_colors, color_map, num_bins, num_contours, cell_width, cell_height, cell_margin_x, cell_margin_y, dpi, padding, animate_field, animate_steps, animate_delay, animate_loop]]:
constant[
Create a matrix of contour plots showing all possible 2D projections of a
multivariate dataset. You may optionally animate the contours as a cut on
one of the fields is increased. ImageMagick must be installed to produce
animations.
Parameters
----------
arrays : list of arrays of shape [n_samples, n_fields]
A list of 2D NumPy arrays for each sample. All arrays must have the
same number of columns.
fields : list of strings
A list of the field names.
filename : string
The output filename. If animatation is enabled
``animate_field is not None`` then ``filename`` must have the .gif
extension.
weights : list of arrays, optional (default=None)
List of 1D NumPy arrays of sample weights corresponding to the arrays
in ``arrays``.
sample_names : list of strings, optional (default=None)
A list of the sample names for the legend. If None, then no legend will
be shown.
sample_lines : list of strings, optional (default=None)
A list of matplotlib line styles for each sample. If None then line
styles will cycle through 'dashed', 'solid', 'dashdot', and 'dotted'.
Elements of this list may also be a list of line styles which will be
cycled through for the contour lines of the corresponding sample.
sample_colors : list of matplotlib colors, optional (default=None)
The color of the contours for each sample. If None, then colors will be
selected according to regular intervals along the ``color_map``.
color_map : a matplotlib color map, optional (default=None)
If ``sample_colors is None`` then select colors according to regular
intervals along this matplotlib color map. If ``color_map`` is None,
then the spectral color map is used.
num_bins : int, optional (default=20)
The number of bins along both axes of the 2D histograms.
num_contours : int, optional (default=3)
The number of contour line to show for each sample.
cell_width : float, optional (default=2)
The width, in inches, of each subplot in the matrix.
cell_height : float, optional (default=2)
The height, in inches, of each subplot in the matrix.
cell_margin_x : float, optional (default=0.05)
The horizontal margin between adjacent subplots, as a fraction
of the subplot size.
cell_margin_y : float, optional (default=0.05)
The vertical margin between adjacent subplots, as a fraction
of the subplot size.
dpi : int, optional (default=100)
The number of pixels per inch.
padding : float, optional (default=0)
The padding, as a fraction of the range of the value along each axes to
guarantee around each sample's contour plot.
animate_field : string, optional (default=None)
The field to animate a cut along. By default no animation is produced.
If ``animate_field is not None`` then ``filename`` must end in the .gif
extension and an animated GIF is produced.
animate_steps : int, optional (default=10)
The number of frames in the animation, corresponding to the number of
regularly spaced cut values to show along the range of the
``animate_field``.
animate_delay : int, optional (default=20)
The duration that each frame is shown in the animation as a multiple of
1 / 100 of a second.
animate_loop : int, optional (default=0)
The number of times to loop the animation. If zero, then loop forever.
Notes
-----
NumPy and matplotlib are required
]
import module[numpy] as alias[np]
from relative_module[None] import module[root2matplotlib]
import module[matplotlib.pyplot] as alias[plt]
from relative_module[matplotlib.ticker] import module[MaxNLocator]
from relative_module[matplotlib] import module[cm]
from relative_module[matplotlib.lines] import module[Line2D]
variable[num_fields] assign[=] call[name[len], parameter[name[fields]]]
if compare[name[num_fields] less[<] constant[2]] begin[:]
<ast.Raise object at 0x7da1b101b370>
for taget[name[array]] in starred[name[arrays]] begin[:]
if compare[call[name[array].shape][constant[1]] not_equal[!=] name[num_fields]] begin[:]
<ast.Raise object at 0x7da1b101b0d0>
if compare[name[sample_colors] is constant[None]] begin[:]
if compare[name[color_map] is constant[None]] begin[:]
variable[color_map] assign[=] name[cm].spectral
variable[steps] assign[=] call[call[name[np].linspace, parameter[constant[0], constant[1], binary_operation[call[name[len], parameter[name[arrays]]] + constant[2]]]]][<ast.Slice object at 0x7da1b101ab60>]
variable[sample_colors] assign[=] <ast.ListComp object at 0x7da1b101aa40>
variable[low] assign[=] call[call[name[np].vstack, parameter[<ast.ListComp object at 0x7da1b101a770>]].min, parameter[]]
variable[high] assign[=] call[call[name[np].vstack, parameter[<ast.ListComp object at 0x7da1b101a3e0>]].max, parameter[]]
variable[width] assign[=] call[name[np].abs, parameter[binary_operation[name[high] - name[low]]]]
<ast.AugAssign object at 0x7da1b101a020>
<ast.AugAssign object at 0x7da1b1019f90>
<ast.AugAssign object at 0x7da1b1019f00>
def function[single_frame, parameter[arrays, filename, label]]:
<ast.Tuple object at 0x7da1b1019d20> assign[=] call[name[plt].subplots, parameter[]]
call[name[fig].subplots_adjust, parameter[]]
for taget[name[ax]] in starred[name[axes].flat] begin[:]
if <ast.BoolOp object at 0x7da1b1019720> begin[:]
call[name[ax].xaxis.set_visible, parameter[constant[True]]]
call[name[ax].xaxis.set_ticks_position, parameter[constant[bottom]]]
call[name[ax].xaxis.set_major_locator, parameter[call[name[MaxNLocator], parameter[constant[4]]]]]
for taget[name[tick]] in starred[call[name[ax].xaxis.get_major_ticks, parameter[]]] begin[:]
call[name[tick].label.set_rotation, parameter[constant[vertical]]]
if <ast.BoolOp object at 0x7da1b1016830> begin[:]
call[name[ax].yaxis.set_visible, parameter[constant[True]]]
call[name[ax].yaxis.set_ticks_position, parameter[constant[left]]]
call[name[ax].yaxis.set_major_locator, parameter[call[name[MaxNLocator], parameter[constant[4]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1016050>, <ast.Name object at 0x7da1b1016020>]]] in starred[call[name[zip], parameter[<ast.Starred object at 0x7da1b1015f90>]]] begin[:]
call[call[name[axes]][tuple[[<ast.Name object at 0x7da1b1015d20>, <ast.Name object at 0x7da1b1015cf0>]]].axis, parameter[constant[off]]]
variable[levels] assign[=] call[call[name[np].linspace, parameter[constant[0], constant[1], binary_operation[name[num_contours] + constant[2]]]]][<ast.Slice object at 0x7da1b1015a50>]
for taget[tuple[[<ast.Name object at 0x7da1b1015930>, <ast.Name object at 0x7da1b1015900>]]] in starred[call[name[zip], parameter[<ast.Starred object at 0x7da1b1015870>]]] begin[:]
variable[ymin] assign[=] call[name[float], parameter[call[name[low]][name[iy]]]]
variable[ymax] assign[=] call[name[float], parameter[call[name[high]][name[iy]]]]
variable[xmin] assign[=] call[name[float], parameter[call[name[low]][name[ix]]]]
variable[xmax] assign[=] call[name[float], parameter[call[name[high]][name[ix]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1015120>, <ast.Name object at 0x7da1b10150f0>]]] in starred[call[name[enumerate], parameter[name[arrays]]]] begin[:]
variable[hist] assign[=] call[name[Hist2D], parameter[name[num_bins], name[xmin], name[xmax], name[num_bins], name[ymin], name[ymax]]]
if compare[name[weights] is_not constant[None]] begin[:]
call[name[hist].fill_array, parameter[call[name[a]][tuple[[<ast.Slice object at 0x7da1b1014be0>, <ast.List object at 0x7da1b1014bb0>]]], call[name[weights]][name[isample]]]]
variable[_max] assign[=] call[name[hist].GetMaximum, parameter[]]
if compare[name[_max] not_equal[!=] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b1014670>
call[name[r2m].contour, parameter[name[hist]]]
for taget[tuple[[<ast.Name object at 0x7da1b10140d0>, <ast.Name object at 0x7da1b10140a0>]]] in starred[call[name[enumerate], parameter[name[fields]]]] begin[:]
call[call[name[axes]][tuple[[<ast.Name object at 0x7da1b1013e80>, <ast.Name object at 0x7da1b1013e50>]]].annotate, parameter[name[field], tuple[[<ast.Constant object at 0x7da1b1013dc0>, <ast.Constant object at 0x7da1b1013d90>]]]]
variable[lines] assign[=] list[[]]
for taget[name[color]] in starred[name[sample_colors]] begin[:]
call[name[lines].append, parameter[call[name[Line2D], parameter[list[[<ast.Constant object at 0x7da1b1120fd0>, <ast.Constant object at 0x7da1b1120a90>]], list[[<ast.Constant object at 0x7da1b11209a0>, <ast.Constant object at 0x7da1b11204f0>]]]]]]
if compare[name[sample_names] is_not constant[None]] begin[:]
variable[leg] assign[=] call[name[fig].legend, parameter[name[lines], name[sample_names]]]
call[name[leg].set_frame_on, parameter[constant[False]]]
if compare[name[label] is_not constant[None]] begin[:]
call[call[name[axes]][tuple[[<ast.Constant object at 0x7da1b1120190>, <ast.Constant object at 0x7da1b1120700>]]].annotate, parameter[name[label], tuple[[<ast.Constant object at 0x7da1b1119960>, <ast.Constant object at 0x7da1b1118a30>]]]]
call[name[fig].savefig, parameter[name[filename]]]
call[name[plt].close, parameter[name[fig]]]
if compare[name[animate_field] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da1b111a3e0> assign[=] call[name[os].path.splitext, parameter[name[filename]]]
if compare[name[ext] not_equal[!=] constant[.gif]] begin[:]
<ast.Raise object at 0x7da1b111a350>
variable[field_idx] assign[=] call[name[fields].index, parameter[name[animate_field]]]
variable[cuts] assign[=] call[call[name[np].linspace, parameter[call[name[low]][name[field_idx]], call[name[high]][name[field_idx]], binary_operation[name[animate_steps] + constant[1]]]]][<ast.Slice object at 0x7da1b1118af0>]
variable[gif] assign[=] call[name[GIF], parameter[]]
variable[temp_dir] assign[=] call[name[tempfile].mkdtemp, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b1119d20>, <ast.Name object at 0x7da1b1118760>]]] in starred[call[name[enumerate], parameter[name[cuts]]]] begin[:]
variable[frame_filename] assign[=] call[name[os].path.join, parameter[name[temp_dir], call[constant[frame_{0:d}.png].format, parameter[name[i]]]]]
variable[label] assign[=] call[constant[{0} > {1:.2f}].format, parameter[name[animate_field], name[cut]]]
call[name[log].info, parameter[call[constant[creating frame for {0} ...].format, parameter[name[label]]]]]
variable[new_arrays] assign[=] list[[]]
for taget[name[array]] in starred[name[arrays]] begin[:]
call[name[new_arrays].append, parameter[call[name[array]][compare[call[name[array]][tuple[[<ast.Slice object at 0x7da1b11185b0>, <ast.Name object at 0x7da1b1119ea0>]]] greater[>] name[cut]]]]]
call[name[single_frame], parameter[name[new_arrays]]]
call[name[gif].add_frame, parameter[name[frame_filename]]]
call[name[gif].write, parameter[name[filename]]]
call[name[shutil].rmtree, parameter[name[temp_dir]]]
|
keyword[def] identifier[plot_contour_matrix] ( identifier[arrays] ,
identifier[fields] ,
identifier[filename] ,
identifier[weights] = keyword[None] ,
identifier[sample_names] = keyword[None] ,
identifier[sample_lines] = keyword[None] ,
identifier[sample_colors] = keyword[None] ,
identifier[color_map] = keyword[None] ,
identifier[num_bins] = literal[int] ,
identifier[num_contours] = literal[int] ,
identifier[cell_width] = literal[int] ,
identifier[cell_height] = literal[int] ,
identifier[cell_margin_x] = literal[int] ,
identifier[cell_margin_y] = literal[int] ,
identifier[dpi] = literal[int] ,
identifier[padding] = literal[int] ,
identifier[animate_field] = keyword[None] ,
identifier[animate_steps] = literal[int] ,
identifier[animate_delay] = literal[int] ,
identifier[animate_loop] = literal[int] ):
literal[string]
keyword[import] identifier[numpy] keyword[as] identifier[np]
keyword[from] .. keyword[import] identifier[root2matplotlib] keyword[as] identifier[r2m]
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
keyword[from] identifier[matplotlib] . identifier[ticker] keyword[import] identifier[MaxNLocator]
keyword[from] identifier[matplotlib] keyword[import] identifier[cm]
keyword[from] identifier[matplotlib] . identifier[lines] keyword[import] identifier[Line2D]
identifier[num_fields] = identifier[len] ( identifier[fields] )
keyword[if] identifier[num_fields] < literal[int] :
keyword[raise] identifier[ValueError] (
literal[string] )
keyword[for] identifier[array] keyword[in] identifier[arrays] :
keyword[if] identifier[array] . identifier[shape] [ literal[int] ]!= identifier[num_fields] :
keyword[raise] identifier[ValueError] (
literal[string] )
keyword[if] identifier[sample_colors] keyword[is] keyword[None] :
keyword[if] identifier[color_map] keyword[is] keyword[None] :
identifier[color_map] = identifier[cm] . identifier[spectral]
identifier[steps] = identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[len] ( identifier[arrays] )+ literal[int] )[ literal[int] :- literal[int] ]
identifier[sample_colors] =[ identifier[color_map] ( identifier[s] ) keyword[for] identifier[s] keyword[in] identifier[steps] ]
identifier[low] = identifier[np] . identifier[vstack] ([ identifier[a] . identifier[min] ( identifier[axis] = literal[int] ) keyword[for] identifier[a] keyword[in] identifier[arrays] ]). identifier[min] ( identifier[axis] = literal[int] )
identifier[high] = identifier[np] . identifier[vstack] ([ identifier[a] . identifier[max] ( identifier[axis] = literal[int] ) keyword[for] identifier[a] keyword[in] identifier[arrays] ]). identifier[max] ( identifier[axis] = literal[int] )
identifier[width] = identifier[np] . identifier[abs] ( identifier[high] - identifier[low] )
identifier[width] *= identifier[padding]
identifier[low] -= identifier[width]
identifier[high] += identifier[width]
keyword[def] identifier[single_frame] ( identifier[arrays] , identifier[filename] , identifier[label] = keyword[None] ):
identifier[fig] , identifier[axes] = identifier[plt] . identifier[subplots] (
identifier[nrows] = identifier[num_fields] ,
identifier[ncols] = identifier[num_fields] ,
identifier[figsize] =( identifier[cell_width] * identifier[num_fields] , identifier[cell_height] * identifier[num_fields] ))
identifier[fig] . identifier[subplots_adjust] ( identifier[hspace] = identifier[cell_margin_y] , identifier[wspace] = identifier[cell_margin_x] )
keyword[for] identifier[ax] keyword[in] identifier[axes] . identifier[flat] :
keyword[if] identifier[ax] . identifier[is_last_row] () keyword[and] keyword[not] identifier[ax] . identifier[is_last_col] ():
identifier[ax] . identifier[xaxis] . identifier[set_visible] ( keyword[True] )
identifier[ax] . identifier[xaxis] . identifier[set_ticks_position] ( literal[string] )
identifier[ax] . identifier[xaxis] . identifier[set_major_locator] ( identifier[MaxNLocator] ( literal[int] , identifier[prune] = literal[string] ))
keyword[for] identifier[tick] keyword[in] identifier[ax] . identifier[xaxis] . identifier[get_major_ticks] ():
identifier[tick] . identifier[label] . identifier[set_rotation] ( literal[string] )
keyword[else] :
identifier[ax] . identifier[xaxis] . identifier[set_visible] ( keyword[False] )
keyword[if] identifier[ax] . identifier[is_first_col] () keyword[and] keyword[not] identifier[ax] . identifier[is_first_row] ():
identifier[ax] . identifier[yaxis] . identifier[set_visible] ( keyword[True] )
identifier[ax] . identifier[yaxis] . identifier[set_ticks_position] ( literal[string] )
identifier[ax] . identifier[yaxis] . identifier[set_major_locator] ( identifier[MaxNLocator] ( literal[int] , identifier[prune] = literal[string] ))
keyword[else] :
identifier[ax] . identifier[yaxis] . identifier[set_visible] ( keyword[False] )
keyword[for] identifier[ix] , identifier[iy] keyword[in] identifier[zip] (* identifier[np] . identifier[triu_indices_from] ( identifier[axes] , identifier[k] = literal[int] )):
identifier[axes] [ identifier[ix] , identifier[iy] ]. identifier[axis] ( literal[string] )
identifier[levels] = identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[num_contours] + literal[int] )[ literal[int] :- literal[int] ]
keyword[for] identifier[iy] , identifier[ix] keyword[in] identifier[zip] (* identifier[np] . identifier[tril_indices_from] ( identifier[axes] , identifier[k] =- literal[int] )):
identifier[ymin] = identifier[float] ( identifier[low] [ identifier[iy] ])
identifier[ymax] = identifier[float] ( identifier[high] [ identifier[iy] ])
identifier[xmin] = identifier[float] ( identifier[low] [ identifier[ix] ])
identifier[xmax] = identifier[float] ( identifier[high] [ identifier[ix] ])
keyword[for] identifier[isample] , identifier[a] keyword[in] identifier[enumerate] ( identifier[arrays] ):
identifier[hist] = identifier[Hist2D] (
identifier[num_bins] , identifier[xmin] , identifier[xmax] ,
identifier[num_bins] , identifier[ymin] , identifier[ymax] )
keyword[if] identifier[weights] keyword[is] keyword[not] keyword[None] :
identifier[hist] . identifier[fill_array] ( identifier[a] [:,[ identifier[ix] , identifier[iy] ]], identifier[weights] [ identifier[isample] ])
keyword[else] :
identifier[hist] . identifier[fill_array] ( identifier[a] [:,[ identifier[ix] , identifier[iy] ]])
identifier[_max] = identifier[hist] . identifier[GetMaximum] ()
keyword[if] identifier[_max] != literal[int] :
identifier[hist] /= identifier[_max]
identifier[r2m] . identifier[contour] ( identifier[hist] ,
identifier[axes] = identifier[axes] [ identifier[iy] , identifier[ix] ],
identifier[levels] = identifier[levels] ,
identifier[linestyles] = identifier[sample_lines] [ identifier[isample] ] keyword[if] identifier[sample_lines] keyword[else] identifier[LINES] ,
identifier[colors] = identifier[sample_colors] [ identifier[isample] ])
keyword[for] identifier[i] , identifier[field] keyword[in] identifier[enumerate] ( identifier[fields] ):
identifier[axes] [ identifier[i] , identifier[i] ]. identifier[annotate] ( identifier[field] ,
( literal[int] , literal[int] ),
identifier[rotation] = literal[int] ,
identifier[xycoords] = literal[string] ,
identifier[ha] = literal[string] , identifier[va] = literal[string] )
identifier[lines] =[]
keyword[for] identifier[color] keyword[in] identifier[sample_colors] :
identifier[lines] . identifier[append] ( identifier[Line2D] ([ literal[int] , literal[int] ],[ literal[int] , literal[int] ], identifier[color] = identifier[color] ))
keyword[if] identifier[sample_names] keyword[is] keyword[not] keyword[None] :
identifier[leg] = identifier[fig] . identifier[legend] ( identifier[lines] , identifier[sample_names] , identifier[loc] =( literal[int] , literal[int] ))
identifier[leg] . identifier[set_frame_on] ( keyword[False] )
keyword[if] identifier[label] keyword[is] keyword[not] keyword[None] :
identifier[axes] [ literal[int] , literal[int] ]. identifier[annotate] ( identifier[label] ,( literal[int] , literal[int] ),
identifier[ha] = literal[string] , identifier[va] = literal[string] ,
identifier[xycoords] = literal[string] )
identifier[fig] . identifier[savefig] ( identifier[filename] , identifier[bbox_inches] = literal[string] , identifier[dpi] = identifier[dpi] )
identifier[plt] . identifier[close] ( identifier[fig] )
keyword[if] identifier[animate_field] keyword[is] keyword[not] keyword[None] :
identifier[_] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[filename] )
keyword[if] identifier[ext] != literal[string] :
keyword[raise] identifier[ValueError] (
literal[string] )
identifier[field_idx] = identifier[fields] . identifier[index] ( identifier[animate_field] )
identifier[cuts] = identifier[np] . identifier[linspace] (
identifier[low] [ identifier[field_idx] ],
identifier[high] [ identifier[field_idx] ],
identifier[animate_steps] + literal[int] )[:- literal[int] ]
identifier[gif] = identifier[GIF] ()
identifier[temp_dir] = identifier[tempfile] . identifier[mkdtemp] ()
keyword[for] identifier[i] , identifier[cut] keyword[in] identifier[enumerate] ( identifier[cuts] ):
identifier[frame_filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[temp_dir] , literal[string] . identifier[format] ( identifier[i] ))
identifier[label] = literal[string] . identifier[format] ( identifier[animate_field] , identifier[cut] )
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[label] ))
identifier[new_arrays] =[]
keyword[for] identifier[array] keyword[in] identifier[arrays] :
identifier[new_arrays] . identifier[append] ( identifier[array] [ identifier[array] [:, identifier[field_idx] ]> identifier[cut] ])
identifier[single_frame] ( identifier[new_arrays] ,
identifier[filename] = identifier[frame_filename] ,
identifier[label] = identifier[label] )
identifier[gif] . identifier[add_frame] ( identifier[frame_filename] )
identifier[gif] . identifier[write] ( identifier[filename] , identifier[delay] = identifier[animate_delay] , identifier[loop] = identifier[animate_loop] )
identifier[shutil] . identifier[rmtree] ( identifier[temp_dir] )
keyword[else] :
identifier[single_frame] ( identifier[arrays] , identifier[filename] = identifier[filename] )
|
def plot_contour_matrix(arrays, fields, filename, weights=None, sample_names=None, sample_lines=None, sample_colors=None, color_map=None, num_bins=20, num_contours=3, cell_width=2, cell_height=2, cell_margin_x=0.05, cell_margin_y=0.05, dpi=100, padding=0, animate_field=None, animate_steps=10, animate_delay=20, animate_loop=0):
    """
    Create a matrix of contour plots showing all possible 2D projections of a
    multivariate dataset. You may optionally animate the contours as a cut on
    one of the fields is increased. ImageMagick must be installed to produce
    animations.
    Parameters
    ----------
    arrays : list of arrays of shape [n_samples, n_fields]
        A list of 2D NumPy arrays for each sample. All arrays must have the
        same number of columns.
    fields : list of strings
        A list of the field names.
    filename : string
        The output filename. If animatation is enabled
        ``animate_field is not None`` then ``filename`` must have the .gif
        extension.
    weights : list of arrays, optional (default=None)
        List of 1D NumPy arrays of sample weights corresponding to the arrays
        in ``arrays``.
    sample_names : list of strings, optional (default=None)
        A list of the sample names for the legend. If None, then no legend will
        be shown.
    sample_lines : list of strings, optional (default=None)
        A list of matplotlib line styles for each sample. If None then line
        styles will cycle through 'dashed', 'solid', 'dashdot', and 'dotted'.
        Elements of this list may also be a list of line styles which will be
        cycled through for the contour lines of the corresponding sample.
    sample_colors : list of matplotlib colors, optional (default=None)
        The color of the contours for each sample. If None, then colors will be
        selected according to regular intervals along the ``color_map``.
    color_map : a matplotlib color map, optional (default=None)
        If ``sample_colors is None`` then select colors according to regular
        intervals along this matplotlib color map. If ``color_map`` is None,
        then the spectral color map is used.
    num_bins : int, optional (default=20)
        The number of bins along both axes of the 2D histograms.
    num_contours : int, optional (default=3)
        The number of contour line to show for each sample.
    cell_width : float, optional (default=2)
        The width, in inches, of each subplot in the matrix.
    cell_height : float, optional (default=2)
        The height, in inches, of each subplot in the matrix.
    cell_margin_x : float, optional (default=0.05)
        The horizontal margin between adjacent subplots, as a fraction
        of the subplot size.
    cell_margin_y : float, optional (default=0.05)
        The vertical margin between adjacent subplots, as a fraction
        of the subplot size.
    dpi : int, optional (default=100)
        The number of pixels per inch.
    padding : float, optional (default=0)
        The padding, as a fraction of the range of the value along each axes to
        guarantee around each sample's contour plot.
    animate_field : string, optional (default=None)
        The field to animate a cut along. By default no animation is produced.
        If ``animate_field is not None`` then ``filename`` must end in the .gif
        extension and an animated GIF is produced.
    animate_steps : int, optional (default=10)
        The number of frames in the animation, corresponding to the number of
        regularly spaced cut values to show along the range of the
        ``animate_field``.
    animate_delay : int, optional (default=20)
        The duration that each frame is shown in the animation as a multiple of
        1 / 100 of a second.
    animate_loop : int, optional (default=0)
        The number of times to loop the animation. If zero, then loop forever.
    Notes
    -----
    NumPy and matplotlib are required
    """
    import numpy as np
    from .. import root2matplotlib as r2m
    import matplotlib.pyplot as plt
    from matplotlib.ticker import MaxNLocator
    from matplotlib import cm
    from matplotlib.lines import Line2D
    # NOTE(review): the trailing "# depends on [...]" markers below appear to
    # be auto-generated control/data-flow annotations, not authored comments.
    # NOTE(review): Hist2D, LINES, GIF, log, os, tempfile and shutil are not
    # imported here — presumably provided by the enclosing module (rootpy);
    # TODO confirm.
    # we must have at least two fields (columns)
    num_fields = len(fields)
    if num_fields < 2:
        raise ValueError('record arrays must have at least two fields') # depends on [control=['if'], data=[]]
    # check that all arrays have the same number of columns
    for array in arrays:
        if array.shape[1] != num_fields:
            raise ValueError('number of array columns does not match number of fields') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['array']]
    if sample_colors is None:
        if color_map is None:
            color_map = cm.spectral # depends on [control=['if'], data=['color_map']]
        # pick evenly spaced interior points of the color map, one per sample
        steps = np.linspace(0, 1, len(arrays) + 2)[1:-1]
        sample_colors = [color_map(s) for s in steps] # depends on [control=['if'], data=['sample_colors']]
    # determine range of each field
    low = np.vstack([a.min(axis=0) for a in arrays]).min(axis=0)
    high = np.vstack([a.max(axis=0) for a in arrays]).max(axis=0)
    width = np.abs(high - low)
    width *= padding
    # widen each axis range by the requested padding fraction
    low -= width
    high += width
    def single_frame(arrays, filename, label=None):
        """Render one full contour-matrix image to `filename` (optionally labelled)."""
        # create the canvas and divide into matrix
        (fig, axes) = plt.subplots(nrows=num_fields, ncols=num_fields, figsize=(cell_width * num_fields, cell_height * num_fields))
        fig.subplots_adjust(hspace=cell_margin_y, wspace=cell_margin_x)
        for ax in axes.flat:
            # only show the left and bottom axes ticks and labels
            if ax.is_last_row() and (not ax.is_last_col()):
                ax.xaxis.set_visible(True)
                ax.xaxis.set_ticks_position('bottom')
                ax.xaxis.set_major_locator(MaxNLocator(4, prune='both'))
                for tick in ax.xaxis.get_major_ticks():
                    tick.label.set_rotation('vertical') # depends on [control=['for'], data=['tick']] # depends on [control=['if'], data=[]]
            else:
                ax.xaxis.set_visible(False)
            if ax.is_first_col() and (not ax.is_first_row()):
                ax.yaxis.set_visible(True)
                ax.yaxis.set_ticks_position('left')
                ax.yaxis.set_major_locator(MaxNLocator(4, prune='both')) # depends on [control=['if'], data=[]]
            else:
                ax.yaxis.set_visible(False) # depends on [control=['for'], data=['ax']]
        # turn off axes frames in upper triangular matrix
        for (ix, iy) in zip(*np.triu_indices_from(axes, k=0)):
            axes[ix, iy].axis('off') # depends on [control=['for'], data=[]]
        # interior contour levels only (endpoints 0 and 1 dropped)
        levels = np.linspace(0, 1, num_contours + 2)[1:-1]
        # plot the data
        for (iy, ix) in zip(*np.tril_indices_from(axes, k=-1)):
            ymin = float(low[iy])
            ymax = float(high[iy])
            xmin = float(low[ix])
            xmax = float(high[ix])
            for (isample, a) in enumerate(arrays):
                hist = Hist2D(num_bins, xmin, xmax, num_bins, ymin, ymax)
                if weights is not None:
                    hist.fill_array(a[:, [ix, iy]], weights[isample]) # depends on [control=['if'], data=['weights']]
                else:
                    hist.fill_array(a[:, [ix, iy]])
                # normalize so maximum is 1.0
                _max = hist.GetMaximum()
                if _max != 0:
                    hist /= _max # depends on [control=['if'], data=['_max']]
                r2m.contour(hist, axes=axes[iy, ix], levels=levels, linestyles=sample_lines[isample] if sample_lines else LINES, colors=sample_colors[isample]) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
        # label the diagonal subplots
        for (i, field) in enumerate(fields):
            axes[i, i].annotate(field, (0.1, 0.2), rotation=45, xycoords='axes fraction', ha='left', va='center') # depends on [control=['for'], data=[]]
        # make proxy artists for legend
        lines = []
        for color in sample_colors:
            lines.append(Line2D([0, 0], [0, 0], color=color)) # depends on [control=['for'], data=['color']]
        if sample_names is not None:
            # draw the legend
            leg = fig.legend(lines, sample_names, loc=(0.65, 0.8))
            leg.set_frame_on(False) # depends on [control=['if'], data=['sample_names']]
        if label is not None:
            axes[0, 0].annotate(label, (0, 1), ha='left', va='top', xycoords='axes fraction') # depends on [control=['if'], data=['label']]
        fig.savefig(filename, bbox_inches='tight', dpi=dpi)
        plt.close(fig)
    if animate_field is not None:
        (_, ext) = os.path.splitext(filename)
        if ext != '.gif':
            raise ValueError('animation is only supported for .gif files') # depends on [control=['if'], data=[]]
        field_idx = fields.index(animate_field)
        # one cut value per frame; the last linspace point is dropped so the
        # final frame still contains some data
        cuts = np.linspace(low[field_idx], high[field_idx], animate_steps + 1)[:-1]
        gif = GIF()
        temp_dir = tempfile.mkdtemp()
        for (i, cut) in enumerate(cuts):
            frame_filename = os.path.join(temp_dir, 'frame_{0:d}.png'.format(i))
            label = '{0} > {1:.2f}'.format(animate_field, cut)
            log.info('creating frame for {0} ...'.format(label))
            new_arrays = []
            for array in arrays:
                new_arrays.append(array[array[:, field_idx] > cut]) # depends on [control=['for'], data=['array']]
            single_frame(new_arrays, filename=frame_filename, label=label)
            gif.add_frame(frame_filename) # depends on [control=['for'], data=[]]
        gif.write(filename, delay=animate_delay, loop=animate_loop)
        shutil.rmtree(temp_dir) # depends on [control=['if'], data=['animate_field']]
    else:
        single_frame(arrays, filename=filename)
|
def redirect_stdout(self):
    """Send stdout/stderr to driver log files so they can be tailed with the other logs."""
    # Keep references to the real streams so they can be restored later.
    self.hijacked_stdout = sys.stdout
    self.hijacked_stderr = sys.stderr
    out_path = self.hitch_dir.driverout()
    err_path = self.hitch_dir.drivererr()
    # Unbuffered (buffering=0) append-binary mode so every line hits the
    # log file immediately and nothing is lost if the driver dies.
    sys.stdout = open(out_path, "ab", 0)
    sys.stderr = open(err_path, "ab", 0)
|
def function[redirect_stdout, parameter[self]]:
constant[Redirect stdout to file so that it can be tailed and aggregated with the other logs.]
name[self].hijacked_stdout assign[=] name[sys].stdout
name[self].hijacked_stderr assign[=] name[sys].stderr
name[sys].stdout assign[=] call[name[open], parameter[call[name[self].hitch_dir.driverout, parameter[]], constant[ab], constant[0]]]
name[sys].stderr assign[=] call[name[open], parameter[call[name[self].hitch_dir.drivererr, parameter[]], constant[ab], constant[0]]]
|
keyword[def] identifier[redirect_stdout] ( identifier[self] ):
literal[string]
identifier[self] . identifier[hijacked_stdout] = identifier[sys] . identifier[stdout]
identifier[self] . identifier[hijacked_stderr] = identifier[sys] . identifier[stderr]
identifier[sys] . identifier[stdout] = identifier[open] ( identifier[self] . identifier[hitch_dir] . identifier[driverout] (), literal[string] , literal[int] )
identifier[sys] . identifier[stderr] = identifier[open] ( identifier[self] . identifier[hitch_dir] . identifier[drivererr] (), literal[string] , literal[int] )
|
def redirect_stdout(self):
    """Redirect stdout to file so that it can be tailed and aggregated with the other logs."""
    # Save the real streams first so a later un-redirect can restore them.
    self.hijacked_stdout = sys.stdout
    self.hijacked_stderr = sys.stderr
    # 0 must be set as the buffer, otherwise lines won't get logged in time.
    # Append-binary mode; self.hitch_dir presumably supplies the log paths —
    # TODO confirm against the HitchDir implementation.
    sys.stdout = open(self.hitch_dir.driverout(), 'ab', 0)
    sys.stderr = open(self.hitch_dir.drivererr(), 'ab', 0)
|
def eeg_create_mne_events(onsets, conditions=None):
    """
    Create MNE compatible events.

    Parameters
    ----------
    onsets : list or array
        Events onsets.
    conditions : list, optional
        A list of equal length containing the stimuli types/conditions.
        If None, every onset is labelled "Event".

    Returns
    ----------
    (events, event_id) : tuple
        MNE-formatted events array of shape (n_events, 3) — each row is
        [onset, 0, condition_code] — and a dict mapping each condition
        name to its integer code. If ``conditions`` and ``onsets`` have
        different lengths a warning is printed and an empty tuple is
        returned (kept for backward compatibility).

    Example
    ----------
    >>> import neurokit as nk
    >>> events, event_id = nk.eeg_create_mne_events(events_onset, conditions)

    Authors
    ----------
    - `Dominique Makowski <https://dominiquemakowski.github.io/>`_
    """
    if conditions is None:
        # No labels supplied: give every onset the same generic condition.
        conditions = ["Event"] * len(onsets)
    # Sanity check: exactly one condition per onset is required.
    if len(conditions) != len(onsets):
        print("NeuroKit Warning: eeg_create_events(): conditions parameter of different length than onsets. Aborting.")
        return()
    # Sort the unique names so the name -> code mapping is deterministic
    # across runs (a bare set() ordering varies with string hashing).
    event_names = sorted(set(conditions), key=str)
    event_id = {name: code for code, name in enumerate(event_names)}
    # Translate every condition to its integer code in a single pass.
    # (The previous per-name in-place replacement could clobber entries
    # whenever a condition name was equal to an already-assigned code.)
    condition_codes = [event_id[condition] for condition in conditions]
    events = np.array([onsets, [0] * len(onsets), condition_codes]).T
    return(events, event_id)
|
def function[eeg_create_mne_events, parameter[onsets, conditions]]:
constant[
Create MNE compatible events.
Parameters
----------
onsets : list or array
Events onsets.
conditions : list
A list of equal length containing the stimuli types/conditions.
Returns
----------
(events, event_id) : tuple
MNE-formated events and a dictionary with event's names.
Example
----------
>>> import neurokit as nk
>>> events, event_id = nk.eeg_create_mne_events(events_onset, conditions)
Authors
----------
- `Dominique Makowski <https://dominiquemakowski.github.io/>`_
]
variable[event_id] assign[=] dictionary[[], []]
if compare[name[conditions] is constant[None]] begin[:]
variable[conditions] assign[=] binary_operation[list[[<ast.Constant object at 0x7da2043459c0>]] * call[name[len], parameter[name[onsets]]]]
if compare[call[name[len], parameter[name[conditions]]] not_equal[!=] call[name[len], parameter[name[onsets]]]] begin[:]
call[name[print], parameter[constant[NeuroKit Warning: eeg_create_events(): conditions parameter of different length than onsets. Aborting.]]]
return[tuple[[]]]
variable[event_names] assign[=] call[name[list], parameter[call[name[set], parameter[name[conditions]]]]]
variable[event_index] assign[=] call[name[list], parameter[call[name[range], parameter[call[name[len], parameter[name[event_names]]]]]]]
for taget[name[i]] in starred[call[name[enumerate], parameter[name[event_names]]]] begin[:]
variable[conditions] assign[=] <ast.ListComp object at 0x7da204347190>
call[name[event_id]][call[name[i]][constant[1]]] assign[=] call[name[event_index]][call[name[i]][constant[0]]]
variable[events] assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da18f813a90>, <ast.BinOp object at 0x7da18f813a00>, <ast.Name object at 0x7da18f810f40>]]]].T
return[tuple[[<ast.Name object at 0x7da18f813bb0>, <ast.Name object at 0x7da18f813160>]]]
|
keyword[def] identifier[eeg_create_mne_events] ( identifier[onsets] , identifier[conditions] = keyword[None] ):
literal[string]
identifier[event_id] ={}
keyword[if] identifier[conditions] keyword[is] keyword[None] :
identifier[conditions] =[ literal[string] ]* identifier[len] ( identifier[onsets] )
keyword[if] identifier[len] ( identifier[conditions] )!= identifier[len] ( identifier[onsets] ):
identifier[print] ( literal[string] )
keyword[return] ()
identifier[event_names] = identifier[list] ( identifier[set] ( identifier[conditions] ))
identifier[event_index] = identifier[list] ( identifier[range] ( identifier[len] ( identifier[event_names] )))
keyword[for] identifier[i] keyword[in] identifier[enumerate] ( identifier[event_names] ):
identifier[conditions] =[ identifier[event_index] [ identifier[i] [ literal[int] ]] keyword[if] identifier[x] == identifier[i] [ literal[int] ] keyword[else] identifier[x] keyword[for] identifier[x] keyword[in] identifier[conditions] ]
identifier[event_id] [ identifier[i] [ literal[int] ]]= identifier[event_index] [ identifier[i] [ literal[int] ]]
identifier[events] = identifier[np] . identifier[array] ([ identifier[onsets] ,[ literal[int] ]* identifier[len] ( identifier[onsets] ), identifier[conditions] ]). identifier[T]
keyword[return] ( identifier[events] , identifier[event_id] )
|
def eeg_create_mne_events(onsets, conditions=None):
"""
Create MNE compatible events.
Parameters
----------
onsets : list or array
Events onsets.
conditions : list
A list of equal length containing the stimuli types/conditions.
Returns
----------
(events, event_id) : tuple
MNE-formated events and a dictionary with event's names.
Example
----------
>>> import neurokit as nk
>>> events, event_id = nk.eeg_create_mne_events(events_onset, conditions)
Authors
----------
- `Dominique Makowski <https://dominiquemakowski.github.io/>`_
"""
event_id = {}
if conditions is None:
conditions = ['Event'] * len(onsets) # depends on [control=['if'], data=['conditions']]
# Sanity check
if len(conditions) != len(onsets):
print('NeuroKit Warning: eeg_create_events(): conditions parameter of different length than onsets. Aborting.')
return () # depends on [control=['if'], data=[]]
event_names = list(set(conditions))
# event_index = [1, 2, 3, 4, 5, 32, 64, 128]
event_index = list(range(len(event_names)))
for i in enumerate(event_names):
conditions = [event_index[i[0]] if x == i[1] else x for x in conditions]
event_id[i[1]] = event_index[i[0]] # depends on [control=['for'], data=['i']]
events = np.array([onsets, [0] * len(onsets), conditions]).T
return (events, event_id)
|
def _to_add_with_category(self, catid):
    '''
    Render the category-specific "add" page, pre-filled for the category.
    Used for info2.
    :param catid: the uid of category
    '''
    catinfo = MCategory.get_by_uid(catid)
    kwd = {
        'uid': self._gen_uid(),
        # Anonymous visitors get an empty userid.
        'userid': self.userinfo.user_name if self.userinfo else '',
        'gcat0': catid,
        # Parent category name, looked up via the category's pid.
        'parentname': MCategory.get_by_uid(catinfo.pid).name,
        'catname': MCategory.get_by_uid(catid).name,
    }
    # Template path is generated per-category (autogen/add/add_<catid>.html).
    self.render('autogen/add/add_{0}.html'.format(catid),
                userinfo=self.userinfo,
                kwd=kwd)
|
def function[_to_add_with_category, parameter[self, catid]]:
constant[
Used for info2.
:param catid: the uid of category
]
variable[catinfo] assign[=] call[name[MCategory].get_by_uid, parameter[name[catid]]]
variable[kwd] assign[=] dictionary[[<ast.Constant object at 0x7da1b06692d0>, <ast.Constant object at 0x7da1b0668df0>, <ast.Constant object at 0x7da1b0668220>, <ast.Constant object at 0x7da1b0669300>, <ast.Constant object at 0x7da1b0668f10>], [<ast.Call object at 0x7da1b0668f70>, <ast.IfExp object at 0x7da1b06685b0>, <ast.Name object at 0x7da1b0669390>, <ast.Attribute object at 0x7da1b0668a60>, <ast.Attribute object at 0x7da1b04f4fa0>]]
call[name[self].render, parameter[call[constant[autogen/add/add_{0}.html].format, parameter[name[catid]]]]]
|
keyword[def] identifier[_to_add_with_category] ( identifier[self] , identifier[catid] ):
literal[string]
identifier[catinfo] = identifier[MCategory] . identifier[get_by_uid] ( identifier[catid] )
identifier[kwd] ={
literal[string] : identifier[self] . identifier[_gen_uid] (),
literal[string] : identifier[self] . identifier[userinfo] . identifier[user_name] keyword[if] identifier[self] . identifier[userinfo] keyword[else] literal[string] ,
literal[string] : identifier[catid] ,
literal[string] : identifier[MCategory] . identifier[get_by_uid] ( identifier[catinfo] . identifier[pid] ). identifier[name] ,
literal[string] : identifier[MCategory] . identifier[get_by_uid] ( identifier[catid] ). identifier[name] ,
}
identifier[self] . identifier[render] ( literal[string] . identifier[format] ( identifier[catid] ),
identifier[userinfo] = identifier[self] . identifier[userinfo] ,
identifier[kwd] = identifier[kwd] )
|
def _to_add_with_category(self, catid):
"""
Used for info2.
:param catid: the uid of category
"""
catinfo = MCategory.get_by_uid(catid)
kwd = {'uid': self._gen_uid(), 'userid': self.userinfo.user_name if self.userinfo else '', 'gcat0': catid, 'parentname': MCategory.get_by_uid(catinfo.pid).name, 'catname': MCategory.get_by_uid(catid).name}
self.render('autogen/add/add_{0}.html'.format(catid), userinfo=self.userinfo, kwd=kwd)
|
def sample_f(self, f, *args, **kwargs):
    r"""Evaluate method f on every model sample.
    Invokes f(\*args, \*\*kwargs) once per member of ``self.samples``.
    Parameters
    ----------
    f : method reference or name (str)
        Model method to be evaluated for each model sample
    args : arguments
        Non-keyword arguments forwarded to each call
    kwargs : keyword-argments
        Keyword arguments forwarded to each call
    Returns
    -------
    vals : list
        list of per-sample results, in sample order
    """
    self._check_samples_available()
    # TODO: np.fromiter is not usable here unless every member's result
    # shares the same shape.
    results = []
    for member in self.samples:
        results.append(call_member(member, f, *args, **kwargs))
    return results
|
def function[sample_f, parameter[self, f]]:
constant[Evaluated method f for all samples
Calls f(\*args, \*\*kwargs) on all samples.
Parameters
----------
f : method reference or name (str)
Model method to be evaluated for each model sample
args : arguments
Non-keyword arguments to be passed to the method in each call
kwargs : keyword-argments
Keyword arguments to be passed to the method in each call
Returns
-------
vals : list
list of results of the method calls
]
call[name[self]._check_samples_available, parameter[]]
return[<ast.ListComp object at 0x7da20e9573a0>]
|
keyword[def] identifier[sample_f] ( identifier[self] , identifier[f] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[_check_samples_available] ()
keyword[return] [ identifier[call_member] ( identifier[M] , identifier[f] ,* identifier[args] ,** identifier[kwargs] ) keyword[for] identifier[M] keyword[in] identifier[self] . identifier[samples] ]
|
def sample_f(self, f, *args, **kwargs):
"""Evaluated method f for all samples
Calls f(\\*args, \\*\\*kwargs) on all samples.
Parameters
----------
f : method reference or name (str)
Model method to be evaluated for each model sample
args : arguments
Non-keyword arguments to be passed to the method in each call
kwargs : keyword-argments
Keyword arguments to be passed to the method in each call
Returns
-------
vals : list
list of results of the method calls
"""
self._check_samples_available()
# TODO: can we use np.fromiter here? We would ne the same shape of every member for this!
return [call_member(M, f, *args, **kwargs) for M in self.samples]
|
def _find_read_pos(self) -> Optional[int]:
    """Attempts to find a position in the read buffer that satisfies
    the currently-pending read.
    Returns a position in the buffer if the current read can be satisfied,
    or None if it cannot.
    """
    # Fixed-length read: satisfiable once enough bytes have arrived, or as
    # soon as anything has arrived when a partial read is allowed.
    if self._read_bytes is not None and (
        self._read_buffer_size >= self._read_bytes
        or (self._read_partial and self._read_buffer_size > 0)
    ):
        num_bytes = min(self._read_bytes, self._read_buffer_size)
        return num_bytes
    elif self._read_delimiter is not None:
        # Multi-byte delimiters (e.g. '\r\n') may straddle two
        # chunks in the read buffer, so we can't easily find them
        # without collapsing the buffer. However, since protocols
        # using delimited reads (as opposed to reads of a known
        # length) tend to be "line" oriented, the delimiter is likely
        # to be in the first few chunks. Merge the buffer gradually
        # since large merges are relatively expensive and get undone in
        # _consume().
        if self._read_buffer:
            loc = self._read_buffer.find(
                self._read_delimiter, self._read_buffer_pos
            )
            if loc != -1:
                # Rebase to the unconsumed region and include the
                # delimiter itself in the returned length.
                loc -= self._read_buffer_pos
                delimiter_len = len(self._read_delimiter)
                self._check_max_bytes(self._read_delimiter, loc + delimiter_len)
                return loc + delimiter_len
            # Delimiter not found yet: enforce the max-bytes limit on what
            # has been buffered so far.
            self._check_max_bytes(self._read_delimiter, self._read_buffer_size)
    elif self._read_regex is not None:
        # Regex read: satisfied as soon as the pattern matches in the
        # unconsumed portion of the buffer.
        if self._read_buffer:
            m = self._read_regex.search(self._read_buffer, self._read_buffer_pos)
            if m is not None:
                loc = m.end() - self._read_buffer_pos
                self._check_max_bytes(self._read_regex, loc)
                return loc
            self._check_max_bytes(self._read_regex, self._read_buffer_size)
    # Not enough data yet to satisfy any pending read.
    return None
|
def function[_find_read_pos, parameter[self]]:
constant[Attempts to find a position in the read buffer that satisfies
the currently-pending read.
Returns a position in the buffer if the current read can be satisfied,
or None if it cannot.
]
if <ast.BoolOp object at 0x7da1b1fe4fa0> begin[:]
variable[num_bytes] assign[=] call[name[min], parameter[name[self]._read_bytes, name[self]._read_buffer_size]]
return[name[num_bytes]]
return[constant[None]]
|
keyword[def] identifier[_find_read_pos] ( identifier[self] )-> identifier[Optional] [ identifier[int] ]:
literal[string]
keyword[if] identifier[self] . identifier[_read_bytes] keyword[is] keyword[not] keyword[None] keyword[and] (
identifier[self] . identifier[_read_buffer_size] >= identifier[self] . identifier[_read_bytes]
keyword[or] ( identifier[self] . identifier[_read_partial] keyword[and] identifier[self] . identifier[_read_buffer_size] > literal[int] )
):
identifier[num_bytes] = identifier[min] ( identifier[self] . identifier[_read_bytes] , identifier[self] . identifier[_read_buffer_size] )
keyword[return] identifier[num_bytes]
keyword[elif] identifier[self] . identifier[_read_delimiter] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[_read_buffer] :
identifier[loc] = identifier[self] . identifier[_read_buffer] . identifier[find] (
identifier[self] . identifier[_read_delimiter] , identifier[self] . identifier[_read_buffer_pos]
)
keyword[if] identifier[loc] !=- literal[int] :
identifier[loc] -= identifier[self] . identifier[_read_buffer_pos]
identifier[delimiter_len] = identifier[len] ( identifier[self] . identifier[_read_delimiter] )
identifier[self] . identifier[_check_max_bytes] ( identifier[self] . identifier[_read_delimiter] , identifier[loc] + identifier[delimiter_len] )
keyword[return] identifier[loc] + identifier[delimiter_len]
identifier[self] . identifier[_check_max_bytes] ( identifier[self] . identifier[_read_delimiter] , identifier[self] . identifier[_read_buffer_size] )
keyword[elif] identifier[self] . identifier[_read_regex] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[_read_buffer] :
identifier[m] = identifier[self] . identifier[_read_regex] . identifier[search] ( identifier[self] . identifier[_read_buffer] , identifier[self] . identifier[_read_buffer_pos] )
keyword[if] identifier[m] keyword[is] keyword[not] keyword[None] :
identifier[loc] = identifier[m] . identifier[end] ()- identifier[self] . identifier[_read_buffer_pos]
identifier[self] . identifier[_check_max_bytes] ( identifier[self] . identifier[_read_regex] , identifier[loc] )
keyword[return] identifier[loc]
identifier[self] . identifier[_check_max_bytes] ( identifier[self] . identifier[_read_regex] , identifier[self] . identifier[_read_buffer_size] )
keyword[return] keyword[None]
|
def _find_read_pos(self) -> Optional[int]:
"""Attempts to find a position in the read buffer that satisfies
the currently-pending read.
Returns a position in the buffer if the current read can be satisfied,
or None if it cannot.
"""
if self._read_bytes is not None and (self._read_buffer_size >= self._read_bytes or (self._read_partial and self._read_buffer_size > 0)):
num_bytes = min(self._read_bytes, self._read_buffer_size)
return num_bytes # depends on [control=['if'], data=[]]
elif self._read_delimiter is not None:
# Multi-byte delimiters (e.g. '\r\n') may straddle two
# chunks in the read buffer, so we can't easily find them
# without collapsing the buffer. However, since protocols
# using delimited reads (as opposed to reads of a known
# length) tend to be "line" oriented, the delimiter is likely
# to be in the first few chunks. Merge the buffer gradually
# since large merges are relatively expensive and get undone in
# _consume().
if self._read_buffer:
loc = self._read_buffer.find(self._read_delimiter, self._read_buffer_pos)
if loc != -1:
loc -= self._read_buffer_pos
delimiter_len = len(self._read_delimiter)
self._check_max_bytes(self._read_delimiter, loc + delimiter_len)
return loc + delimiter_len # depends on [control=['if'], data=['loc']]
self._check_max_bytes(self._read_delimiter, self._read_buffer_size) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif self._read_regex is not None:
if self._read_buffer:
m = self._read_regex.search(self._read_buffer, self._read_buffer_pos)
if m is not None:
loc = m.end() - self._read_buffer_pos
self._check_max_bytes(self._read_regex, loc)
return loc # depends on [control=['if'], data=['m']]
self._check_max_bytes(self._read_regex, self._read_buffer_size) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return None
|
def _set_axis_limits(self, axis, view, subplots, ranges):
    """
    Compute extents for current view and apply as axis limits
    """
    # Extents
    extents = self.get_extents(view, ranges)
    if not extents or self.overlaid:
        # No explicit extents (or part of an overlay): let matplotlib
        # autoscale both axes.
        axis.autoscale_view(scalex=True, scaley=True)
        return
    # A limit is usable only if it is numeric and not NaN.
    valid_lim = lambda c: util.isnumeric(c) and not np.isnan(c)
    # Non-real, non-datetime coordinates are treated as missing (NaN);
    # datetime64 values are then converted to matplotlib date numbers.
    coords = [coord if np.isreal(coord) or isinstance(coord, np.datetime64) else np.NaN for coord in extents]
    coords = [date2num(util.dt64_to_dt(c)) if isinstance(c, np.datetime64) else c
              for c in coords]
    if self.projection == '3d' or len(extents) == 6:
        # 3D extents are (left, bottom, zmin, right, top, zmax).
        l, b, zmin, r, t, zmax = coords
        if self.invert_zaxis or any(p.invert_zaxis for p in subplots):
            zmin, zmax = zmax, zmin
        if zmin != zmax:
            if valid_lim(zmin):
                axis.set_zlim(bottom=zmin)
            if valid_lim(zmax):
                axis.set_zlim(top=zmax)
    else:
        l, b, r, t = coords
    if self.invert_axes:
        # Swapped axes: x limits come from the y extents and vice versa.
        l, b, r, t = b, l, t, r
    invertx = self.invert_xaxis or any(p.invert_xaxis for p in subplots)
    xlim, scalex = self._compute_limits(l, r, self.logx, invertx, 'left', 'right')
    inverty = self.invert_yaxis or any(p.invert_yaxis for p in subplots)
    ylim, scaley = self._compute_limits(b, t, self.logy, inverty, 'bottom', 'top')
    if xlim:
        axis.set_xlim(**xlim)
    if ylim:
        axis.set_ylim(**ylim)
    # Autoscale only the axes for which no explicit limit was applied.
    axis.autoscale_view(scalex=scalex, scaley=scaley)
|
def function[_set_axis_limits, parameter[self, axis, view, subplots, ranges]]:
constant[
Compute extents for current view and apply as axis limits
]
variable[extents] assign[=] call[name[self].get_extents, parameter[name[view], name[ranges]]]
if <ast.BoolOp object at 0x7da1b1acfe80> begin[:]
call[name[axis].autoscale_view, parameter[]]
return[None]
variable[valid_lim] assign[=] <ast.Lambda object at 0x7da1b1acdcc0>
variable[coords] assign[=] <ast.ListComp object at 0x7da1b1b0f970>
variable[coords] assign[=] <ast.ListComp object at 0x7da1b1b0d0f0>
if <ast.BoolOp object at 0x7da1b1b0c760> begin[:]
<ast.Tuple object at 0x7da1b1b0c700> assign[=] name[coords]
if <ast.BoolOp object at 0x7da1b1b0db10> begin[:]
<ast.Tuple object at 0x7da1b1b0c490> assign[=] tuple[[<ast.Name object at 0x7da1b1b0c220>, <ast.Name object at 0x7da1b1b0e5c0>]]
if compare[name[zmin] not_equal[!=] name[zmax]] begin[:]
if call[name[valid_lim], parameter[name[zmin]]] begin[:]
call[name[axis].set_zlim, parameter[]]
if call[name[valid_lim], parameter[name[zmax]]] begin[:]
call[name[axis].set_zlim, parameter[]]
if name[self].invert_axes begin[:]
<ast.Tuple object at 0x7da1b1ca3700> assign[=] tuple[[<ast.Name object at 0x7da1b1ca04f0>, <ast.Name object at 0x7da1b1ca04c0>, <ast.Name object at 0x7da1b1ca05b0>, <ast.Name object at 0x7da1b1ca0a90>]]
variable[invertx] assign[=] <ast.BoolOp object at 0x7da1b1ca0340>
<ast.Tuple object at 0x7da1b1ca2140> assign[=] call[name[self]._compute_limits, parameter[name[l], name[r], name[self].logx, name[invertx], constant[left], constant[right]]]
variable[inverty] assign[=] <ast.BoolOp object at 0x7da1b1ca1b40>
<ast.Tuple object at 0x7da1b1ca07f0> assign[=] call[name[self]._compute_limits, parameter[name[b], name[t], name[self].logy, name[inverty], constant[bottom], constant[top]]]
if name[xlim] begin[:]
call[name[axis].set_xlim, parameter[]]
if name[ylim] begin[:]
call[name[axis].set_ylim, parameter[]]
call[name[axis].autoscale_view, parameter[]]
|
keyword[def] identifier[_set_axis_limits] ( identifier[self] , identifier[axis] , identifier[view] , identifier[subplots] , identifier[ranges] ):
literal[string]
identifier[extents] = identifier[self] . identifier[get_extents] ( identifier[view] , identifier[ranges] )
keyword[if] keyword[not] identifier[extents] keyword[or] identifier[self] . identifier[overlaid] :
identifier[axis] . identifier[autoscale_view] ( identifier[scalex] = keyword[True] , identifier[scaley] = keyword[True] )
keyword[return]
identifier[valid_lim] = keyword[lambda] identifier[c] : identifier[util] . identifier[isnumeric] ( identifier[c] ) keyword[and] keyword[not] identifier[np] . identifier[isnan] ( identifier[c] )
identifier[coords] =[ identifier[coord] keyword[if] identifier[np] . identifier[isreal] ( identifier[coord] ) keyword[or] identifier[isinstance] ( identifier[coord] , identifier[np] . identifier[datetime64] ) keyword[else] identifier[np] . identifier[NaN] keyword[for] identifier[coord] keyword[in] identifier[extents] ]
identifier[coords] =[ identifier[date2num] ( identifier[util] . identifier[dt64_to_dt] ( identifier[c] )) keyword[if] identifier[isinstance] ( identifier[c] , identifier[np] . identifier[datetime64] ) keyword[else] identifier[c]
keyword[for] identifier[c] keyword[in] identifier[coords] ]
keyword[if] identifier[self] . identifier[projection] == literal[string] keyword[or] identifier[len] ( identifier[extents] )== literal[int] :
identifier[l] , identifier[b] , identifier[zmin] , identifier[r] , identifier[t] , identifier[zmax] = identifier[coords]
keyword[if] identifier[self] . identifier[invert_zaxis] keyword[or] identifier[any] ( identifier[p] . identifier[invert_zaxis] keyword[for] identifier[p] keyword[in] identifier[subplots] ):
identifier[zmin] , identifier[zmax] = identifier[zmax] , identifier[zmin]
keyword[if] identifier[zmin] != identifier[zmax] :
keyword[if] identifier[valid_lim] ( identifier[zmin] ):
identifier[axis] . identifier[set_zlim] ( identifier[bottom] = identifier[zmin] )
keyword[if] identifier[valid_lim] ( identifier[zmax] ):
identifier[axis] . identifier[set_zlim] ( identifier[top] = identifier[zmax] )
keyword[else] :
identifier[l] , identifier[b] , identifier[r] , identifier[t] = identifier[coords]
keyword[if] identifier[self] . identifier[invert_axes] :
identifier[l] , identifier[b] , identifier[r] , identifier[t] = identifier[b] , identifier[l] , identifier[t] , identifier[r]
identifier[invertx] = identifier[self] . identifier[invert_xaxis] keyword[or] identifier[any] ( identifier[p] . identifier[invert_xaxis] keyword[for] identifier[p] keyword[in] identifier[subplots] )
identifier[xlim] , identifier[scalex] = identifier[self] . identifier[_compute_limits] ( identifier[l] , identifier[r] , identifier[self] . identifier[logx] , identifier[invertx] , literal[string] , literal[string] )
identifier[inverty] = identifier[self] . identifier[invert_yaxis] keyword[or] identifier[any] ( identifier[p] . identifier[invert_yaxis] keyword[for] identifier[p] keyword[in] identifier[subplots] )
identifier[ylim] , identifier[scaley] = identifier[self] . identifier[_compute_limits] ( identifier[b] , identifier[t] , identifier[self] . identifier[logy] , identifier[inverty] , literal[string] , literal[string] )
keyword[if] identifier[xlim] :
identifier[axis] . identifier[set_xlim] (** identifier[xlim] )
keyword[if] identifier[ylim] :
identifier[axis] . identifier[set_ylim] (** identifier[ylim] )
identifier[axis] . identifier[autoscale_view] ( identifier[scalex] = identifier[scalex] , identifier[scaley] = identifier[scaley] )
|
def _set_axis_limits(self, axis, view, subplots, ranges):
"""
Compute extents for current view and apply as axis limits
"""
# Extents
extents = self.get_extents(view, ranges)
if not extents or self.overlaid:
axis.autoscale_view(scalex=True, scaley=True)
return # depends on [control=['if'], data=[]]
valid_lim = lambda c: util.isnumeric(c) and (not np.isnan(c))
coords = [coord if np.isreal(coord) or isinstance(coord, np.datetime64) else np.NaN for coord in extents]
coords = [date2num(util.dt64_to_dt(c)) if isinstance(c, np.datetime64) else c for c in coords]
if self.projection == '3d' or len(extents) == 6:
(l, b, zmin, r, t, zmax) = coords
if self.invert_zaxis or any((p.invert_zaxis for p in subplots)):
(zmin, zmax) = (zmax, zmin) # depends on [control=['if'], data=[]]
if zmin != zmax:
if valid_lim(zmin):
axis.set_zlim(bottom=zmin) # depends on [control=['if'], data=[]]
if valid_lim(zmax):
axis.set_zlim(top=zmax) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['zmin', 'zmax']] # depends on [control=['if'], data=[]]
else:
(l, b, r, t) = coords
if self.invert_axes:
(l, b, r, t) = (b, l, t, r) # depends on [control=['if'], data=[]]
invertx = self.invert_xaxis or any((p.invert_xaxis for p in subplots))
(xlim, scalex) = self._compute_limits(l, r, self.logx, invertx, 'left', 'right')
inverty = self.invert_yaxis or any((p.invert_yaxis for p in subplots))
(ylim, scaley) = self._compute_limits(b, t, self.logy, inverty, 'bottom', 'top')
if xlim:
axis.set_xlim(**xlim) # depends on [control=['if'], data=[]]
if ylim:
axis.set_ylim(**ylim) # depends on [control=['if'], data=[]]
axis.autoscale_view(scalex=scalex, scaley=scaley)
|
def run(self):
    """Run GapFill command"""
    # Load compound information
    def compound_name(id):
        """Map a compound id to its human-readable name, falling back to the id."""
        if id not in self._model.compounds:
            return id
        return self._model.compounds[id].properties.get('name', id)
    # Calculate penalty if penalty file exists
    penalties = {}
    if self._args.penalty is not None:
        for line in self._args.penalty:
            # Strip trailing '#' comments and skip blank lines.
            line, _, comment = line.partition('#')
            line = line.strip()
            if line == '':
                continue
            # Each line: reaction id, whitespace, penalty value.
            rxnid, penalty = line.split(None, 1)
            penalties[rxnid] = float(penalty)
    core = set(self._mm.reactions)
    solver = self._get_solver(integer=True)
    default_comp = self._model.default_compartment
    epsilon = self._args.epsilon
    v_max = float(self._model.default_flux_limit)
    blocked = set()
    for compound in self._args.compound:
        # Compounds given without a compartment are placed in the model's
        # default compartment.
        if compound.compartment is None:
            compound = compound.in_compartment(default_comp)
        blocked.add(compound)
    if len(blocked) > 0:
        logger.info('Unblocking compounds: {}...'.format(
            ', '.join(text_type(c) for c in sorted(blocked))))
    else:
        # No --compound given: try to unblock every compound in the model.
        logger.info(
            'Unblocking all compounds in model. Use --compound option to'
            ' unblock specific compounds.')
        blocked = set(self._mm.compounds)
    exclude = set()
    if self._model.biomass_reaction is not None:
        exclude.add(self._model.biomass_reaction)
    # Add exchange and transport reactions to database
    model_complete, weights = create_extended_model(
        self._model,
        db_penalty=self._args.db_penalty,
        ex_penalty=self._args.ex_penalty,
        tp_penalty=self._args.tp_penalty,
        penalties=penalties)
    implicit_sinks = not self._args.no_implicit_sinks
    logger.info('Searching for reactions to fill gaps')
    try:
        added_reactions, no_bounds_reactions = gapfill(
            model_complete, core, blocked, exclude, solver=solver,
            epsilon=epsilon, v_max=v_max, weights=weights,
            implicit_sinks=implicit_sinks,
            allow_bounds_expansion=self._args.allow_bounds_expansion)
    except GapFillError as e:
        self._log_epsilon_and_fail(epsilon, e)
    # Report as tab-separated rows: existing model reactions first, then
    # proposed additions, then reactions whose bounds were relaxed.
    for reaction_id in sorted(self._mm.reactions):
        rx = self._mm.get_reaction(reaction_id)
        rxt = rx.translated_compounds(compound_name)
        print('{}\t{}\t{}\t{}'.format(reaction_id, 'Model', 0, rxt))
    for rxnid in sorted(added_reactions):
        rx = model_complete.get_reaction(rxnid)
        rxt = rx.translated_compounds(compound_name)
        print('{}\t{}\t{}\t{}'.format(
            rxnid, 'Add', weights.get(rxnid, 1), rxt))
    for rxnid in sorted(no_bounds_reactions):
        rx = model_complete.get_reaction(rxnid)
        rxt = rx.translated_compounds(compound_name)
        print('{}\t{}\t{}\t{}'.format(
            rxnid, 'Remove bounds', weights.get(rxnid, 1), rxt))
|
def function[run, parameter[self]]:
constant[Run GapFill command]
def function[compound_name, parameter[id]]:
if compare[name[id] <ast.NotIn object at 0x7da2590d7190> name[self]._model.compounds] begin[:]
return[name[id]]
return[call[call[name[self]._model.compounds][name[id]].properties.get, parameter[constant[name], name[id]]]]
variable[penalties] assign[=] dictionary[[], []]
if compare[name[self]._args.penalty is_not constant[None]] begin[:]
for taget[name[line]] in starred[name[self]._args.penalty] begin[:]
<ast.Tuple object at 0x7da18bc73f10> assign[=] call[name[line].partition, parameter[constant[#]]]
variable[line] assign[=] call[name[line].strip, parameter[]]
if compare[name[line] equal[==] constant[]] begin[:]
continue
<ast.Tuple object at 0x7da18bc70a60> assign[=] call[name[line].split, parameter[constant[None], constant[1]]]
call[name[penalties]][name[rxnid]] assign[=] call[name[float], parameter[name[penalty]]]
variable[core] assign[=] call[name[set], parameter[name[self]._mm.reactions]]
variable[solver] assign[=] call[name[self]._get_solver, parameter[]]
variable[default_comp] assign[=] name[self]._model.default_compartment
variable[epsilon] assign[=] name[self]._args.epsilon
variable[v_max] assign[=] call[name[float], parameter[name[self]._model.default_flux_limit]]
variable[blocked] assign[=] call[name[set], parameter[]]
for taget[name[compound]] in starred[name[self]._args.compound] begin[:]
if compare[name[compound].compartment is constant[None]] begin[:]
variable[compound] assign[=] call[name[compound].in_compartment, parameter[name[default_comp]]]
call[name[blocked].add, parameter[name[compound]]]
if compare[call[name[len], parameter[name[blocked]]] greater[>] constant[0]] begin[:]
call[name[logger].info, parameter[call[constant[Unblocking compounds: {}...].format, parameter[call[constant[, ].join, parameter[<ast.GeneratorExp object at 0x7da18bc71030>]]]]]]
variable[exclude] assign[=] call[name[set], parameter[]]
if compare[name[self]._model.biomass_reaction is_not constant[None]] begin[:]
call[name[exclude].add, parameter[name[self]._model.biomass_reaction]]
<ast.Tuple object at 0x7da18bc71000> assign[=] call[name[create_extended_model], parameter[name[self]._model]]
variable[implicit_sinks] assign[=] <ast.UnaryOp object at 0x7da18bc72560>
call[name[logger].info, parameter[constant[Searching for reactions to fill gaps]]]
<ast.Try object at 0x7da18bc708e0>
for taget[name[reaction_id]] in starred[call[name[sorted], parameter[name[self]._mm.reactions]]] begin[:]
variable[rx] assign[=] call[name[self]._mm.get_reaction, parameter[name[reaction_id]]]
variable[rxt] assign[=] call[name[rx].translated_compounds, parameter[name[compound_name]]]
call[name[print], parameter[call[constant[{} {} {} {}].format, parameter[name[reaction_id], constant[Model], constant[0], name[rxt]]]]]
for taget[name[rxnid]] in starred[call[name[sorted], parameter[name[added_reactions]]]] begin[:]
variable[rx] assign[=] call[name[model_complete].get_reaction, parameter[name[rxnid]]]
variable[rxt] assign[=] call[name[rx].translated_compounds, parameter[name[compound_name]]]
call[name[print], parameter[call[constant[{} {} {} {}].format, parameter[name[rxnid], constant[Add], call[name[weights].get, parameter[name[rxnid], constant[1]]], name[rxt]]]]]
for taget[name[rxnid]] in starred[call[name[sorted], parameter[name[no_bounds_reactions]]]] begin[:]
variable[rx] assign[=] call[name[model_complete].get_reaction, parameter[name[rxnid]]]
variable[rxt] assign[=] call[name[rx].translated_compounds, parameter[name[compound_name]]]
call[name[print], parameter[call[constant[{} {} {} {}].format, parameter[name[rxnid], constant[Remove bounds], call[name[weights].get, parameter[name[rxnid], constant[1]]], name[rxt]]]]]
|
keyword[def] identifier[run] ( identifier[self] ):
literal[string]
keyword[def] identifier[compound_name] ( identifier[id] ):
keyword[if] identifier[id] keyword[not] keyword[in] identifier[self] . identifier[_model] . identifier[compounds] :
keyword[return] identifier[id]
keyword[return] identifier[self] . identifier[_model] . identifier[compounds] [ identifier[id] ]. identifier[properties] . identifier[get] ( literal[string] , identifier[id] )
identifier[penalties] ={}
keyword[if] identifier[self] . identifier[_args] . identifier[penalty] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[line] keyword[in] identifier[self] . identifier[_args] . identifier[penalty] :
identifier[line] , identifier[_] , identifier[comment] = identifier[line] . identifier[partition] ( literal[string] )
identifier[line] = identifier[line] . identifier[strip] ()
keyword[if] identifier[line] == literal[string] :
keyword[continue]
identifier[rxnid] , identifier[penalty] = identifier[line] . identifier[split] ( keyword[None] , literal[int] )
identifier[penalties] [ identifier[rxnid] ]= identifier[float] ( identifier[penalty] )
identifier[core] = identifier[set] ( identifier[self] . identifier[_mm] . identifier[reactions] )
identifier[solver] = identifier[self] . identifier[_get_solver] ( identifier[integer] = keyword[True] )
identifier[default_comp] = identifier[self] . identifier[_model] . identifier[default_compartment]
identifier[epsilon] = identifier[self] . identifier[_args] . identifier[epsilon]
identifier[v_max] = identifier[float] ( identifier[self] . identifier[_model] . identifier[default_flux_limit] )
identifier[blocked] = identifier[set] ()
keyword[for] identifier[compound] keyword[in] identifier[self] . identifier[_args] . identifier[compound] :
keyword[if] identifier[compound] . identifier[compartment] keyword[is] keyword[None] :
identifier[compound] = identifier[compound] . identifier[in_compartment] ( identifier[default_comp] )
identifier[blocked] . identifier[add] ( identifier[compound] )
keyword[if] identifier[len] ( identifier[blocked] )> literal[int] :
identifier[logger] . identifier[info] ( literal[string] . identifier[format] (
literal[string] . identifier[join] ( identifier[text_type] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[sorted] ( identifier[blocked] ))))
keyword[else] :
identifier[logger] . identifier[info] (
literal[string]
literal[string] )
identifier[blocked] = identifier[set] ( identifier[self] . identifier[_mm] . identifier[compounds] )
identifier[exclude] = identifier[set] ()
keyword[if] identifier[self] . identifier[_model] . identifier[biomass_reaction] keyword[is] keyword[not] keyword[None] :
identifier[exclude] . identifier[add] ( identifier[self] . identifier[_model] . identifier[biomass_reaction] )
identifier[model_complete] , identifier[weights] = identifier[create_extended_model] (
identifier[self] . identifier[_model] ,
identifier[db_penalty] = identifier[self] . identifier[_args] . identifier[db_penalty] ,
identifier[ex_penalty] = identifier[self] . identifier[_args] . identifier[ex_penalty] ,
identifier[tp_penalty] = identifier[self] . identifier[_args] . identifier[tp_penalty] ,
identifier[penalties] = identifier[penalties] )
identifier[implicit_sinks] = keyword[not] identifier[self] . identifier[_args] . identifier[no_implicit_sinks]
identifier[logger] . identifier[info] ( literal[string] )
keyword[try] :
identifier[added_reactions] , identifier[no_bounds_reactions] = identifier[gapfill] (
identifier[model_complete] , identifier[core] , identifier[blocked] , identifier[exclude] , identifier[solver] = identifier[solver] ,
identifier[epsilon] = identifier[epsilon] , identifier[v_max] = identifier[v_max] , identifier[weights] = identifier[weights] ,
identifier[implicit_sinks] = identifier[implicit_sinks] ,
identifier[allow_bounds_expansion] = identifier[self] . identifier[_args] . identifier[allow_bounds_expansion] )
keyword[except] identifier[GapFillError] keyword[as] identifier[e] :
identifier[self] . identifier[_log_epsilon_and_fail] ( identifier[epsilon] , identifier[e] )
keyword[for] identifier[reaction_id] keyword[in] identifier[sorted] ( identifier[self] . identifier[_mm] . identifier[reactions] ):
identifier[rx] = identifier[self] . identifier[_mm] . identifier[get_reaction] ( identifier[reaction_id] )
identifier[rxt] = identifier[rx] . identifier[translated_compounds] ( identifier[compound_name] )
identifier[print] ( literal[string] . identifier[format] ( identifier[reaction_id] , literal[string] , literal[int] , identifier[rxt] ))
keyword[for] identifier[rxnid] keyword[in] identifier[sorted] ( identifier[added_reactions] ):
identifier[rx] = identifier[model_complete] . identifier[get_reaction] ( identifier[rxnid] )
identifier[rxt] = identifier[rx] . identifier[translated_compounds] ( identifier[compound_name] )
identifier[print] ( literal[string] . identifier[format] (
identifier[rxnid] , literal[string] , identifier[weights] . identifier[get] ( identifier[rxnid] , literal[int] ), identifier[rxt] ))
keyword[for] identifier[rxnid] keyword[in] identifier[sorted] ( identifier[no_bounds_reactions] ):
identifier[rx] = identifier[model_complete] . identifier[get_reaction] ( identifier[rxnid] )
identifier[rxt] = identifier[rx] . identifier[translated_compounds] ( identifier[compound_name] )
identifier[print] ( literal[string] . identifier[format] (
identifier[rxnid] , literal[string] , identifier[weights] . identifier[get] ( identifier[rxnid] , literal[int] ), identifier[rxt] ))
|
def run(self):
"""Run GapFill command"""
# Load compound information
def compound_name(id):
if id not in self._model.compounds:
return id # depends on [control=['if'], data=['id']]
return self._model.compounds[id].properties.get('name', id)
# Calculate penalty if penalty file exists
penalties = {}
if self._args.penalty is not None:
for line in self._args.penalty:
(line, _, comment) = line.partition('#')
line = line.strip()
if line == '':
continue # depends on [control=['if'], data=[]]
(rxnid, penalty) = line.split(None, 1)
penalties[rxnid] = float(penalty) # depends on [control=['for'], data=['line']] # depends on [control=['if'], data=[]]
core = set(self._mm.reactions)
solver = self._get_solver(integer=True)
default_comp = self._model.default_compartment
epsilon = self._args.epsilon
v_max = float(self._model.default_flux_limit)
blocked = set()
for compound in self._args.compound:
if compound.compartment is None:
compound = compound.in_compartment(default_comp) # depends on [control=['if'], data=[]]
blocked.add(compound) # depends on [control=['for'], data=['compound']]
if len(blocked) > 0:
logger.info('Unblocking compounds: {}...'.format(', '.join((text_type(c) for c in sorted(blocked))))) # depends on [control=['if'], data=[]]
else:
logger.info('Unblocking all compounds in model. Use --compound option to unblock specific compounds.')
blocked = set(self._mm.compounds)
exclude = set()
if self._model.biomass_reaction is not None:
exclude.add(self._model.biomass_reaction) # depends on [control=['if'], data=[]]
# Add exchange and transport reactions to database
(model_complete, weights) = create_extended_model(self._model, db_penalty=self._args.db_penalty, ex_penalty=self._args.ex_penalty, tp_penalty=self._args.tp_penalty, penalties=penalties)
implicit_sinks = not self._args.no_implicit_sinks
logger.info('Searching for reactions to fill gaps')
try:
(added_reactions, no_bounds_reactions) = gapfill(model_complete, core, blocked, exclude, solver=solver, epsilon=epsilon, v_max=v_max, weights=weights, implicit_sinks=implicit_sinks, allow_bounds_expansion=self._args.allow_bounds_expansion) # depends on [control=['try'], data=[]]
except GapFillError as e:
self._log_epsilon_and_fail(epsilon, e) # depends on [control=['except'], data=['e']]
for reaction_id in sorted(self._mm.reactions):
rx = self._mm.get_reaction(reaction_id)
rxt = rx.translated_compounds(compound_name)
print('{}\t{}\t{}\t{}'.format(reaction_id, 'Model', 0, rxt)) # depends on [control=['for'], data=['reaction_id']]
for rxnid in sorted(added_reactions):
rx = model_complete.get_reaction(rxnid)
rxt = rx.translated_compounds(compound_name)
print('{}\t{}\t{}\t{}'.format(rxnid, 'Add', weights.get(rxnid, 1), rxt)) # depends on [control=['for'], data=['rxnid']]
for rxnid in sorted(no_bounds_reactions):
rx = model_complete.get_reaction(rxnid)
rxt = rx.translated_compounds(compound_name)
print('{}\t{}\t{}\t{}'.format(rxnid, 'Remove bounds', weights.get(rxnid, 1), rxt)) # depends on [control=['for'], data=['rxnid']]
|
def read_xdg_config_home(name, extension):
    """
    Read a configuration file from the XDG configuration home directory,
    which defaults to ``${HOME}/.config/name.extension``. Resolution uses the
    ``XDG_CONFIG_HOME`` and ``HOME`` environment variables.
    :param name: application or configuration set name
    :param extension: file extension to look for
    :return: a `.Configuration` instance, possibly `.NotConfigured`
    """
    # Honour ${XDG_CONFIG_HOME} when set and non-empty; per the XDG Base
    # Directory spec, fall back to $HOME/.config otherwise
    # (https://specifications.freedesktop.org/basedir-spec/latest/ar01s03.html).
    config_home = environ.get('XDG_CONFIG_HOME') or path.expanduser('~/.config')
    # Resolve the full path of the configuration file and load it, defaulting
    # to NotConfigured when the file is absent.
    config_file = path.join(config_home, '{name}.{extension}'.format(name=name, extension=extension))
    return loadf(config_file, default=NotConfigured)
|
def function[read_xdg_config_home, parameter[name, extension]]:
constant[
Read from file found in XDG-specified configuration home directory,
expanding to ``${HOME}/.config/name.extension`` by default. Depends on
``XDG_CONFIG_HOME`` or ``HOME`` environment variables.
:param name: application or configuration set name
:param extension: file extension to look for
:return: a `.Configuration` instance, possibly `.NotConfigured`
]
variable[config_home] assign[=] call[name[environ].get, parameter[constant[XDG_CONFIG_HOME]]]
if <ast.UnaryOp object at 0x7da20c6c79d0> begin[:]
variable[config_home] assign[=] call[name[path].expanduser, parameter[constant[~/.config]]]
return[call[name[loadf], parameter[call[name[path].join, parameter[name[config_home], call[constant[{name}.{extension}].format, parameter[]]]]]]]
|
keyword[def] identifier[read_xdg_config_home] ( identifier[name] , identifier[extension] ):
literal[string]
identifier[config_home] = identifier[environ] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[config_home] :
identifier[config_home] = identifier[path] . identifier[expanduser] ( literal[string] )
keyword[return] identifier[loadf] ( identifier[path] . identifier[join] ( identifier[config_home] , literal[string] . identifier[format] ( identifier[name] = identifier[name] , identifier[extension] = identifier[extension] )),
identifier[default] = identifier[NotConfigured] )
|
def read_xdg_config_home(name, extension):
"""
Read from file found in XDG-specified configuration home directory,
expanding to ``${HOME}/.config/name.extension`` by default. Depends on
``XDG_CONFIG_HOME`` or ``HOME`` environment variables.
:param name: application or configuration set name
:param extension: file extension to look for
:return: a `.Configuration` instance, possibly `.NotConfigured`
"""
# find optional value of ${XDG_CONFIG_HOME}
config_home = environ.get('XDG_CONFIG_HOME')
if not config_home:
# XDG spec: "If $XDG_CONFIG_HOME is either not set or empty, a default equal to $HOME/.config should be used."
# see https://specifications.freedesktop.org/basedir-spec/latest/ar01s03.html
config_home = path.expanduser('~/.config') # depends on [control=['if'], data=[]]
# expand to full path to configuration file in XDG config path
return loadf(path.join(config_home, '{name}.{extension}'.format(name=name, extension=extension)), default=NotConfigured)
|
def categorymembers(
        self,
        page: 'WikipediaPage',
        **kwargs
) -> 'PagesDict':
        """
        Returns pages in given category with respect to parameters
        API Calls for parameters:
        - https://www.mediawiki.org/w/api.php?action=help&modules=query%2Bcategorymembers
        - https://www.mediawiki.org/wiki/API:Categorymembers
        :param page: :class:`WikipediaPage`
        :param kwargs: parameters used in API call
        :return: pages in given category
        """
        params = {
            'action': 'query',
            'list': 'categorymembers',
            'cmtitle': page.title,
            'cmlimit': 500,
        }
        # Merge caller-supplied parameters with the mandatory ones; the
        # mandatory parameters are applied last so the query type and target
        # category cannot be overridden by kwargs.
        used_params = kwargs
        used_params.update(params)
        raw = self._query(
            page,
            used_params
        )
        self._common_attributes(raw['query'], page)
        v = raw['query']
        while 'continue' in raw:
            # BUG FIX: continuation requests previously used only the base
            # ``params`` dict and dropped caller-supplied kwargs (such as
            # ``cmtype`` or ``cmnamespace``), so batches after the first 500
            # members were fetched with different filters than the first one.
            # Reuse the merged dict and only add the continuation token.
            used_params['cmcontinue'] = raw['continue']['cmcontinue']
            raw = self._query(
                page,
                used_params
            )
            v['categorymembers'] += raw['query']['categorymembers']
        return self._build_categorymembers(v, page)
|
def function[categorymembers, parameter[self, page]]:
constant[
Returns pages in given category with respect to parameters
API Calls for parameters:
- https://www.mediawiki.org/w/api.php?action=help&modules=query%2Bcategorymembers
- https://www.mediawiki.org/wiki/API:Categorymembers
:param page: :class:`WikipediaPage`
:param kwargs: parameters used in API call
:return: pages in given category
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18ede69b0>, <ast.Constant object at 0x7da18ede6d70>, <ast.Constant object at 0x7da18bccaf20>, <ast.Constant object at 0x7da18bccac20>], [<ast.Constant object at 0x7da18bcc8910>, <ast.Constant object at 0x7da18bccaad0>, <ast.Attribute object at 0x7da18bcc92d0>, <ast.Constant object at 0x7da18bccbb80>]]
variable[used_params] assign[=] name[kwargs]
call[name[used_params].update, parameter[name[params]]]
variable[raw] assign[=] call[name[self]._query, parameter[name[page], name[used_params]]]
call[name[self]._common_attributes, parameter[call[name[raw]][constant[query]], name[page]]]
variable[v] assign[=] call[name[raw]][constant[query]]
while compare[constant[continue] in name[raw]] begin[:]
call[name[params]][constant[cmcontinue]] assign[=] call[call[name[raw]][constant[continue]]][constant[cmcontinue]]
variable[raw] assign[=] call[name[self]._query, parameter[name[page], name[params]]]
<ast.AugAssign object at 0x7da18bcc8e20>
return[call[name[self]._build_categorymembers, parameter[name[v], name[page]]]]
|
keyword[def] identifier[categorymembers] (
identifier[self] ,
identifier[page] : literal[string] ,
** identifier[kwargs]
)-> identifier[PagesDict] :
literal[string]
identifier[params] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[page] . identifier[title] ,
literal[string] : literal[int] ,
}
identifier[used_params] = identifier[kwargs]
identifier[used_params] . identifier[update] ( identifier[params] )
identifier[raw] = identifier[self] . identifier[_query] (
identifier[page] ,
identifier[used_params]
)
identifier[self] . identifier[_common_attributes] ( identifier[raw] [ literal[string] ], identifier[page] )
identifier[v] = identifier[raw] [ literal[string] ]
keyword[while] literal[string] keyword[in] identifier[raw] :
identifier[params] [ literal[string] ]= identifier[raw] [ literal[string] ][ literal[string] ]
identifier[raw] = identifier[self] . identifier[_query] (
identifier[page] ,
identifier[params]
)
identifier[v] [ literal[string] ]+= identifier[raw] [ literal[string] ][ literal[string] ]
keyword[return] identifier[self] . identifier[_build_categorymembers] ( identifier[v] , identifier[page] )
|
def categorymembers(self, page: 'WikipediaPage', **kwargs) -> PagesDict:
"""
Returns pages in given category with respect to parameters
API Calls for parameters:
- https://www.mediawiki.org/w/api.php?action=help&modules=query%2Bcategorymembers
- https://www.mediawiki.org/wiki/API:Categorymembers
:param page: :class:`WikipediaPage`
:param kwargs: parameters used in API call
:return: pages in given category
"""
params = {'action': 'query', 'list': 'categorymembers', 'cmtitle': page.title, 'cmlimit': 500}
used_params = kwargs
used_params.update(params)
raw = self._query(page, used_params)
self._common_attributes(raw['query'], page)
v = raw['query']
while 'continue' in raw:
params['cmcontinue'] = raw['continue']['cmcontinue']
raw = self._query(page, params)
v['categorymembers'] += raw['query']['categorymembers'] # depends on [control=['while'], data=['raw']]
return self._build_categorymembers(v, page)
|
def construct_covariance_matrix(cvec, parallax, radial_velocity, radial_velocity_error):
    """
    Build the full 6x6 astrometric+radial-velocity covariance matrix from the
    standard uncertainties and correlation coefficients as quoted in the Gaia
    catalogue.
    Parameters
    ----------
    cvec : array_like
        Array of shape (15,) (1 source) or (n,15) (n sources) for the astrometric parameter standard
        uncertainties and their correlations, as listed in the Gaia catalogue [ra_error, dec_error,
        parallax_error, pmra_error, pmdec_error, ra_dec_corr, ra_parallax_corr, ra_pmra_corr,
        ra_pmdec_corr, dec_parallax_corr, dec_pmra_corr, dec_pmdec_corr, parallax_pmra_corr,
        parallax_pmdec_corr, pmra_pmdec_corr]. Units are (mas^2, mas^2/yr, mas^2/yr^2).
    parallax : array_like (n elements)
        Source parallax (mas).
    radial_velocity : array_like (n elements)
        Source radial velocity (km/s, does not have to be from Gaia RVS!). If the radial velocity is not
        known it can be set to zero.
    radial_velocity_error : array_like (n elements)
        Source radial velocity uncertainty (km/s). If the radial velocity is not know this can be set to
        the radial velocity dispersion for the population the source was drawn from.
    Returns
    -------
    Covariance matrix as a 6x6 array.
    """
    # Normalize to 2D so the single-source and n-source cases share one code path.
    cv = np.atleast_2d(cvec)
    nsources = cv.shape[0]
    cmat = np.zeros((nsources, 6, 6))
    # Variances of the five astrometric parameters go on the diagonal.
    diag = np.arange(5)
    cmat[:, diag, diag] = cv[:, 0:5]**2
    # Off-diagonal astrometric covariances: sigma_i * sigma_j * rho_ij, where
    # the correlation coefficients cv[:, 5:] are stored in the catalogue's
    # upper-triangle (row-major) order.
    rows, cols = np.triu_indices(5, k=1)
    for n, (i, j) in enumerate(zip(rows, cols)):
        covariance = cv[:, i] * cv[:, j] * cv[:, n + 5]
        cmat[:, i, j] = covariance
        cmat[:, j, i] = covariance
    # Sixth row/column: covariances involving the radial proper motion
    # (vrad * parallax / A), propagated from the parallax column.
    vrad = np.atleast_1d(radial_velocity)
    cmat[:, 0:5, 5] = cmat[:, 0:5, 2] * vrad[:, np.newaxis] / auKmYearPerSec
    cmat[:, 5, 0:5] = cmat[:, 0:5, 5]
    cmat[:, 5, 5] = cmat[:, 2, 2] * (radial_velocity**2 + radial_velocity_error**2) / auKmYearPerSec**2 + \
        (parallax * radial_velocity_error / auKmYearPerSec)**2
    # Collapse the leading axis for the single-source case.
    return np.squeeze(cmat)
|
def function[construct_covariance_matrix, parameter[cvec, parallax, radial_velocity, radial_velocity_error]]:
constant[
Take the astrometric parameter standard uncertainties and the uncertainty correlations as quoted in
the Gaia catalogue and construct the covariance matrix.
Parameters
----------
cvec : array_like
Array of shape (15,) (1 source) or (n,15) (n sources) for the astrometric parameter standard
uncertainties and their correlations, as listed in the Gaia catalogue [ra_error, dec_error,
parallax_error, pmra_error, pmdec_error, ra_dec_corr, ra_parallax_corr, ra_pmra_corr,
ra_pmdec_corr, dec_parallax_corr, dec_pmra_corr, dec_pmdec_corr, parallax_pmra_corr,
parallax_pmdec_corr, pmra_pmdec_corr]. Units are (mas^2, mas^2/yr, mas^2/yr^2).
parallax : array_like (n elements)
Source parallax (mas).
radial_velocity : array_like (n elements)
Source radial velocity (km/s, does not have to be from Gaia RVS!). If the radial velocity is not
known it can be set to zero.
radial_velocity_error : array_like (n elements)
Source radial velocity uncertainty (km/s). If the radial velocity is not know this can be set to
the radial velocity dispersion for the population the source was drawn from.
Returns
-------
Covariance matrix as a 6x6 array.
]
if compare[call[name[np].ndim, parameter[name[cvec]]] equal[==] constant[1]] begin[:]
variable[cmat] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Constant object at 0x7da2044c2230>, <ast.Constant object at 0x7da2044c27a0>, <ast.Constant object at 0x7da2044c0040>]]]]
variable[nsources] assign[=] constant[1]
variable[cv] assign[=] call[name[np].atleast_2d, parameter[name[cvec]]]
for taget[name[k]] in starred[call[name[range], parameter[name[nsources]]]] begin[:]
call[name[cmat]][tuple[[<ast.Name object at 0x7da2044c1570>, <ast.Slice object at 0x7da2044c0e80>, <ast.Slice object at 0x7da2044c0a30>]]] assign[=] binary_operation[call[name[cv]][tuple[[<ast.Name object at 0x7da2044c3850>, <ast.Slice object at 0x7da2044c1c30>]]] ** constant[2]]
variable[iu] assign[=] call[name[np].triu_indices, parameter[constant[5]]]
for taget[name[k]] in starred[call[name[range], parameter[constant[10]]]] begin[:]
variable[i] assign[=] call[call[name[iu]][constant[0]]][name[k]]
variable[j] assign[=] call[call[name[iu]][constant[1]]][name[k]]
call[name[cmat]][tuple[[<ast.Slice object at 0x7da2044c3df0>, <ast.Name object at 0x7da2044c13c0>, <ast.Name object at 0x7da2044c2c20>]]] assign[=] binary_operation[binary_operation[call[name[cv]][tuple[[<ast.Slice object at 0x7da2044c34f0>, <ast.Name object at 0x7da2044c0310>]]] * call[name[cv]][tuple[[<ast.Slice object at 0x7da2044c1090>, <ast.Name object at 0x7da2044c2110>]]]] * call[name[cv]][tuple[[<ast.Slice object at 0x7da2044c3af0>, <ast.BinOp object at 0x7da2044c1150>]]]]
call[name[cmat]][tuple[[<ast.Slice object at 0x7da18bccafe0>, <ast.Name object at 0x7da18bcc8280>, <ast.Name object at 0x7da18bcc9360>]]] assign[=] call[name[cmat]][tuple[[<ast.Slice object at 0x7da18bcc9270>, <ast.Name object at 0x7da18bccb040>, <ast.Name object at 0x7da18bcc9d20>]]]
for taget[name[k]] in starred[call[name[range], parameter[name[nsources]]]] begin[:]
call[name[cmat]][tuple[[<ast.Name object at 0x7da18bcc8d90>, <ast.Slice object at 0x7da18bcc98d0>, <ast.Constant object at 0x7da18bcc9300>]]] assign[=] binary_operation[binary_operation[call[name[cmat]][tuple[[<ast.Name object at 0x7da18bcc94b0>, <ast.Slice object at 0x7da18bcc8eb0>, <ast.Constant object at 0x7da18bcc8fd0>]]] * call[call[name[np].atleast_1d, parameter[name[radial_velocity]]]][name[k]]] / name[auKmYearPerSec]]
call[name[cmat]][tuple[[<ast.Slice object at 0x7da18bccbd00>, <ast.Constant object at 0x7da18bccb1c0>, <ast.Slice object at 0x7da18bccacb0>]]] assign[=] call[name[cmat]][tuple[[<ast.Slice object at 0x7da18bccb580>, <ast.Slice object at 0x7da18bcca8f0>, <ast.Constant object at 0x7da18bcc9480>]]]
call[name[cmat]][tuple[[<ast.Slice object at 0x7da18bccab00>, <ast.Constant object at 0x7da18bcc8670>, <ast.Constant object at 0x7da18bccb250>]]] assign[=] binary_operation[binary_operation[binary_operation[call[name[cmat]][tuple[[<ast.Slice object at 0x7da2044c25c0>, <ast.Constant object at 0x7da2044c2b00>, <ast.Constant object at 0x7da2044c3cd0>]]] * binary_operation[binary_operation[name[radial_velocity] ** constant[2]] + binary_operation[name[radial_velocity_error] ** constant[2]]]] / binary_operation[name[auKmYearPerSec] ** constant[2]]] + binary_operation[binary_operation[binary_operation[name[parallax] * name[radial_velocity_error]] / name[auKmYearPerSec]] ** constant[2]]]
return[call[name[np].squeeze, parameter[name[cmat]]]]
|
keyword[def] identifier[construct_covariance_matrix] ( identifier[cvec] , identifier[parallax] , identifier[radial_velocity] , identifier[radial_velocity_error] ):
literal[string]
keyword[if] identifier[np] . identifier[ndim] ( identifier[cvec] )== literal[int] :
identifier[cmat] = identifier[np] . identifier[zeros] (( literal[int] , literal[int] , literal[int] ))
identifier[nsources] = literal[int]
identifier[cv] = identifier[np] . identifier[atleast_2d] ( identifier[cvec] )
keyword[else] :
identifier[nsources] = identifier[cvec] . identifier[shape] [ literal[int] ]
identifier[cmat] = identifier[np] . identifier[zeros] (( identifier[nsources] , literal[int] , literal[int] ))
identifier[cv] = identifier[cvec]
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[nsources] ):
identifier[cmat] [ identifier[k] , literal[int] : literal[int] , literal[int] : literal[int] ]= identifier[cv] [ identifier[k] , literal[int] : literal[int] ]** literal[int]
identifier[iu] = identifier[np] . identifier[triu_indices] ( literal[int] , identifier[k] = literal[int] )
keyword[for] identifier[k] keyword[in] identifier[range] ( literal[int] ):
identifier[i] = identifier[iu] [ literal[int] ][ identifier[k] ]
identifier[j] = identifier[iu] [ literal[int] ][ identifier[k] ]
identifier[cmat] [:, identifier[i] , identifier[j] ]= identifier[cv] [:, identifier[i] ]* identifier[cv] [:, identifier[j] ]* identifier[cv] [:, identifier[k] + literal[int] ]
identifier[cmat] [:, identifier[j] , identifier[i] ]= identifier[cmat] [:, identifier[i] , identifier[j] ]
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[nsources] ):
identifier[cmat] [ identifier[k] , literal[int] : literal[int] , literal[int] ]= identifier[cmat] [ identifier[k] , literal[int] : literal[int] , literal[int] ]* identifier[np] . identifier[atleast_1d] ( identifier[radial_velocity] )[ identifier[k] ]/ identifier[auKmYearPerSec]
identifier[cmat] [:, literal[int] , literal[int] : literal[int] ]= identifier[cmat] [:, literal[int] : literal[int] , literal[int] ]
identifier[cmat] [:, literal[int] , literal[int] ]= identifier[cmat] [:, literal[int] , literal[int] ]*( identifier[radial_velocity] ** literal[int] + identifier[radial_velocity_error] ** literal[int] )/ identifier[auKmYearPerSec] ** literal[int] +( identifier[parallax] * identifier[radial_velocity_error] / identifier[auKmYearPerSec] )** literal[int]
keyword[return] identifier[np] . identifier[squeeze] ( identifier[cmat] )
|
def construct_covariance_matrix(cvec, parallax, radial_velocity, radial_velocity_error):
"""
Take the astrometric parameter standard uncertainties and the uncertainty correlations as quoted in
the Gaia catalogue and construct the covariance matrix.
Parameters
----------
cvec : array_like
Array of shape (15,) (1 source) or (n,15) (n sources) for the astrometric parameter standard
uncertainties and their correlations, as listed in the Gaia catalogue [ra_error, dec_error,
parallax_error, pmra_error, pmdec_error, ra_dec_corr, ra_parallax_corr, ra_pmra_corr,
ra_pmdec_corr, dec_parallax_corr, dec_pmra_corr, dec_pmdec_corr, parallax_pmra_corr,
parallax_pmdec_corr, pmra_pmdec_corr]. Units are (mas^2, mas^2/yr, mas^2/yr^2).
parallax : array_like (n elements)
Source parallax (mas).
radial_velocity : array_like (n elements)
Source radial velocity (km/s, does not have to be from Gaia RVS!). If the radial velocity is not
known it can be set to zero.
radial_velocity_error : array_like (n elements)
Source radial velocity uncertainty (km/s). If the radial velocity is not know this can be set to
the radial velocity dispersion for the population the source was drawn from.
Returns
-------
Covariance matrix as a 6x6 array.
"""
if np.ndim(cvec) == 1:
cmat = np.zeros((1, 6, 6))
nsources = 1
cv = np.atleast_2d(cvec) # depends on [control=['if'], data=[]]
else:
nsources = cvec.shape[0]
cmat = np.zeros((nsources, 6, 6))
cv = cvec
for k in range(nsources):
cmat[k, 0:5, 0:5] = cv[k, 0:5] ** 2 # depends on [control=['for'], data=['k']]
iu = np.triu_indices(5, k=1)
for k in range(10):
i = iu[0][k]
j = iu[1][k]
cmat[:, i, j] = cv[:, i] * cv[:, j] * cv[:, k + 5]
cmat[:, j, i] = cmat[:, i, j] # depends on [control=['for'], data=['k']]
for k in range(nsources):
cmat[k, 0:5, 5] = cmat[k, 0:5, 2] * np.atleast_1d(radial_velocity)[k] / auKmYearPerSec # depends on [control=['for'], data=['k']]
cmat[:, 5, 0:5] = cmat[:, 0:5, 5]
cmat[:, 5, 5] = cmat[:, 2, 2] * (radial_velocity ** 2 + radial_velocity_error ** 2) / auKmYearPerSec ** 2 + (parallax * radial_velocity_error / auKmYearPerSec) ** 2
return np.squeeze(cmat)
|
def apply_theme(self, property_values):
    ''' Apply a set of theme values which will be used rather than
    defaults, but will not override application-set values.
    The passed-in dictionary may be kept around as-is and shared with
    other instances to save memory (so neither the caller nor the
    |HasProps| instance should modify it).
    Args:
        property_values (dict) : theme values to use in place of defaults
    Returns:
        None
    '''
    previous = self.themed_values()
    # Re-applying the identical dict object is a no-op (theme dicts are
    # shared between instances, so identity is the cheap equality test).
    if previous is property_values:
        return
    # Every key that enters or leaves the themed set may change a property's
    # effective value; record what each one evaluated to *before* the new
    # theme is installed.
    affected = set(property_values.keys())
    if previous is not None:
        affected |= set(previous.keys())
    before = {key: getattr(self, key) for key in affected}
    # Store the new theme dict, or drop the attribute entirely when the theme
    # has nothing to say about this instance (avoids keeping empty dicts).
    if property_values:
        setattr(self, '__themed_values__', property_values)
    elif hasattr(self, '__themed_values__'):
        delattr(self, '__themed_values__')
    # Invalidate any cached (possibly stale) container values for the
    # affected properties.
    for key in before:
        if key in self._unstable_themed_values:
            del self._unstable_themed_values[key]
    # Emit change notifications for properties whose value actually changed.
    for key, old in before.items():
        self.lookup(key).trigger_if_changed(self, old)
|
def function[apply_theme, parameter[self, property_values]]:
constant[ Apply a set of theme values which will be used rather than
defaults, but will not override application-set values.
The passed-in dictionary may be kept around as-is and shared with
other instances to save memory (so neither the caller nor the
|HasProps| instance should modify it).
Args:
property_values (dict) : theme values to use in place of defaults
Returns:
None
]
variable[old_dict] assign[=] call[name[self].themed_values, parameter[]]
if compare[name[old_dict] is name[property_values]] begin[:]
return[None]
variable[removed] assign[=] call[name[set], parameter[]]
if compare[name[old_dict] is_not constant[None]] begin[:]
call[name[removed].update, parameter[call[name[set], parameter[call[name[old_dict].keys, parameter[]]]]]]
variable[added] assign[=] call[name[set], parameter[call[name[property_values].keys, parameter[]]]]
variable[old_values] assign[=] call[name[dict], parameter[]]
for taget[name[k]] in starred[call[name[added].union, parameter[name[removed]]]] begin[:]
call[name[old_values]][name[k]] assign[=] call[name[getattr], parameter[name[self], name[k]]]
if compare[call[name[len], parameter[name[property_values]]] greater[>] constant[0]] begin[:]
call[name[setattr], parameter[name[self], constant[__themed_values__], name[property_values]]]
for taget[tuple[[<ast.Name object at 0x7da1b20b8a60>, <ast.Name object at 0x7da1b20bb4c0>]]] in starred[call[name[old_values].items, parameter[]]] begin[:]
if compare[name[k] in name[self]._unstable_themed_values] begin[:]
<ast.Delete object at 0x7da1b20b9660>
for taget[tuple[[<ast.Name object at 0x7da1b20ba8f0>, <ast.Name object at 0x7da1b20bbdc0>]]] in starred[call[name[old_values].items, parameter[]]] begin[:]
variable[descriptor] assign[=] call[name[self].lookup, parameter[name[k]]]
call[name[descriptor].trigger_if_changed, parameter[name[self], name[v]]]
|
keyword[def] identifier[apply_theme] ( identifier[self] , identifier[property_values] ):
literal[string]
identifier[old_dict] = identifier[self] . identifier[themed_values] ()
keyword[if] identifier[old_dict] keyword[is] identifier[property_values] :
keyword[return]
identifier[removed] = identifier[set] ()
keyword[if] identifier[old_dict] keyword[is] keyword[not] keyword[None] :
identifier[removed] . identifier[update] ( identifier[set] ( identifier[old_dict] . identifier[keys] ()))
identifier[added] = identifier[set] ( identifier[property_values] . identifier[keys] ())
identifier[old_values] = identifier[dict] ()
keyword[for] identifier[k] keyword[in] identifier[added] . identifier[union] ( identifier[removed] ):
identifier[old_values] [ identifier[k] ]= identifier[getattr] ( identifier[self] , identifier[k] )
keyword[if] identifier[len] ( identifier[property_values] )> literal[int] :
identifier[setattr] ( identifier[self] , literal[string] , identifier[property_values] )
keyword[elif] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[delattr] ( identifier[self] , literal[string] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[old_values] . identifier[items] ():
keyword[if] identifier[k] keyword[in] identifier[self] . identifier[_unstable_themed_values] :
keyword[del] identifier[self] . identifier[_unstable_themed_values] [ identifier[k] ]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[old_values] . identifier[items] ():
identifier[descriptor] = identifier[self] . identifier[lookup] ( identifier[k] )
identifier[descriptor] . identifier[trigger_if_changed] ( identifier[self] , identifier[v] )
|
def apply_theme(self, property_values):
""" Apply a set of theme values which will be used rather than
defaults, but will not override application-set values.
The passed-in dictionary may be kept around as-is and shared with
other instances to save memory (so neither the caller nor the
|HasProps| instance should modify it).
Args:
property_values (dict) : theme values to use in place of defaults
Returns:
None
"""
old_dict = self.themed_values()
# if the same theme is set again, it should reuse the same dict
if old_dict is property_values:
return # depends on [control=['if'], data=[]]
removed = set()
# we're doing a little song-and-dance to avoid storing __themed_values__ or
# an empty dict, if there's no theme that applies to this HasProps instance.
if old_dict is not None:
removed.update(set(old_dict.keys())) # depends on [control=['if'], data=['old_dict']]
added = set(property_values.keys())
old_values = dict()
for k in added.union(removed):
old_values[k] = getattr(self, k) # depends on [control=['for'], data=['k']]
if len(property_values) > 0:
setattr(self, '__themed_values__', property_values) # depends on [control=['if'], data=[]]
elif hasattr(self, '__themed_values__'):
delattr(self, '__themed_values__') # depends on [control=['if'], data=[]]
# Property container values might be cached even if unmodified. Invalidate
# any cached values that are not modified at this point.
for (k, v) in old_values.items():
if k in self._unstable_themed_values:
del self._unstable_themed_values[k] # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=[]]
# Emit any change notifications that result
for (k, v) in old_values.items():
descriptor = self.lookup(k)
descriptor.trigger_if_changed(self, v) # depends on [control=['for'], data=[]]
|
def validate_pin(pin):
    """ Validate the given pin against the schema.
    Returns None on success; only an invalid document raises.
    :param dict pin: The pin to validate:
    :raises pypebbleapi.schemas.DocumentError: If the pin is not valid.
    """
    validator = _Validator(schemas.pin)
    # Guard clause: fall through (implicitly returning None) when valid.
    if not validator.validate(pin):
        raise schemas.DocumentError(errors=validator.errors)
|
def function[validate_pin, parameter[pin]]:
constant[ Validate the given pin against the schema.
:param dict pin: The pin to validate:
:raises pypebbleapi.schemas.DocumentError: If the pin is not valid.
]
variable[v] assign[=] call[name[_Validator], parameter[name[schemas].pin]]
if call[name[v].validate, parameter[name[pin]]] begin[:]
return[None]
|
keyword[def] identifier[validate_pin] ( identifier[pin] ):
literal[string]
identifier[v] = identifier[_Validator] ( identifier[schemas] . identifier[pin] )
keyword[if] identifier[v] . identifier[validate] ( identifier[pin] ):
keyword[return]
keyword[else] :
keyword[raise] identifier[schemas] . identifier[DocumentError] ( identifier[errors] = identifier[v] . identifier[errors] )
|
def validate_pin(pin):
""" Validate the given pin against the schema.
:param dict pin: The pin to validate:
:raises pypebbleapi.schemas.DocumentError: If the pin is not valid.
"""
v = _Validator(schemas.pin)
if v.validate(pin):
return # depends on [control=['if'], data=[]]
else:
raise schemas.DocumentError(errors=v.errors)
|
def dictToH5(h5, d, link_copy=False):
    """ Save a dictionary into an hdf5 file.
    h5py is not capable of handling dictionaries natively, so the dict is
    written group-by-group via ``dictToH5Group``.
    :param h5: target filename (or anything ``h5py.File`` accepts); any
        existing file is overwritten (mode "w")
    :param d: dictionary to store
    :param link_copy: forwarded to ``dictToH5Group`` (presumably enables
        hard-linking of repeated arrays via the cache — confirm with
        ``dictToH5Group``)
    """
    global _array_cache
    # Fresh cache for this save; shared with dictToH5Group via module scope.
    _array_cache = dict()
    h5file = h5py.File(h5, mode="w")
    try:
        dictToH5Group(d, h5file["/"], link_copy=link_copy)
    finally:
        # BUG FIX: previously a failure inside dictToH5Group leaked the open
        # HDF5 file handle and left the module-level cache populated. Always
        # close the file and drop cached array references.
        h5file.close()
        _array_cache = dict()
|
def function[dictToH5, parameter[h5, d, link_copy]]:
constant[ Save a dictionary into an hdf5 file
h5py is not capable of handling dictionaries natively]
<ast.Global object at 0x7da1b0a22ad0>
variable[_array_cache] assign[=] call[name[dict], parameter[]]
variable[h5] assign[=] call[name[h5py].File, parameter[name[h5]]]
call[name[dictToH5Group], parameter[name[d], call[name[h5]][constant[/]]]]
call[name[h5].close, parameter[]]
variable[_array_cache] assign[=] call[name[dict], parameter[]]
|
keyword[def] identifier[dictToH5] ( identifier[h5] , identifier[d] , identifier[link_copy] = keyword[False] ):
literal[string]
keyword[global] identifier[_array_cache]
identifier[_array_cache] = identifier[dict] ()
identifier[h5] = identifier[h5py] . identifier[File] ( identifier[h5] , identifier[mode] = literal[string] )
identifier[dictToH5Group] ( identifier[d] , identifier[h5] [ literal[string] ], identifier[link_copy] = identifier[link_copy] )
identifier[h5] . identifier[close] ()
identifier[_array_cache] = identifier[dict] ();
|
def dictToH5(h5, d, link_copy=False):
""" Save a dictionary into an hdf5 file
h5py is not capable of handling dictionaries natively"""
global _array_cache
_array_cache = dict()
h5 = h5py.File(h5, mode='w')
dictToH5Group(d, h5['/'], link_copy=link_copy)
h5.close()
_array_cache = dict()
|
def render(self):
        """ Returns generated html code.

        Renders the Jinja2 template read from ``self.template_file`` with the
        template variables built from the fetched slide contents. When
        ``self.embed`` is true, images referenced from CSS ``url(...)``
        declarations in the output are inlined as data-URIs, looked up first
        in the theme's ``css`` directory and then next to each user CSS file.
        :return: the rendered HTML document as a string
        """
        # Read and compile the Jinja2 template (file closed by the with-block).
        with codecs.open(self.template_file, encoding=self.encoding) as template_src:
            template = jinja2.Template(template_src.read())
        slides = self.fetch_contents(self.source)
        context = self.get_template_vars(slides)
        html = template.render(context)
        if self.embed:
            # Collect every image referenced via CSS url(...) in the rendered
            # HTML; the quotes around the URL are optional and, when present,
            # may be captured along with the URL.
            images = re.findall(r'url\(["\']?(.*?\.(?:jpe?g|gif|png|svg)[\'"]?)\)',
                                html, re.DOTALL | re.UNICODE)
            for img_url in images:
                # Strip any quote characters the regex may have captured.
                img_url = img_url.replace('"', '').replace("'", '')
                # Theme images live under <theme_dir>/css; fall back to the
                # bundled theme directory when no custom theme dir is set.
                if self.theme_dir:
                    source = os.path.join(self.theme_dir, 'css')
                else:
                    source = os.path.join(THEMES_DIR, self.theme, 'css')
                encoded_url = utils.encode_image_from_url(img_url, source)
                if encoded_url:
                    # Replace only the first remaining occurrence of this URL.
                    html = html.replace(img_url, encoded_url, 1)
                    self.log("Embedded theme image %s from theme directory %s" % (img_url, source))
                else:
                    # Missing file in theme directory. Try user_css folders
                    found = False
                    for css_entry in context['user_css']:
                        # Look for the image relative to each user CSS file;
                        # an empty dirname means the CSS sits in the cwd.
                        directory = os.path.dirname(css_entry['path_url'])
                        if not directory:
                            directory = "."
                        encoded_url = utils.encode_image_from_url(img_url, directory)
                        if encoded_url:
                            found = True
                            html = html.replace(img_url, encoded_url, 1)
                            self.log("Embedded theme image %s from directory %s" % (img_url, directory))
                    if not found:
                        # Missing image file, etc...
                        self.log(u"Failed to embed theme image %s" % img_url)
        return html
|
def function[render, parameter[self]]:
constant[ Returns generated html code.
]
with call[name[codecs].open, parameter[name[self].template_file]] begin[:]
variable[template] assign[=] call[name[jinja2].Template, parameter[call[name[template_src].read, parameter[]]]]
variable[slides] assign[=] call[name[self].fetch_contents, parameter[name[self].source]]
variable[context] assign[=] call[name[self].get_template_vars, parameter[name[slides]]]
variable[html] assign[=] call[name[template].render, parameter[name[context]]]
if name[self].embed begin[:]
variable[images] assign[=] call[name[re].findall, parameter[constant[url\(["\']?(.*?\.(?:jpe?g|gif|png|svg)[\'"]?)\)], name[html], binary_operation[name[re].DOTALL <ast.BitOr object at 0x7da2590d6aa0> name[re].UNICODE]]]
for taget[name[img_url]] in starred[name[images]] begin[:]
variable[img_url] assign[=] call[call[name[img_url].replace, parameter[constant["], constant[]]].replace, parameter[constant['], constant[]]]
if name[self].theme_dir begin[:]
variable[source] assign[=] call[name[os].path.join, parameter[name[self].theme_dir, constant[css]]]
variable[encoded_url] assign[=] call[name[utils].encode_image_from_url, parameter[name[img_url], name[source]]]
if name[encoded_url] begin[:]
variable[html] assign[=] call[name[html].replace, parameter[name[img_url], name[encoded_url], constant[1]]]
call[name[self].log, parameter[binary_operation[constant[Embedded theme image %s from theme directory %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1ecdcc0>, <ast.Name object at 0x7da1b1eccc40>]]]]]
return[name[html]]
|
keyword[def] identifier[render] ( identifier[self] ):
literal[string]
keyword[with] identifier[codecs] . identifier[open] ( identifier[self] . identifier[template_file] , identifier[encoding] = identifier[self] . identifier[encoding] ) keyword[as] identifier[template_src] :
identifier[template] = identifier[jinja2] . identifier[Template] ( identifier[template_src] . identifier[read] ())
identifier[slides] = identifier[self] . identifier[fetch_contents] ( identifier[self] . identifier[source] )
identifier[context] = identifier[self] . identifier[get_template_vars] ( identifier[slides] )
identifier[html] = identifier[template] . identifier[render] ( identifier[context] )
keyword[if] identifier[self] . identifier[embed] :
identifier[images] = identifier[re] . identifier[findall] ( literal[string] ,
identifier[html] , identifier[re] . identifier[DOTALL] | identifier[re] . identifier[UNICODE] )
keyword[for] identifier[img_url] keyword[in] identifier[images] :
identifier[img_url] = identifier[img_url] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[self] . identifier[theme_dir] :
identifier[source] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[theme_dir] , literal[string] )
keyword[else] :
identifier[source] = identifier[os] . identifier[path] . identifier[join] ( identifier[THEMES_DIR] , identifier[self] . identifier[theme] , literal[string] )
identifier[encoded_url] = identifier[utils] . identifier[encode_image_from_url] ( identifier[img_url] , identifier[source] )
keyword[if] identifier[encoded_url] :
identifier[html] = identifier[html] . identifier[replace] ( identifier[img_url] , identifier[encoded_url] , literal[int] )
identifier[self] . identifier[log] ( literal[string] %( identifier[img_url] , identifier[source] ))
keyword[else] :
identifier[found] = keyword[False]
keyword[for] identifier[css_entry] keyword[in] identifier[context] [ literal[string] ]:
identifier[directory] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[css_entry] [ literal[string] ])
keyword[if] keyword[not] identifier[directory] :
identifier[directory] = literal[string]
identifier[encoded_url] = identifier[utils] . identifier[encode_image_from_url] ( identifier[img_url] , identifier[directory] )
keyword[if] identifier[encoded_url] :
identifier[found] = keyword[True]
identifier[html] = identifier[html] . identifier[replace] ( identifier[img_url] , identifier[encoded_url] , literal[int] )
identifier[self] . identifier[log] ( literal[string] %( identifier[img_url] , identifier[directory] ))
keyword[if] keyword[not] identifier[found] :
identifier[self] . identifier[log] ( literal[string] % identifier[img_url] )
keyword[return] identifier[html]
|
def render(self):
""" Returns generated html code.
"""
with codecs.open(self.template_file, encoding=self.encoding) as template_src:
template = jinja2.Template(template_src.read()) # depends on [control=['with'], data=['template_src']]
slides = self.fetch_contents(self.source)
context = self.get_template_vars(slides)
html = template.render(context)
if self.embed:
images = re.findall('url\\(["\\\']?(.*?\\.(?:jpe?g|gif|png|svg)[\\\'"]?)\\)', html, re.DOTALL | re.UNICODE)
for img_url in images:
img_url = img_url.replace('"', '').replace("'", '')
if self.theme_dir:
source = os.path.join(self.theme_dir, 'css') # depends on [control=['if'], data=[]]
else:
source = os.path.join(THEMES_DIR, self.theme, 'css')
encoded_url = utils.encode_image_from_url(img_url, source)
if encoded_url:
html = html.replace(img_url, encoded_url, 1)
self.log('Embedded theme image %s from theme directory %s' % (img_url, source)) # depends on [control=['if'], data=[]]
else:
# Missing file in theme directory. Try user_css folders
found = False
for css_entry in context['user_css']:
directory = os.path.dirname(css_entry['path_url'])
if not directory:
directory = '.' # depends on [control=['if'], data=[]]
encoded_url = utils.encode_image_from_url(img_url, directory)
if encoded_url:
found = True
html = html.replace(img_url, encoded_url, 1)
self.log('Embedded theme image %s from directory %s' % (img_url, directory)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['css_entry']]
if not found:
# Missing image file, etc...
self.log(u'Failed to embed theme image %s' % img_url) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['img_url']] # depends on [control=['if'], data=[]]
return html
|
def local_timezone(value):
    """Add the local timezone to `value` to make it aware.

    Objects that are already timezone-aware, or that have no ``tzinfo``
    attribute at all, are returned unchanged.
    """
    # getattr with a non-None sentinel: missing attribute and aware
    # datetimes both fall through untouched.
    if getattr(value, "tzinfo", False) is not None:
        return value
    return value.replace(tzinfo=dateutil.tz.tzlocal())
|
def function[local_timezone, parameter[value]]:
constant[Add the local timezone to `value` to make it aware.]
if <ast.BoolOp object at 0x7da1b23b3a00> begin[:]
return[call[name[value].replace, parameter[]]]
return[name[value]]
|
keyword[def] identifier[local_timezone] ( identifier[value] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ) keyword[and] identifier[value] . identifier[tzinfo] keyword[is] keyword[None] :
keyword[return] identifier[value] . identifier[replace] ( identifier[tzinfo] = identifier[dateutil] . identifier[tz] . identifier[tzlocal] ())
keyword[return] identifier[value]
|
def local_timezone(value):
"""Add the local timezone to `value` to make it aware."""
if hasattr(value, 'tzinfo') and value.tzinfo is None:
return value.replace(tzinfo=dateutil.tz.tzlocal()) # depends on [control=['if'], data=[]]
return value
|
async def Set(self, annotations):
'''
annotations : typing.Sequence[~EntityAnnotations]
Returns -> typing.Sequence[~ErrorResult]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Annotations',
request='Set',
version=2,
params=_params)
_params['annotations'] = annotations
reply = await self.rpc(msg)
return reply
|
<ast.AsyncFunctionDef object at 0x7da1b0dbeec0>
|
keyword[async] keyword[def] identifier[Set] ( identifier[self] , identifier[annotations] ):
literal[string]
identifier[_params] = identifier[dict] ()
identifier[msg] = identifier[dict] ( identifier[type] = literal[string] ,
identifier[request] = literal[string] ,
identifier[version] = literal[int] ,
identifier[params] = identifier[_params] )
identifier[_params] [ literal[string] ]= identifier[annotations]
identifier[reply] = keyword[await] identifier[self] . identifier[rpc] ( identifier[msg] )
keyword[return] identifier[reply]
|
async def Set(self, annotations):
"""
annotations : typing.Sequence[~EntityAnnotations]
Returns -> typing.Sequence[~ErrorResult]
"""
# map input types to rpc msg
_params = dict()
msg = dict(type='Annotations', request='Set', version=2, params=_params)
_params['annotations'] = annotations
reply = await self.rpc(msg)
return reply
|
def mirror_pull(self, **kwargs):
"""Start the pull mirroring process for the project.
Args:
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabCreateError: If the server failed to perform the request
"""
path = '/projects/%s/mirror/pull' % self.get_id()
self.manager.gitlab.http_post(path, **kwargs)
|
def function[mirror_pull, parameter[self]]:
constant[Start the pull mirroring process for the project.
Args:
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabCreateError: If the server failed to perform the request
]
variable[path] assign[=] binary_operation[constant[/projects/%s/mirror/pull] <ast.Mod object at 0x7da2590d6920> call[name[self].get_id, parameter[]]]
call[name[self].manager.gitlab.http_post, parameter[name[path]]]
|
keyword[def] identifier[mirror_pull] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[path] = literal[string] % identifier[self] . identifier[get_id] ()
identifier[self] . identifier[manager] . identifier[gitlab] . identifier[http_post] ( identifier[path] ,** identifier[kwargs] )
|
def mirror_pull(self, **kwargs):
"""Start the pull mirroring process for the project.
Args:
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabCreateError: If the server failed to perform the request
"""
path = '/projects/%s/mirror/pull' % self.get_id()
self.manager.gitlab.http_post(path, **kwargs)
|
def varchar(anon, obj, field, val):
"""
Returns random data for a varchar field.
"""
return anon.faker.varchar(field=field)
|
def function[varchar, parameter[anon, obj, field, val]]:
constant[
Returns random data for a varchar field.
]
return[call[name[anon].faker.varchar, parameter[]]]
|
keyword[def] identifier[varchar] ( identifier[anon] , identifier[obj] , identifier[field] , identifier[val] ):
literal[string]
keyword[return] identifier[anon] . identifier[faker] . identifier[varchar] ( identifier[field] = identifier[field] )
|
def varchar(anon, obj, field, val):
"""
Returns random data for a varchar field.
"""
return anon.faker.varchar(field=field)
|
def write_txt(refs):
    '''Converts references to plain text format
    '''
    parts = ['\n']
    lib_citation_desc, lib_citations = get_library_citation()

    # Library-wide citations go at the top, framed by asterisk banners.
    banner = '*' * 80 + '\n'
    parts.append(banner)
    parts.append(lib_citation_desc)
    parts.append(banner)
    for citation in lib_citations.values():
        cite_text = textwrap.indent(reference_text(citation), ' ' * 4)
        parts.append('{}\n\n'.format(cite_text))

    parts.append(banner)
    parts.append("References for the basis set\n")
    parts.append(banner)

    # One section per reference entry, headed by its element range.
    for entry in refs:
        parts.append('{}\n'.format(compact_elements(entry['elements'])))
        for info in entry['reference_info']:
            parts.append(' ## {}\n'.format(info['reference_description']))
            refdata = info['reference_data']
            if len(refdata) == 0:
                parts.append(' (...no reference...)\n\n')
            for _, citation in refdata:
                cite_text = textwrap.indent(reference_text(citation), ' ' * 4)
                parts.append('{}\n\n'.format(cite_text))

    return ''.join(parts)
|
def function[write_txt, parameter[refs]]:
constant[Converts references to plain text format
]
variable[full_str] assign[=] constant[
]
<ast.Tuple object at 0x7da2041db9d0> assign[=] call[name[get_library_citation], parameter[]]
<ast.AugAssign object at 0x7da2041d8a60>
<ast.AugAssign object at 0x7da20c6a8430>
<ast.AugAssign object at 0x7da20c6a8970>
for taget[name[r]] in starred[call[name[lib_citations].values, parameter[]]] begin[:]
variable[ref_txt] assign[=] call[name[reference_text], parameter[name[r]]]
variable[ref_txt] assign[=] call[name[textwrap].indent, parameter[name[ref_txt], binary_operation[constant[ ] * constant[4]]]]
<ast.AugAssign object at 0x7da20c6a99c0>
<ast.AugAssign object at 0x7da20c6a8760>
<ast.AugAssign object at 0x7da20c6a86d0>
<ast.AugAssign object at 0x7da20c6ab670>
for taget[name[ref]] in starred[name[refs]] begin[:]
<ast.AugAssign object at 0x7da20c6a9a20>
for taget[name[ri]] in starred[call[name[ref]][constant[reference_info]]] begin[:]
<ast.AugAssign object at 0x7da20c6aab60>
variable[refdata] assign[=] call[name[ri]][constant[reference_data]]
if compare[call[name[len], parameter[name[refdata]]] equal[==] constant[0]] begin[:]
<ast.AugAssign object at 0x7da20c6aa5f0>
for taget[tuple[[<ast.Name object at 0x7da20c6a9690>, <ast.Name object at 0x7da20c6a9270>]]] in starred[name[refdata]] begin[:]
variable[ref_txt] assign[=] call[name[reference_text], parameter[name[r]]]
variable[ref_txt] assign[=] call[name[textwrap].indent, parameter[name[ref_txt], binary_operation[constant[ ] * constant[4]]]]
<ast.AugAssign object at 0x7da20c6a9f90>
return[name[full_str]]
|
keyword[def] identifier[write_txt] ( identifier[refs] ):
literal[string]
identifier[full_str] = literal[string]
identifier[lib_citation_desc] , identifier[lib_citations] = identifier[get_library_citation] ()
identifier[full_str] += literal[string] * literal[int] + literal[string]
identifier[full_str] += identifier[lib_citation_desc]
identifier[full_str] += literal[string] * literal[int] + literal[string]
keyword[for] identifier[r] keyword[in] identifier[lib_citations] . identifier[values] ():
identifier[ref_txt] = identifier[reference_text] ( identifier[r] )
identifier[ref_txt] = identifier[textwrap] . identifier[indent] ( identifier[ref_txt] , literal[string] * literal[int] )
identifier[full_str] += literal[string] . identifier[format] ( identifier[ref_txt] )
identifier[full_str] += literal[string] * literal[int] + literal[string]
identifier[full_str] += literal[string]
identifier[full_str] += literal[string] * literal[int] + literal[string]
keyword[for] identifier[ref] keyword[in] identifier[refs] :
identifier[full_str] += literal[string] . identifier[format] ( identifier[compact_elements] ( identifier[ref] [ literal[string] ]))
keyword[for] identifier[ri] keyword[in] identifier[ref] [ literal[string] ]:
identifier[full_str] += literal[string] . identifier[format] ( identifier[ri] [ literal[string] ])
identifier[refdata] = identifier[ri] [ literal[string] ]
keyword[if] identifier[len] ( identifier[refdata] )== literal[int] :
identifier[full_str] += literal[string]
keyword[for] identifier[k] , identifier[r] keyword[in] identifier[refdata] :
identifier[ref_txt] = identifier[reference_text] ( identifier[r] )
identifier[ref_txt] = identifier[textwrap] . identifier[indent] ( identifier[ref_txt] , literal[string] * literal[int] )
identifier[full_str] += literal[string] . identifier[format] ( identifier[ref_txt] )
keyword[return] identifier[full_str]
|
def write_txt(refs):
"""Converts references to plain text format
"""
full_str = '\n'
(lib_citation_desc, lib_citations) = get_library_citation()
# Add the refs for the libarary at the top
full_str += '*' * 80 + '\n'
full_str += lib_citation_desc
full_str += '*' * 80 + '\n'
for r in lib_citations.values():
ref_txt = reference_text(r)
ref_txt = textwrap.indent(ref_txt, ' ' * 4)
full_str += '{}\n\n'.format(ref_txt) # depends on [control=['for'], data=['r']]
full_str += '*' * 80 + '\n'
full_str += 'References for the basis set\n'
full_str += '*' * 80 + '\n'
for ref in refs:
full_str += '{}\n'.format(compact_elements(ref['elements']))
for ri in ref['reference_info']:
full_str += ' ## {}\n'.format(ri['reference_description'])
refdata = ri['reference_data']
if len(refdata) == 0:
full_str += ' (...no reference...)\n\n' # depends on [control=['if'], data=[]]
for (k, r) in refdata:
ref_txt = reference_text(r)
ref_txt = textwrap.indent(ref_txt, ' ' * 4)
full_str += '{}\n\n'.format(ref_txt) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['ri']] # depends on [control=['for'], data=['ref']]
return full_str
|
def L(self):
        r"""Cholesky decomposition of :math:`\mathrm B`.
        .. math::
            \mathrm B = \mathrm Q^{\intercal}\tilde{\mathrm{T}}\mathrm Q
                + \mathrm{S}^{-1}

        The factorisation is memoised in ``self._L_cache`` and only
        recomputed when the cache is empty.  B is built in-place in the
        preallocated ``self._NxR`` / ``self._RxR`` buffers.
        """
        from numpy_sugar.linalg import ddot, sum2diag
        # Fast path: reuse the cached factorisation.
        if self._L_cache is not None:
            return self._L_cache
        s = self._cov["scale"]  # covariance scale factor
        d = self._cov["delta"]  # mixing weight between the two components
        # QS presumably holds an eigen-style decomposition (Q, S) of the
        # covariance — TODO confirm against the _cov producer.
        Q = self._cov["QS"][0][0]
        S = self._cov["QS"][1]
        # NxR <- diag(A * tau) @ Q, written into the preallocated buffer.
        ddot(self.A * self._site.tau, Q, left=True, out=self._NxR)
        # RxR <- Q^T @ NxR, i.e. Q^T diag(A*tau) Q.
        B = dot(Q.T, self._NxR, out=self._RxR)
        B *= 1 - d
        # Add S^{-1}/s to the diagonal (in place) to complete B.
        sum2diag(B, 1.0 / S / s, out=B)
        self._L_cache = _cho_factor(B)
        return self._L_cache
|
def function[L, parameter[self]]:
constant[Cholesky decomposition of :math:`\mathrm B`.
.. math::
\mathrm B = \mathrm Q^{\intercal}\tilde{\mathrm{T}}\mathrm Q
+ \mathrm{S}^{-1}
]
from relative_module[numpy_sugar.linalg] import module[ddot], module[sum2diag]
if compare[name[self]._L_cache is_not constant[None]] begin[:]
return[name[self]._L_cache]
variable[s] assign[=] call[name[self]._cov][constant[scale]]
variable[d] assign[=] call[name[self]._cov][constant[delta]]
variable[Q] assign[=] call[call[call[name[self]._cov][constant[QS]]][constant[0]]][constant[0]]
variable[S] assign[=] call[call[name[self]._cov][constant[QS]]][constant[1]]
call[name[ddot], parameter[binary_operation[name[self].A * name[self]._site.tau], name[Q]]]
variable[B] assign[=] call[name[dot], parameter[name[Q].T, name[self]._NxR]]
<ast.AugAssign object at 0x7da1b00fa350>
call[name[sum2diag], parameter[name[B], binary_operation[binary_operation[constant[1.0] / name[S]] / name[s]]]]
name[self]._L_cache assign[=] call[name[_cho_factor], parameter[name[B]]]
return[name[self]._L_cache]
|
keyword[def] identifier[L] ( identifier[self] ):
literal[string]
keyword[from] identifier[numpy_sugar] . identifier[linalg] keyword[import] identifier[ddot] , identifier[sum2diag]
keyword[if] identifier[self] . identifier[_L_cache] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_L_cache]
identifier[s] = identifier[self] . identifier[_cov] [ literal[string] ]
identifier[d] = identifier[self] . identifier[_cov] [ literal[string] ]
identifier[Q] = identifier[self] . identifier[_cov] [ literal[string] ][ literal[int] ][ literal[int] ]
identifier[S] = identifier[self] . identifier[_cov] [ literal[string] ][ literal[int] ]
identifier[ddot] ( identifier[self] . identifier[A] * identifier[self] . identifier[_site] . identifier[tau] , identifier[Q] , identifier[left] = keyword[True] , identifier[out] = identifier[self] . identifier[_NxR] )
identifier[B] = identifier[dot] ( identifier[Q] . identifier[T] , identifier[self] . identifier[_NxR] , identifier[out] = identifier[self] . identifier[_RxR] )
identifier[B] *= literal[int] - identifier[d]
identifier[sum2diag] ( identifier[B] , literal[int] / identifier[S] / identifier[s] , identifier[out] = identifier[B] )
identifier[self] . identifier[_L_cache] = identifier[_cho_factor] ( identifier[B] )
keyword[return] identifier[self] . identifier[_L_cache]
|
def L(self):
"""Cholesky decomposition of :math:`\\mathrm B`.
.. math::
\\mathrm B = \\mathrm Q^{\\intercal}\\tilde{\\mathrm{T}}\\mathrm Q
+ \\mathrm{S}^{-1}
"""
from numpy_sugar.linalg import ddot, sum2diag
if self._L_cache is not None:
return self._L_cache # depends on [control=['if'], data=[]]
s = self._cov['scale']
d = self._cov['delta']
Q = self._cov['QS'][0][0]
S = self._cov['QS'][1]
ddot(self.A * self._site.tau, Q, left=True, out=self._NxR)
B = dot(Q.T, self._NxR, out=self._RxR)
B *= 1 - d
sum2diag(B, 1.0 / S / s, out=B)
self._L_cache = _cho_factor(B)
return self._L_cache
|
def on_trial_remove(self, trial_runner, trial):
"""Notification when trial terminates.
Trial info is removed from bracket. Triggers halving if bracket is
not finished."""
bracket, _ = self._trial_info[trial]
bracket.cleanup_trial(trial)
if not bracket.finished():
self._process_bracket(trial_runner, bracket, trial)
|
def function[on_trial_remove, parameter[self, trial_runner, trial]]:
constant[Notification when trial terminates.
Trial info is removed from bracket. Triggers halving if bracket is
not finished.]
<ast.Tuple object at 0x7da18f58eec0> assign[=] call[name[self]._trial_info][name[trial]]
call[name[bracket].cleanup_trial, parameter[name[trial]]]
if <ast.UnaryOp object at 0x7da18f58e410> begin[:]
call[name[self]._process_bracket, parameter[name[trial_runner], name[bracket], name[trial]]]
|
keyword[def] identifier[on_trial_remove] ( identifier[self] , identifier[trial_runner] , identifier[trial] ):
literal[string]
identifier[bracket] , identifier[_] = identifier[self] . identifier[_trial_info] [ identifier[trial] ]
identifier[bracket] . identifier[cleanup_trial] ( identifier[trial] )
keyword[if] keyword[not] identifier[bracket] . identifier[finished] ():
identifier[self] . identifier[_process_bracket] ( identifier[trial_runner] , identifier[bracket] , identifier[trial] )
|
def on_trial_remove(self, trial_runner, trial):
"""Notification when trial terminates.
Trial info is removed from bracket. Triggers halving if bracket is
not finished."""
(bracket, _) = self._trial_info[trial]
bracket.cleanup_trial(trial)
if not bracket.finished():
self._process_bracket(trial_runner, bracket, trial) # depends on [control=['if'], data=[]]
|
def input(input_id, name, value_class=NumberValue):
    """Add input to controller"""
    def _make_value():
        # Build the value instance that will be attached to the class.
        # index=-1 presumably means "unassigned" — confirm in value_class.
        return value_class(name, input_id=input_id, is_input=True, index=-1)

    def _decorator(cls):
        setattr(cls, input_id, _make_value())
        return cls

    return _decorator
|
def function[input, parameter[input_id, name, value_class]]:
constant[Add input to controller]
def function[_init, parameter[]]:
return[call[name[value_class], parameter[name[name]]]]
def function[_decorator, parameter[cls]]:
call[name[setattr], parameter[name[cls], name[input_id], call[name[_init], parameter[]]]]
return[name[cls]]
return[name[_decorator]]
|
keyword[def] identifier[input] ( identifier[input_id] , identifier[name] , identifier[value_class] = identifier[NumberValue] ):
literal[string]
keyword[def] identifier[_init] ():
keyword[return] identifier[value_class] (
identifier[name] ,
identifier[input_id] = identifier[input_id] ,
identifier[is_input] = keyword[True] ,
identifier[index] =- literal[int]
)
keyword[def] identifier[_decorator] ( identifier[cls] ):
identifier[setattr] ( identifier[cls] , identifier[input_id] , identifier[_init] ())
keyword[return] identifier[cls]
keyword[return] identifier[_decorator]
|
def input(input_id, name, value_class=NumberValue):
"""Add input to controller"""
def _init():
return value_class(name, input_id=input_id, is_input=True, index=-1)
def _decorator(cls):
setattr(cls, input_id, _init())
return cls
return _decorator
|
def load_fixture(filename, kind, post_processor=None):
    """
    Loads a file into entities of a given class, run the post_processor on each
    instance before it's saved.

    Args:
        filename: Path of a JSON file describing the entities to create.
        kind: Model class to instantiate, or a mapping from '__kind__'
            names to classes when the fixture mixes entity types.
        post_processor: Optional callable invoked with each instance
            before it is persisted via ``put()``.

    Returns:
        List of all persisted entities, children included.
    """
    def _load(od, kind, post_processor, parent=None, presets=None):
        """
        Loads a single dictionary (od) into an object, overlays the values in
        presets, persists it and
        calls itself on the objects in __children__* keys
        """
        # Fresh dict per call: a `presets={}` default would be shared
        # across invocations (mutable-default pitfall).
        if presets is None:
            presets = {}
        if hasattr(kind, 'keys'):  # kind is a map of '__kind__' -> class
            objtype = kind[od['__kind__']]
        else:
            objtype = kind
        obj_id = od.get('__id__')
        if obj_id is not None:
            obj = objtype(id=obj_id, parent=parent)
        else:
            obj = objtype(parent=parent)
        # Iterate over the non-special attributes and overlay the presets.
        # list(presets) is required on Python 3: dict.keys() views cannot
        # be concatenated to a list (the original raised TypeError).
        plain_keys = [k for k in od.keys()
                      if not k.startswith('__') and not k.endswith('__')]
        for attribute_name in plain_keys + list(presets):
            attribute_type = objtype.__dict__[attribute_name]
            attribute_value = _sensible_value(attribute_type,
                                              presets.get(
                                                  attribute_name,
                                                  od.get(attribute_name)))
            obj.__dict__['_values'][attribute_name] = attribute_value
        if post_processor:
            post_processor(obj)
        # Saving obj is required to continue with the children
        obj.put()
        loaded = [obj]
        # Process ancestor-based __children__
        for item in od.get('__children__', []):
            loaded.extend(_load(item, kind, post_processor, parent=obj.key))
        # Process other __children__[key]__ items
        for child_attribute_name in [k for k in od.keys()
                                     if k.startswith('__children__')
                                     and k != '__children__']:
            attribute_name = child_attribute_name.split('__')[-2]
            for child in od[child_attribute_name]:
                loaded.extend(_load(child, kind, post_processor,
                                    presets={attribute_name: obj.key}))
        return loaded

    # Close the file deterministically; the original json.load(open(...))
    # leaked the handle until garbage collection.
    with open(filename) as fixture_file:
        tree = json.load(fixture_file)
    loaded = []
    # Start with the top-level of the tree
    for item in tree:
        loaded.extend(_load(item, kind, post_processor))
    return loaded
|
def function[load_fixture, parameter[filename, kind, post_processor]]:
constant[
Loads a file into entities of a given class, run the post_processor on each
instance before it's saved
]
def function[_load, parameter[od, kind, post_processor, parent, presets]]:
constant[
Loads a single dictionary (od) into an object, overlays the values in
presets, persists it and
calls itself on the objects in __children__* keys
]
if call[name[hasattr], parameter[name[kind], constant[keys]]] begin[:]
variable[objtype] assign[=] call[name[kind]][call[name[od]][constant[__kind__]]]
variable[obj_id] assign[=] call[name[od].get, parameter[constant[__id__]]]
if compare[name[obj_id] is_not constant[None]] begin[:]
variable[obj] assign[=] call[name[objtype], parameter[]]
for taget[name[attribute_name]] in starred[binary_operation[<ast.ListComp object at 0x7da1b16051b0> + call[name[presets].keys, parameter[]]]] begin[:]
variable[attribute_type] assign[=] call[name[objtype].__dict__][name[attribute_name]]
variable[attribute_value] assign[=] call[name[_sensible_value], parameter[name[attribute_type], call[name[presets].get, parameter[name[attribute_name], call[name[od].get, parameter[name[attribute_name]]]]]]]
call[call[name[obj].__dict__][constant[_values]]][name[attribute_name]] assign[=] name[attribute_value]
if name[post_processor] begin[:]
call[name[post_processor], parameter[name[obj]]]
call[name[obj].put, parameter[]]
variable[loaded] assign[=] list[[<ast.Name object at 0x7da1b1605d20>]]
for taget[name[item]] in starred[call[name[od].get, parameter[constant[__children__], list[[]]]]] begin[:]
call[name[loaded].extend, parameter[call[name[_load], parameter[name[item], name[kind], name[post_processor]]]]]
for taget[name[child_attribute_name]] in starred[<ast.ListComp object at 0x7da1b16404c0>] begin[:]
variable[attribute_name] assign[=] call[call[name[child_attribute_name].split, parameter[constant[__]]]][<ast.UnaryOp object at 0x7da1b16435b0>]
for taget[name[child]] in starred[call[name[od]][name[child_attribute_name]]] begin[:]
call[name[loaded].extend, parameter[call[name[_load], parameter[name[child], name[kind], name[post_processor]]]]]
return[name[loaded]]
variable[tree] assign[=] call[name[json].load, parameter[call[name[open], parameter[name[filename]]]]]
variable[loaded] assign[=] list[[]]
for taget[name[item]] in starred[name[tree]] begin[:]
call[name[loaded].extend, parameter[call[name[_load], parameter[name[item], name[kind], name[post_processor]]]]]
return[name[loaded]]
|
keyword[def] identifier[load_fixture] ( identifier[filename] , identifier[kind] , identifier[post_processor] = keyword[None] ):
literal[string]
keyword[def] identifier[_load] ( identifier[od] , identifier[kind] , identifier[post_processor] , identifier[parent] = keyword[None] , identifier[presets] ={}):
literal[string]
keyword[if] identifier[hasattr] ( identifier[kind] , literal[string] ):
identifier[objtype] = identifier[kind] [ identifier[od] [ literal[string] ]]
keyword[else] :
identifier[objtype] = identifier[kind]
identifier[obj_id] = identifier[od] . identifier[get] ( literal[string] )
keyword[if] identifier[obj_id] keyword[is] keyword[not] keyword[None] :
identifier[obj] = identifier[objtype] ( identifier[id] = identifier[obj_id] , identifier[parent] = identifier[parent] )
keyword[else] :
identifier[obj] = identifier[objtype] ( identifier[parent] = identifier[parent] )
keyword[for] identifier[attribute_name] keyword[in] [ identifier[k] keyword[for] identifier[k] keyword[in] identifier[od] . identifier[keys] ()
keyword[if] keyword[not] identifier[k] . identifier[startswith] ( literal[string] ) keyword[and]
keyword[not] identifier[k] . identifier[endswith] ( literal[string] )]+ identifier[presets] . identifier[keys] ():
identifier[attribute_type] = identifier[objtype] . identifier[__dict__] [ identifier[attribute_name] ]
identifier[attribute_value] = identifier[_sensible_value] ( identifier[attribute_type] ,
identifier[presets] . identifier[get] (
identifier[attribute_name] ,
identifier[od] . identifier[get] ( identifier[attribute_name] )))
identifier[obj] . identifier[__dict__] [ literal[string] ][ identifier[attribute_name] ]= identifier[attribute_value]
keyword[if] identifier[post_processor] :
identifier[post_processor] ( identifier[obj] )
identifier[obj] . identifier[put] ()
identifier[loaded] =[ identifier[obj] ]
keyword[for] identifier[item] keyword[in] identifier[od] . identifier[get] ( literal[string] ,[]):
identifier[loaded] . identifier[extend] ( identifier[_load] ( identifier[item] , identifier[kind] , identifier[post_processor] , identifier[parent] = identifier[obj] . identifier[key] ))
keyword[for] identifier[child_attribute_name] keyword[in] [ identifier[k] keyword[for] identifier[k] keyword[in] identifier[od] . identifier[keys] ()
keyword[if] identifier[k] . identifier[startswith] ( literal[string] )
keyword[and] identifier[k] != literal[string] ]:
identifier[attribute_name] = identifier[child_attribute_name] . identifier[split] ( literal[string] )[- literal[int] ]
keyword[for] identifier[child] keyword[in] identifier[od] [ identifier[child_attribute_name] ]:
identifier[loaded] . identifier[extend] ( identifier[_load] ( identifier[child] , identifier[kind] , identifier[post_processor] ,
identifier[presets] ={ identifier[attribute_name] : identifier[obj] . identifier[key] }))
keyword[return] identifier[loaded]
identifier[tree] = identifier[json] . identifier[load] ( identifier[open] ( identifier[filename] ))
identifier[loaded] =[]
keyword[for] identifier[item] keyword[in] identifier[tree] :
identifier[loaded] . identifier[extend] ( identifier[_load] ( identifier[item] , identifier[kind] , identifier[post_processor] ))
keyword[return] identifier[loaded]
|
def load_fixture(filename, kind, post_processor=None):
    """Load a JSON fixture file into datastore entities.

    Args:
        filename: path to a JSON file containing a list of entity dicts.
        kind: an entity class, or a mapping from '__kind__' values to
            entity classes.
        post_processor: optional callable invoked on each instance before
            it is saved.

    Returns:
        A flat list of every entity that was created (children included).
    """
    def _load(od, kind, post_processor, parent=None, presets=None):
        """Load a single dict (od) into an object, overlay the values in
        presets, persist it, then recurse into the __children__* keys."""
        # Normalize here instead of using a mutable default argument,
        # which would be shared across calls.
        if presets is None:
            presets = {}
        if hasattr(kind, 'keys'):  # kind is a map of '__kind__' -> class
            objtype = kind[od['__kind__']]
        else:
            objtype = kind
        obj_id = od.get('__id__')
        if obj_id is not None:
            obj = objtype(id=obj_id, parent=parent)
        else:
            obj = objtype(parent=parent)
        # Iterate over the non-special attributes and overlay the presets.
        # list(presets) keeps this working on Python 3, where dict.keys()
        # returns a view that cannot be concatenated to a list.
        for attribute_name in [k for k in od.keys()
                               if not k.startswith('__')
                               and not k.endswith('__')] + list(presets):
            attribute_type = objtype.__dict__[attribute_name]
            attribute_value = _sensible_value(attribute_type,
                                              presets.get(
                                                  attribute_name,
                                                  od.get(attribute_name)))
            obj.__dict__['_values'][attribute_name] = attribute_value
        if post_processor:
            post_processor(obj)
        # Saving obj is required to continue with the children: they need
        # obj.key as their parent/reference.
        obj.put()
        loaded = [obj]
        # Process ancestor-based __children__.
        for item in od.get('__children__', []):
            loaded.extend(_load(item, kind, post_processor, parent=obj.key))
        # Process __children__<attr>__ items, which reference the parent
        # through a named attribute instead of datastore ancestry.
        for child_attribute_name in [k for k in od.keys()
                                     if k.startswith('__children__')
                                     and k != '__children__']:
            attribute_name = child_attribute_name.split('__')[-2]
            for child in od[child_attribute_name]:
                loaded.extend(_load(child, kind, post_processor,
                                    presets={attribute_name: obj.key}))
        return loaded

    # Context manager guarantees the file handle is closed even on a
    # JSON parse error (the original leaked the open handle).
    with open(filename) as fixture_file:
        tree = json.load(fixture_file)
    loaded = []
    # Start with the top-level of the tree.
    for item in tree:
        loaded.extend(_load(item, kind, post_processor))
    return loaded
|
def _merge_array(lhs, rhs, type_):
    """Helper for '_merge_by_type': merge two array-typed Values."""
    element_type = type_.array_element_type
    if element_type.code in _UNMERGEABLE_TYPES:
        # Individual values cannot be merged, just concatenate
        lhs.list_value.values.extend(rhs.list_value.values)
        return lhs
    left = list(lhs.list_value.values)
    right = list(rhs.list_value.values)
    # Short-circuit when either side is empty: there is no boundary
    # element to stitch, so this is effectively a no-op concatenation.
    if not (left and right):
        return Value(list_value=ListValue(values=(left + right)))
    head = right.pop(0)
    if head.HasField("null_value"):
        # A NULL boundary element can never be merged with the tail.
        left.append(head)
    else:
        tail = left.pop()
        try:
            left.append(_merge_by_type(tail, head, element_type))
        except Unmergeable:
            # Boundary elements are complete values; keep them separate.
            left.extend([tail, head])
    return Value(list_value=ListValue(values=(left + right)))
|
def function[_merge_array, parameter[lhs, rhs, type_]]:
constant[Helper for '_merge_by_type'.]
variable[element_type] assign[=] name[type_].array_element_type
if compare[name[element_type].code in name[_UNMERGEABLE_TYPES]] begin[:]
call[name[lhs].list_value.values.extend, parameter[name[rhs].list_value.values]]
return[name[lhs]]
<ast.Tuple object at 0x7da20e9b1750> assign[=] tuple[[<ast.Call object at 0x7da20e9b3820>, <ast.Call object at 0x7da20e9b07f0>]]
if <ast.BoolOp object at 0x7da2045676a0> begin[:]
return[call[name[Value], parameter[]]]
variable[first] assign[=] call[name[rhs].pop, parameter[constant[0]]]
if call[name[first].HasField, parameter[constant[null_value]]] begin[:]
call[name[lhs].append, parameter[name[first]]]
return[call[name[Value], parameter[]]]
|
keyword[def] identifier[_merge_array] ( identifier[lhs] , identifier[rhs] , identifier[type_] ):
literal[string]
identifier[element_type] = identifier[type_] . identifier[array_element_type]
keyword[if] identifier[element_type] . identifier[code] keyword[in] identifier[_UNMERGEABLE_TYPES] :
identifier[lhs] . identifier[list_value] . identifier[values] . identifier[extend] ( identifier[rhs] . identifier[list_value] . identifier[values] )
keyword[return] identifier[lhs]
identifier[lhs] , identifier[rhs] = identifier[list] ( identifier[lhs] . identifier[list_value] . identifier[values] ), identifier[list] ( identifier[rhs] . identifier[list_value] . identifier[values] )
keyword[if] keyword[not] identifier[len] ( identifier[lhs] ) keyword[or] keyword[not] identifier[len] ( identifier[rhs] ):
keyword[return] identifier[Value] ( identifier[list_value] = identifier[ListValue] ( identifier[values] =( identifier[lhs] + identifier[rhs] )))
identifier[first] = identifier[rhs] . identifier[pop] ( literal[int] )
keyword[if] identifier[first] . identifier[HasField] ( literal[string] ):
identifier[lhs] . identifier[append] ( identifier[first] )
keyword[else] :
identifier[last] = identifier[lhs] . identifier[pop] ()
keyword[try] :
identifier[merged] = identifier[_merge_by_type] ( identifier[last] , identifier[first] , identifier[element_type] )
keyword[except] identifier[Unmergeable] :
identifier[lhs] . identifier[append] ( identifier[last] )
identifier[lhs] . identifier[append] ( identifier[first] )
keyword[else] :
identifier[lhs] . identifier[append] ( identifier[merged] )
keyword[return] identifier[Value] ( identifier[list_value] = identifier[ListValue] ( identifier[values] =( identifier[lhs] + identifier[rhs] )))
|
def _merge_array(lhs, rhs, type_):
"""Helper for '_merge_by_type'."""
element_type = type_.array_element_type
if element_type.code in _UNMERGEABLE_TYPES:
# Individual values cannot be merged, just concatenate
lhs.list_value.values.extend(rhs.list_value.values)
return lhs # depends on [control=['if'], data=[]]
(lhs, rhs) = (list(lhs.list_value.values), list(rhs.list_value.values))
# Sanity check: If either list is empty, short-circuit.
# This is effectively a no-op.
if not len(lhs) or not len(rhs):
return Value(list_value=ListValue(values=lhs + rhs)) # depends on [control=['if'], data=[]]
first = rhs.pop(0)
if first.HasField('null_value'): # can't merge
lhs.append(first) # depends on [control=['if'], data=[]]
else:
last = lhs.pop()
try:
merged = _merge_by_type(last, first, element_type) # depends on [control=['try'], data=[]]
except Unmergeable:
lhs.append(last)
lhs.append(first) # depends on [control=['except'], data=[]]
else:
lhs.append(merged)
return Value(list_value=ListValue(values=lhs + rhs))
|
def update_customer(self, customer_id, **kwargs):
    """Update a customer and return the refreshed FastlyCustomer."""
    form_body = self._formdata(kwargs, FastlyCustomer.FIELDS)
    raw = self._fetch("/customer/%s" % customer_id, method="PUT",
                      body=form_body)
    return FastlyCustomer(self, raw)
|
def function[update_customer, parameter[self, customer_id]]:
constant[Update a customer.]
variable[body] assign[=] call[name[self]._formdata, parameter[name[kwargs], name[FastlyCustomer].FIELDS]]
variable[content] assign[=] call[name[self]._fetch, parameter[binary_operation[constant[/customer/%s] <ast.Mod object at 0x7da2590d6920> name[customer_id]]]]
return[call[name[FastlyCustomer], parameter[name[self], name[content]]]]
|
keyword[def] identifier[update_customer] ( identifier[self] , identifier[customer_id] ,** identifier[kwargs] ):
literal[string]
identifier[body] = identifier[self] . identifier[_formdata] ( identifier[kwargs] , identifier[FastlyCustomer] . identifier[FIELDS] )
identifier[content] = identifier[self] . identifier[_fetch] ( literal[string] % identifier[customer_id] , identifier[method] = literal[string] , identifier[body] = identifier[body] )
keyword[return] identifier[FastlyCustomer] ( identifier[self] , identifier[content] )
|
def update_customer(self, customer_id, **kwargs):
"""Update a customer."""
body = self._formdata(kwargs, FastlyCustomer.FIELDS)
content = self._fetch('/customer/%s' % customer_id, method='PUT', body=body)
return FastlyCustomer(self, content)
|
def _volume_get(self, volume_id):
    """Return a dict describing the cinder volume with the given id.

    Raises SaltCloudSystemExit when no cinder endpoint is configured.
    """
    if self.volume_conn is None:
        raise SaltCloudSystemExit('No cinder endpoint available')
    volume = self.volume_conn.volumes.get(volume_id)
    return {
        'name': volume.display_name,
        'size': volume.size,
        'id': volume.id,
        'description': volume.display_description,
        'attachments': volume.attachments,
        'status': volume.status,
    }
|
def function[_volume_get, parameter[self, volume_id]]:
constant[
Organize information about a volume from the volume_id
]
if compare[name[self].volume_conn is constant[None]] begin[:]
<ast.Raise object at 0x7da1b21a04f0>
variable[nt_ks] assign[=] name[self].volume_conn
variable[volume] assign[=] call[name[nt_ks].volumes.get, parameter[name[volume_id]]]
variable[response] assign[=] dictionary[[<ast.Constant object at 0x7da1b21ef2e0>, <ast.Constant object at 0x7da1b21ee770>, <ast.Constant object at 0x7da1b21ee7a0>, <ast.Constant object at 0x7da1b21ee470>, <ast.Constant object at 0x7da1b21ed150>, <ast.Constant object at 0x7da1b21edba0>], [<ast.Attribute object at 0x7da1b21ed5a0>, <ast.Attribute object at 0x7da1b21ec9a0>, <ast.Attribute object at 0x7da1b21ee050>, <ast.Attribute object at 0x7da1b21ec040>, <ast.Attribute object at 0x7da1b21ef220>, <ast.Attribute object at 0x7da1b21ee530>]]
return[name[response]]
|
keyword[def] identifier[_volume_get] ( identifier[self] , identifier[volume_id] ):
literal[string]
keyword[if] identifier[self] . identifier[volume_conn] keyword[is] keyword[None] :
keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] )
identifier[nt_ks] = identifier[self] . identifier[volume_conn]
identifier[volume] = identifier[nt_ks] . identifier[volumes] . identifier[get] ( identifier[volume_id] )
identifier[response] ={ literal[string] : identifier[volume] . identifier[display_name] ,
literal[string] : identifier[volume] . identifier[size] ,
literal[string] : identifier[volume] . identifier[id] ,
literal[string] : identifier[volume] . identifier[display_description] ,
literal[string] : identifier[volume] . identifier[attachments] ,
literal[string] : identifier[volume] . identifier[status]
}
keyword[return] identifier[response]
|
def _volume_get(self, volume_id):
"""
Organize information about a volume from the volume_id
"""
if self.volume_conn is None:
raise SaltCloudSystemExit('No cinder endpoint available') # depends on [control=['if'], data=[]]
nt_ks = self.volume_conn
volume = nt_ks.volumes.get(volume_id)
response = {'name': volume.display_name, 'size': volume.size, 'id': volume.id, 'description': volume.display_description, 'attachments': volume.attachments, 'status': volume.status}
return response
|
def has_address(self, address):
    """Return True if *address* is on the don't-send list.

    The comparison is case-insensitive (``iexact``).
    """
    return self.filter(to_address__iexact=address).exists()
|
def function[has_address, parameter[self, address]]:
constant[
is the given address on the don't send list?
]
variable[queryset] assign[=] call[name[self].filter, parameter[]]
return[call[name[queryset].exists, parameter[]]]
|
keyword[def] identifier[has_address] ( identifier[self] , identifier[address] ):
literal[string]
identifier[queryset] = identifier[self] . identifier[filter] ( identifier[to_address__iexact] = identifier[address] )
keyword[return] identifier[queryset] . identifier[exists] ()
|
def has_address(self, address):
"""
is the given address on the don't send list?
"""
queryset = self.filter(to_address__iexact=address)
return queryset.exists()
|
def remove_wirevector(self, wirevector):
    """Remove a wirevector object from the block.

    Drops it from both the block's wirevector set and the
    name-to-wirevector lookup table.
    """
    self.wirevector_set.remove(wirevector)
    self.wirevector_by_name.pop(wirevector.name)
|
def function[remove_wirevector, parameter[self, wirevector]]:
constant[ Remove a wirevector object to the block.]
call[name[self].wirevector_set.remove, parameter[name[wirevector]]]
<ast.Delete object at 0x7da20e962e30>
|
keyword[def] identifier[remove_wirevector] ( identifier[self] , identifier[wirevector] ):
literal[string]
identifier[self] . identifier[wirevector_set] . identifier[remove] ( identifier[wirevector] )
keyword[del] identifier[self] . identifier[wirevector_by_name] [ identifier[wirevector] . identifier[name] ]
|
def remove_wirevector(self, wirevector):
""" Remove a wirevector object to the block."""
self.wirevector_set.remove(wirevector)
del self.wirevector_by_name[wirevector.name]
|
def Execute(self, http):
    """Issue all queued requests as one batched HTTP request.

    Args:
      http: A httplib2.Http object to be used with the request.

    Returns:
      None

    Raises:
      BatchError if the response is the wrong format.
    """
    self._Execute(http)
    # Dispatch every stored response to its per-request handler and to the
    # batch-level callback, translating HTTP errors into exceptions.
    for entry in self.__request_response_handlers.values():
        response = entry.response
        exception = None
        if response.status_code >= 300:
            exception = exceptions.HttpError.FromResponse(response)
        if entry.handler is not None:
            entry.handler(response, exception)
        if self.__callback is not None:
            self.__callback(response, exception)
|
def function[Execute, parameter[self, http]]:
constant[Execute all the requests as a single batched HTTP request.
Args:
http: A httplib2.Http object to be used with the request.
Returns:
None
Raises:
BatchError if the response is the wrong format.
]
call[name[self]._Execute, parameter[name[http]]]
for taget[name[key]] in starred[name[self].__request_response_handlers] begin[:]
variable[response] assign[=] call[name[self].__request_response_handlers][name[key]].response
variable[callback] assign[=] call[name[self].__request_response_handlers][name[key]].handler
variable[exception] assign[=] constant[None]
if compare[name[response].status_code greater_or_equal[>=] constant[300]] begin[:]
variable[exception] assign[=] call[name[exceptions].HttpError.FromResponse, parameter[name[response]]]
if compare[name[callback] is_not constant[None]] begin[:]
call[name[callback], parameter[name[response], name[exception]]]
if compare[name[self].__callback is_not constant[None]] begin[:]
call[name[self].__callback, parameter[name[response], name[exception]]]
|
keyword[def] identifier[Execute] ( identifier[self] , identifier[http] ):
literal[string]
identifier[self] . identifier[_Execute] ( identifier[http] )
keyword[for] identifier[key] keyword[in] identifier[self] . identifier[__request_response_handlers] :
identifier[response] = identifier[self] . identifier[__request_response_handlers] [ identifier[key] ]. identifier[response]
identifier[callback] = identifier[self] . identifier[__request_response_handlers] [ identifier[key] ]. identifier[handler]
identifier[exception] = keyword[None]
keyword[if] identifier[response] . identifier[status_code] >= literal[int] :
identifier[exception] = identifier[exceptions] . identifier[HttpError] . identifier[FromResponse] ( identifier[response] )
keyword[if] identifier[callback] keyword[is] keyword[not] keyword[None] :
identifier[callback] ( identifier[response] , identifier[exception] )
keyword[if] identifier[self] . identifier[__callback] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[__callback] ( identifier[response] , identifier[exception] )
|
def Execute(self, http):
"""Execute all the requests as a single batched HTTP request.
Args:
http: A httplib2.Http object to be used with the request.
Returns:
None
Raises:
BatchError if the response is the wrong format.
"""
self._Execute(http)
for key in self.__request_response_handlers:
response = self.__request_response_handlers[key].response
callback = self.__request_response_handlers[key].handler
exception = None
if response.status_code >= 300:
exception = exceptions.HttpError.FromResponse(response) # depends on [control=['if'], data=[]]
if callback is not None:
callback(response, exception) # depends on [control=['if'], data=['callback']]
if self.__callback is not None:
self.__callback(response, exception) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
|
def _getPhrase(self, i, sentence, NPlabels):
    """Fetch the full NP phrase starting at position *i*.

    Uses the existing NP phrase annotations (NPlabels): a phrase starts
    at a 'B' label and extends over following tokens until the next 'B'
    or an empty label.

    Returns a tuple: (list of sentence tokens in the phrase,
    list of their indices). Both lists are empty when no phrase
    starts at *i*.
    """
    # Guard clause: position must be in range and labelled as a
    # phrase beginning.
    if i < 0 or i >= len(sentence) or NPlabels[i] != 'B':
        return [], []
    phrase = [sentence[i]]
    indices = [i]
    j = i + 1
    # Extend over 'I'-style continuation labels only.
    while j < len(sentence) and NPlabels[j] not in ('B', ''):
        phrase.append(sentence[j])
        indices.append(j)
        j += 1
    return phrase, indices
|
def function[_getPhrase, parameter[self, i, sentence, NPlabels]]:
constant[ Fetches the full length phrase from the position i
based on the existing NP phrase annotations (from
NPlabels);
Returns list of sentence tokens in the phrase, and
indices of the phrase;
]
variable[phrase] assign[=] list[[]]
variable[indices] assign[=] list[[]]
if <ast.BoolOp object at 0x7da1b06cb9a0> begin[:]
variable[phrase] assign[=] list[[<ast.Subscript object at 0x7da1b06c8f10>]]
variable[indices] assign[=] list[[<ast.Name object at 0x7da1b06c8a60>]]
variable[j] assign[=] binary_operation[name[i] + constant[1]]
while compare[name[j] less[<] call[name[len], parameter[name[sentence]]]] begin[:]
if compare[call[name[NPlabels]][name[j]] in list[[<ast.Constant object at 0x7da1b06cb8e0>, <ast.Constant object at 0x7da1b06cbb80>]]] begin[:]
break
<ast.AugAssign object at 0x7da1b06c94e0>
return[tuple[[<ast.Name object at 0x7da1b06c9b70>, <ast.Name object at 0x7da1b06c8c70>]]]
|
keyword[def] identifier[_getPhrase] ( identifier[self] , identifier[i] , identifier[sentence] , identifier[NPlabels] ):
literal[string]
identifier[phrase] =[]
identifier[indices] =[]
keyword[if] literal[int] <= identifier[i] keyword[and] identifier[i] < identifier[len] ( identifier[sentence] ) keyword[and] identifier[NPlabels] [ identifier[i] ]== literal[string] :
identifier[phrase] =[ identifier[sentence] [ identifier[i] ]]
identifier[indices] =[ identifier[i] ]
identifier[j] = identifier[i] + literal[int]
keyword[while] ( identifier[j] < identifier[len] ( identifier[sentence] )):
keyword[if] identifier[NPlabels] [ identifier[j] ] keyword[in] [ literal[string] , literal[string] ]:
keyword[break]
keyword[else] :
identifier[phrase] . identifier[append] ( identifier[sentence] [ identifier[j] ])
identifier[indices] . identifier[append] ( identifier[j] )
identifier[j] += literal[int]
keyword[return] identifier[phrase] , identifier[indices]
|
def _getPhrase(self, i, sentence, NPlabels):
""" Fetches the full length phrase from the position i
based on the existing NP phrase annotations (from
NPlabels);
Returns list of sentence tokens in the phrase, and
indices of the phrase;
"""
phrase = []
indices = []
if 0 <= i and i < len(sentence) and (NPlabels[i] == 'B'):
phrase = [sentence[i]]
indices = [i]
j = i + 1
while j < len(sentence):
if NPlabels[j] in ['B', '']:
break # depends on [control=['if'], data=[]]
else:
phrase.append(sentence[j])
indices.append(j)
j += 1 # depends on [control=['while'], data=['j']] # depends on [control=['if'], data=[]]
return (phrase, indices)
|
def _GetMemberForOffset(self, offset):
    """Find the gzip member whose uncompressed data covers *offset*.

    Args:
      offset (int): offset in the uncompressed data to find the
          containing member for.

    Returns:
      gzipfile.GzipMember: gzip file member or None if not available.

    Raises:
      ValueError: if the provided offset is outside of the bounds of the
          uncompressed data.
    """
    if not 0 <= offset < self.uncompressed_data_size:
        raise ValueError('Offset {0:d} is larger than file size {1:d}.'.format(
            offset, self.uncompressed_data_size))
    # Members are keyed by their exclusive end offset; the first member
    # whose end lies beyond the requested offset contains it.
    for end_offset, member in self._members_by_end_offset.items():
        if offset < end_offset:
            return member
    return None
|
def function[_GetMemberForOffset, parameter[self, offset]]:
constant[Finds the member whose data includes the provided offset.
Args:
offset (int): offset in the uncompressed data to find the
containing member for.
Returns:
gzipfile.GzipMember: gzip file member or None if not available.
Raises:
ValueError: if the provided offset is outside of the bounds of the
uncompressed data.
]
if <ast.BoolOp object at 0x7da1b065b010> begin[:]
<ast.Raise object at 0x7da1b065b400>
for taget[tuple[[<ast.Name object at 0x7da1b065a650>, <ast.Name object at 0x7da1b06588b0>]]] in starred[call[name[iter], parameter[call[name[self]._members_by_end_offset.items, parameter[]]]]] begin[:]
if compare[name[offset] less[<] name[end_offset]] begin[:]
return[name[member]]
return[constant[None]]
|
keyword[def] identifier[_GetMemberForOffset] ( identifier[self] , identifier[offset] ):
literal[string]
keyword[if] identifier[offset] < literal[int] keyword[or] identifier[offset] >= identifier[self] . identifier[uncompressed_data_size] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] (
identifier[offset] , identifier[self] . identifier[uncompressed_data_size] ))
keyword[for] identifier[end_offset] , identifier[member] keyword[in] identifier[iter] ( identifier[self] . identifier[_members_by_end_offset] . identifier[items] ()):
keyword[if] identifier[offset] < identifier[end_offset] :
keyword[return] identifier[member]
keyword[return] keyword[None]
|
def _GetMemberForOffset(self, offset):
"""Finds the member whose data includes the provided offset.
Args:
offset (int): offset in the uncompressed data to find the
containing member for.
Returns:
gzipfile.GzipMember: gzip file member or None if not available.
Raises:
ValueError: if the provided offset is outside of the bounds of the
uncompressed data.
"""
if offset < 0 or offset >= self.uncompressed_data_size:
raise ValueError('Offset {0:d} is larger than file size {1:d}.'.format(offset, self.uncompressed_data_size)) # depends on [control=['if'], data=[]]
for (end_offset, member) in iter(self._members_by_end_offset.items()):
if offset < end_offset:
return member # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return None
|
def get_version(program, *, version_arg='--version', regex=r'(\d+(\.\d+)*)'):
    """Get the version of the specified program.

    Runs ``program version_arg`` and extracts a dotted version number from
    the start of its combined stdout/stderr.

    Args:
        program: executable name or path to invoke.
        version_arg: flag that makes the program print its version.
        regex: pattern whose group 1 captures the version; it is matched
            against the beginning of the stripped output.

    Returns:
        The captured version string, e.g. ``'2.31.1'``.

    Raises:
        MissingDependencyError: if the program is missing, exits with an
            error, or does not report a parseable version.
    """
    args_prog = [program, version_arg]
    try:
        proc = run(
            args_prog,
            close_fds=True,
            universal_newlines=True,
            stdout=PIPE,
            stderr=STDOUT,
            check=True,
        )
        output = proc.stdout
    except FileNotFoundError as e:
        raise MissingDependencyError(
            f"Could not find program '{program}' on the PATH"
        ) from e
    except CalledProcessError as e:
        # check=True only raises for a nonzero exit status, so this branch
        # reports the program's own error output; the fallback raise below
        # is defensive.
        if e.returncode != 0:
            raise MissingDependencyError(
                f"Ran program '{program}' but it exited with an error:\n{e.output}"
            ) from e
        raise MissingDependencyError(
            f"Could not find program '{program}' on the PATH"
        ) from e
    try:
        version = re.match(regex, output.strip()).group(1)
    except AttributeError as e:
        # re.match returned None; chain the cause explicitly instead of
        # dropping it (the original omitted `from e` here only).
        raise MissingDependencyError(
            f"The program '{program}' did not report its version. "
            f"Message was:\n{output}"
        ) from e
    return version
|
def function[get_version, parameter[program]]:
constant[Get the version of the specified program]
variable[args_prog] assign[=] list[[<ast.Name object at 0x7da1b1bc19c0>, <ast.Name object at 0x7da1b1bc1840>]]
<ast.Try object at 0x7da1b1bc0370>
<ast.Try object at 0x7da1b1bc17e0>
return[name[version]]
|
keyword[def] identifier[get_version] ( identifier[program] ,*, identifier[version_arg] = literal[string] , identifier[regex] = literal[string] ):
literal[string]
identifier[args_prog] =[ identifier[program] , identifier[version_arg] ]
keyword[try] :
identifier[proc] = identifier[run] (
identifier[args_prog] ,
identifier[close_fds] = keyword[True] ,
identifier[universal_newlines] = keyword[True] ,
identifier[stdout] = identifier[PIPE] ,
identifier[stderr] = identifier[STDOUT] ,
identifier[check] = keyword[True] ,
)
identifier[output] = identifier[proc] . identifier[stdout]
keyword[except] identifier[FileNotFoundError] keyword[as] identifier[e] :
keyword[raise] identifier[MissingDependencyError] (
literal[string]
) keyword[from] identifier[e]
keyword[except] identifier[CalledProcessError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[returncode] != literal[int] :
keyword[raise] identifier[MissingDependencyError] (
literal[string]
) keyword[from] identifier[e]
keyword[raise] identifier[MissingDependencyError] (
literal[string]
) keyword[from] identifier[e]
keyword[try] :
identifier[version] = identifier[re] . identifier[match] ( identifier[regex] , identifier[output] . identifier[strip] ()). identifier[group] ( literal[int] )
keyword[except] identifier[AttributeError] keyword[as] identifier[e] :
keyword[raise] identifier[MissingDependencyError] (
literal[string]
literal[string]
)
keyword[return] identifier[version]
|
def get_version(program, *, version_arg='--version', regex='(\\d+(\\.\\d+)*)'):
"""Get the version of the specified program"""
args_prog = [program, version_arg]
try:
proc = run(args_prog, close_fds=True, universal_newlines=True, stdout=PIPE, stderr=STDOUT, check=True)
output = proc.stdout # depends on [control=['try'], data=[]]
except FileNotFoundError as e:
raise MissingDependencyError(f"Could not find program '{program}' on the PATH") from e # depends on [control=['except'], data=['e']]
except CalledProcessError as e:
if e.returncode != 0:
raise MissingDependencyError(f"Ran program '{program}' but it exited with an error:\n{e.output}") from e # depends on [control=['if'], data=[]]
raise MissingDependencyError(f"Could not find program '{program}' on the PATH") from e # depends on [control=['except'], data=['e']]
try:
version = re.match(regex, output.strip()).group(1) # depends on [control=['try'], data=[]]
except AttributeError as e:
raise MissingDependencyError(f"The program '{program}' did not report its version. Message was:\n{output}") # depends on [control=['except'], data=[]]
return version
|
def connect(self, dialect=None, timeout=60):
        """
        Will connect to the target server and negotiate the capabilities
        with the client. Once setup, the client MUST call the disconnect()
        function to close the listener thread. This function will populate
        various connection properties that denote the capabilities of the
        server.
        :param dialect: If specified, forces the dialect that is negotiated
        with the server, if not set, then the newest dialect supported by
        the server is used up to SMB 3.1.1
        :param timeout: The timeout in seconds to wait for the initial
        negotiation process to complete
        """
        log.info("Setting up transport connection")
        self.transport.connect()
        log.info("Starting negotiation with SMB server")
        smb_response = self._send_smb2_negotiate(dialect, timeout)
        log.info("Negotiated dialect: %s"
                 % str(smb_response['dialect_revision']))
        self.dialect = smb_response['dialect_revision'].get_value()
        # Size limits advertised by the server; these cap individual SMB2
        # transact/read/write payloads on this connection.
        self.max_transact_size = smb_response['max_transact_size'].get_value()
        self.max_read_size = smb_response['max_read_size'].get_value()
        self.max_write_size = smb_response['max_write_size'].get_value()
        self.server_guid = smb_response['server_guid'].get_value()
        # Opaque security buffer from the server, stored for the later
        # authentication step (presumably a GSS/SPNEGO token, per the name).
        self.gss_negotiate_token = smb_response['buffer'].get_value()
        # The server can force signing on even if the client did not
        # require it; never downgrade an existing True.
        if not self.require_signing and \
                smb_response['security_mode'].has_flag(
                    SecurityMode.SMB2_NEGOTIATE_SIGNING_REQUIRED):
            self.require_signing = True
        log.info("Connection require signing: %s" % self.require_signing)
        capabilities = smb_response['capabilities']
        # SMB 2.1: leasing and multi-credit (large MTU) support.
        if self.dialect >= Dialects.SMB_2_1_0:
            self.supports_file_leasing = \
                capabilities.has_flag(Capabilities.SMB2_GLOBAL_CAP_LEASING)
            self.supports_multi_credit = \
                capabilities.has_flag(Capabilities.SMB2_GLOBAL_CAP_LARGE_MTU)
        # SMB 3.x: directory leasing, multi-channel and encryption flags.
        if self.dialect >= Dialects.SMB_3_0_0:
            self.supports_directory_leasing = capabilities.has_flag(
                Capabilities.SMB2_GLOBAL_CAP_DIRECTORY_LEASING)
            self.supports_multi_channel = capabilities.has_flag(
                Capabilities.SMB2_GLOBAL_CAP_MULTI_CHANNEL)
            # TODO: SMB2_GLOBAL_CAP_PERSISTENT_HANDLES
            self.supports_persistent_handles = False
            # The capability flag only signals encryption up to 3.0.x;
            # for 3.1.1 encryption is negotiated through the negotiate
            # contexts handled in the block below.
            self.supports_encryption = capabilities.has_flag(
                Capabilities.SMB2_GLOBAL_CAP_ENCRYPTION) \
                and self.dialect < Dialects.SMB_3_1_1
            self.server_capabilities = capabilities
            self.server_security_mode = \
                smb_response['security_mode'].get_value()
            # TODO: Check/add server to server_list in Client Page 203
        # SMB 3.1: cipher and pre-auth integrity hash come from the
        # negotiate context list instead of capability flags.
        if self.dialect >= Dialects.SMB_3_1_1:
            for context in smb_response['negotiate_context_list']:
                if context['context_type'].get_value() == \
                        NegotiateContextType.SMB2_ENCRYPTION_CAPABILITIES:
                    # First cipher in the list is the negotiated one;
                    # cipher id 0 means the server chose no cipher.
                    cipher_id = context['data']['ciphers'][0]
                    self.cipher_id = Ciphers.get_cipher(cipher_id)
                    self.supports_encryption = self.cipher_id != 0
                else:
                    # NOTE(review): assumes any non-encryption context is a
                    # preauth-integrity context -- confirm if more context
                    # types are ever negotiated.
                    hash_id = context['data']['hash_algorithms'][0]
                    self.preauth_integrity_hash_id = \
                        HashAlgorithms.get_algorithm(hash_id)
|
def function[connect, parameter[self, dialect, timeout]]:
constant[
Will connect to the target server and negotiate the capabilities
with the client. Once setup, the client MUST call the disconnect()
function to close the listener thread. This function will populate
various connection properties that denote the capabilities of the
server.
:param dialect: If specified, forces the dialect that is negotiated
with the server, if not set, then the newest dialect supported by
the server is used up to SMB 3.1.1
:param timeout: The timeout in seconds to wait for the initial
negotiation process to complete
]
call[name[log].info, parameter[constant[Setting up transport connection]]]
call[name[self].transport.connect, parameter[]]
call[name[log].info, parameter[constant[Starting negotiation with SMB server]]]
variable[smb_response] assign[=] call[name[self]._send_smb2_negotiate, parameter[name[dialect], name[timeout]]]
call[name[log].info, parameter[binary_operation[constant[Negotiated dialect: %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[call[name[smb_response]][constant[dialect_revision]]]]]]]
name[self].dialect assign[=] call[call[name[smb_response]][constant[dialect_revision]].get_value, parameter[]]
name[self].max_transact_size assign[=] call[call[name[smb_response]][constant[max_transact_size]].get_value, parameter[]]
name[self].max_read_size assign[=] call[call[name[smb_response]][constant[max_read_size]].get_value, parameter[]]
name[self].max_write_size assign[=] call[call[name[smb_response]][constant[max_write_size]].get_value, parameter[]]
name[self].server_guid assign[=] call[call[name[smb_response]][constant[server_guid]].get_value, parameter[]]
name[self].gss_negotiate_token assign[=] call[call[name[smb_response]][constant[buffer]].get_value, parameter[]]
if <ast.BoolOp object at 0x7da20c76efb0> begin[:]
name[self].require_signing assign[=] constant[True]
call[name[log].info, parameter[binary_operation[constant[Connection require signing: %s] <ast.Mod object at 0x7da2590d6920> name[self].require_signing]]]
variable[capabilities] assign[=] call[name[smb_response]][constant[capabilities]]
if compare[name[self].dialect greater_or_equal[>=] name[Dialects].SMB_2_1_0] begin[:]
name[self].supports_file_leasing assign[=] call[name[capabilities].has_flag, parameter[name[Capabilities].SMB2_GLOBAL_CAP_LEASING]]
name[self].supports_multi_credit assign[=] call[name[capabilities].has_flag, parameter[name[Capabilities].SMB2_GLOBAL_CAP_LARGE_MTU]]
if compare[name[self].dialect greater_or_equal[>=] name[Dialects].SMB_3_0_0] begin[:]
name[self].supports_directory_leasing assign[=] call[name[capabilities].has_flag, parameter[name[Capabilities].SMB2_GLOBAL_CAP_DIRECTORY_LEASING]]
name[self].supports_multi_channel assign[=] call[name[capabilities].has_flag, parameter[name[Capabilities].SMB2_GLOBAL_CAP_MULTI_CHANNEL]]
name[self].supports_persistent_handles assign[=] constant[False]
name[self].supports_encryption assign[=] <ast.BoolOp object at 0x7da20c76c7f0>
name[self].server_capabilities assign[=] name[capabilities]
name[self].server_security_mode assign[=] call[call[name[smb_response]][constant[security_mode]].get_value, parameter[]]
if compare[name[self].dialect greater_or_equal[>=] name[Dialects].SMB_3_1_1] begin[:]
for taget[name[context]] in starred[call[name[smb_response]][constant[negotiate_context_list]]] begin[:]
if compare[call[call[name[context]][constant[context_type]].get_value, parameter[]] equal[==] name[NegotiateContextType].SMB2_ENCRYPTION_CAPABILITIES] begin[:]
variable[cipher_id] assign[=] call[call[call[name[context]][constant[data]]][constant[ciphers]]][constant[0]]
name[self].cipher_id assign[=] call[name[Ciphers].get_cipher, parameter[name[cipher_id]]]
name[self].supports_encryption assign[=] compare[name[self].cipher_id not_equal[!=] constant[0]]
|
keyword[def] identifier[connect] ( identifier[self] , identifier[dialect] = keyword[None] , identifier[timeout] = literal[int] ):
literal[string]
identifier[log] . identifier[info] ( literal[string] )
identifier[self] . identifier[transport] . identifier[connect] ()
identifier[log] . identifier[info] ( literal[string] )
identifier[smb_response] = identifier[self] . identifier[_send_smb2_negotiate] ( identifier[dialect] , identifier[timeout] )
identifier[log] . identifier[info] ( literal[string]
% identifier[str] ( identifier[smb_response] [ literal[string] ]))
identifier[self] . identifier[dialect] = identifier[smb_response] [ literal[string] ]. identifier[get_value] ()
identifier[self] . identifier[max_transact_size] = identifier[smb_response] [ literal[string] ]. identifier[get_value] ()
identifier[self] . identifier[max_read_size] = identifier[smb_response] [ literal[string] ]. identifier[get_value] ()
identifier[self] . identifier[max_write_size] = identifier[smb_response] [ literal[string] ]. identifier[get_value] ()
identifier[self] . identifier[server_guid] = identifier[smb_response] [ literal[string] ]. identifier[get_value] ()
identifier[self] . identifier[gss_negotiate_token] = identifier[smb_response] [ literal[string] ]. identifier[get_value] ()
keyword[if] keyword[not] identifier[self] . identifier[require_signing] keyword[and] identifier[smb_response] [ literal[string] ]. identifier[has_flag] (
identifier[SecurityMode] . identifier[SMB2_NEGOTIATE_SIGNING_REQUIRED] ):
identifier[self] . identifier[require_signing] = keyword[True]
identifier[log] . identifier[info] ( literal[string] % identifier[self] . identifier[require_signing] )
identifier[capabilities] = identifier[smb_response] [ literal[string] ]
keyword[if] identifier[self] . identifier[dialect] >= identifier[Dialects] . identifier[SMB_2_1_0] :
identifier[self] . identifier[supports_file_leasing] = identifier[capabilities] . identifier[has_flag] ( identifier[Capabilities] . identifier[SMB2_GLOBAL_CAP_LEASING] )
identifier[self] . identifier[supports_multi_credit] = identifier[capabilities] . identifier[has_flag] ( identifier[Capabilities] . identifier[SMB2_GLOBAL_CAP_LARGE_MTU] )
keyword[if] identifier[self] . identifier[dialect] >= identifier[Dialects] . identifier[SMB_3_0_0] :
identifier[self] . identifier[supports_directory_leasing] = identifier[capabilities] . identifier[has_flag] (
identifier[Capabilities] . identifier[SMB2_GLOBAL_CAP_DIRECTORY_LEASING] )
identifier[self] . identifier[supports_multi_channel] = identifier[capabilities] . identifier[has_flag] (
identifier[Capabilities] . identifier[SMB2_GLOBAL_CAP_MULTI_CHANNEL] )
identifier[self] . identifier[supports_persistent_handles] = keyword[False]
identifier[self] . identifier[supports_encryption] = identifier[capabilities] . identifier[has_flag] (
identifier[Capabilities] . identifier[SMB2_GLOBAL_CAP_ENCRYPTION] ) keyword[and] identifier[self] . identifier[dialect] < identifier[Dialects] . identifier[SMB_3_1_1]
identifier[self] . identifier[server_capabilities] = identifier[capabilities]
identifier[self] . identifier[server_security_mode] = identifier[smb_response] [ literal[string] ]. identifier[get_value] ()
keyword[if] identifier[self] . identifier[dialect] >= identifier[Dialects] . identifier[SMB_3_1_1] :
keyword[for] identifier[context] keyword[in] identifier[smb_response] [ literal[string] ]:
keyword[if] identifier[context] [ literal[string] ]. identifier[get_value] ()== identifier[NegotiateContextType] . identifier[SMB2_ENCRYPTION_CAPABILITIES] :
identifier[cipher_id] = identifier[context] [ literal[string] ][ literal[string] ][ literal[int] ]
identifier[self] . identifier[cipher_id] = identifier[Ciphers] . identifier[get_cipher] ( identifier[cipher_id] )
identifier[self] . identifier[supports_encryption] = identifier[self] . identifier[cipher_id] != literal[int]
keyword[else] :
identifier[hash_id] = identifier[context] [ literal[string] ][ literal[string] ][ literal[int] ]
identifier[self] . identifier[preauth_integrity_hash_id] = identifier[HashAlgorithms] . identifier[get_algorithm] ( identifier[hash_id] )
|
def connect(self, dialect=None, timeout=60):
"""
Will connect to the target server and negotiate the capabilities
with the client. Once setup, the client MUST call the disconnect()
function to close the listener thread. This function will populate
various connection properties that denote the capabilities of the
server.
:param dialect: If specified, forces the dialect that is negotiated
with the server, if not set, then the newest dialect supported by
the server is used up to SMB 3.1.1
:param timeout: The timeout in seconds to wait for the initial
negotiation process to complete
"""
log.info('Setting up transport connection')
self.transport.connect()
log.info('Starting negotiation with SMB server')
smb_response = self._send_smb2_negotiate(dialect, timeout)
log.info('Negotiated dialect: %s' % str(smb_response['dialect_revision']))
self.dialect = smb_response['dialect_revision'].get_value()
self.max_transact_size = smb_response['max_transact_size'].get_value()
self.max_read_size = smb_response['max_read_size'].get_value()
self.max_write_size = smb_response['max_write_size'].get_value()
self.server_guid = smb_response['server_guid'].get_value()
self.gss_negotiate_token = smb_response['buffer'].get_value()
if not self.require_signing and smb_response['security_mode'].has_flag(SecurityMode.SMB2_NEGOTIATE_SIGNING_REQUIRED):
self.require_signing = True # depends on [control=['if'], data=[]]
log.info('Connection require signing: %s' % self.require_signing)
capabilities = smb_response['capabilities']
# SMB 2.1
if self.dialect >= Dialects.SMB_2_1_0:
self.supports_file_leasing = capabilities.has_flag(Capabilities.SMB2_GLOBAL_CAP_LEASING)
self.supports_multi_credit = capabilities.has_flag(Capabilities.SMB2_GLOBAL_CAP_LARGE_MTU) # depends on [control=['if'], data=[]]
# SMB 3.x
if self.dialect >= Dialects.SMB_3_0_0:
self.supports_directory_leasing = capabilities.has_flag(Capabilities.SMB2_GLOBAL_CAP_DIRECTORY_LEASING)
self.supports_multi_channel = capabilities.has_flag(Capabilities.SMB2_GLOBAL_CAP_MULTI_CHANNEL)
# TODO: SMB2_GLOBAL_CAP_PERSISTENT_HANDLES
self.supports_persistent_handles = False
self.supports_encryption = capabilities.has_flag(Capabilities.SMB2_GLOBAL_CAP_ENCRYPTION) and self.dialect < Dialects.SMB_3_1_1
self.server_capabilities = capabilities
self.server_security_mode = smb_response['security_mode'].get_value() # depends on [control=['if'], data=[]]
# TODO: Check/add server to server_list in Client Page 203
# SMB 3.1
if self.dialect >= Dialects.SMB_3_1_1:
for context in smb_response['negotiate_context_list']:
if context['context_type'].get_value() == NegotiateContextType.SMB2_ENCRYPTION_CAPABILITIES:
cipher_id = context['data']['ciphers'][0]
self.cipher_id = Ciphers.get_cipher(cipher_id)
self.supports_encryption = self.cipher_id != 0 # depends on [control=['if'], data=[]]
else:
hash_id = context['data']['hash_algorithms'][0]
self.preauth_integrity_hash_id = HashAlgorithms.get_algorithm(hash_id) # depends on [control=['for'], data=['context']] # depends on [control=['if'], data=[]]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.