code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def ConsumeBool(self):
"""Consumes a boolean value.
Returns:
The bool parsed.
Raises:
ParseError: If a boolean value couldn't be consumed.
"""
try:
result = ParseBool(self.token)
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result | def function[ConsumeBool, parameter[self]]:
constant[Consumes a boolean value.
Returns:
The bool parsed.
Raises:
ParseError: If a boolean value couldn't be consumed.
]
<ast.Try object at 0x7da1affee6b0>
call[name[self].NextToken, parameter[]]
return[name[result]] | keyword[def] identifier[ConsumeBool] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[result] = identifier[ParseBool] ( identifier[self] . identifier[token] )
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
keyword[raise] identifier[self] . identifier[_ParseError] ( identifier[str] ( identifier[e] ))
identifier[self] . identifier[NextToken] ()
keyword[return] identifier[result] | def ConsumeBool(self):
"""Consumes a boolean value.
Returns:
The bool parsed.
Raises:
ParseError: If a boolean value couldn't be consumed.
"""
try:
result = ParseBool(self.token) # depends on [control=['try'], data=[]]
except ValueError as e:
raise self._ParseError(str(e)) # depends on [control=['except'], data=['e']]
self.NextToken()
return result |
def getPreprocessorDefinitions(self, engineRoot, delimiter=' '):
"""
Returns the list of preprocessor definitions for this library, joined using the specified delimiter
"""
return delimiter.join(self.resolveRoot(self.definitions, engineRoot)) | def function[getPreprocessorDefinitions, parameter[self, engineRoot, delimiter]]:
constant[
Returns the list of preprocessor definitions for this library, joined using the specified delimiter
]
return[call[name[delimiter].join, parameter[call[name[self].resolveRoot, parameter[name[self].definitions, name[engineRoot]]]]]] | keyword[def] identifier[getPreprocessorDefinitions] ( identifier[self] , identifier[engineRoot] , identifier[delimiter] = literal[string] ):
literal[string]
keyword[return] identifier[delimiter] . identifier[join] ( identifier[self] . identifier[resolveRoot] ( identifier[self] . identifier[definitions] , identifier[engineRoot] )) | def getPreprocessorDefinitions(self, engineRoot, delimiter=' '):
"""
Returns the list of preprocessor definitions for this library, joined using the specified delimiter
"""
return delimiter.join(self.resolveRoot(self.definitions, engineRoot)) |
def coarsegrain(F, sets):
r"""Coarse-grains the flux to the given sets
$fc_{i,j} = \sum_{i \in I,j \in J} f_{i,j}$
Note that if you coarse-grain a net flux, it does not necessarily have a net
flux property anymore. If want to make sure you get a netflux,
use to_netflux(coarsegrain(F,sets)).
Parameters
----------
F : (n, n) ndarray
Matrix of flux values between pairs of states.
sets : list of array-like of ints
The sets of states onto which the flux is coarse-grained.
"""
nnew = len(sets)
Fc = np.zeros((nnew, nnew))
for i in range(0, nnew - 1):
for j in range(i + 1, nnew):
I = list(sets[i])
J = list(sets[j])
Fc[i, j] = np.sum(F[I, :][:, J])
Fc[j, i] = np.sum(F[J, :][:, I])
return Fc | def function[coarsegrain, parameter[F, sets]]:
constant[Coarse-grains the flux to the given sets
$fc_{i,j} = \sum_{i \in I,j \in J} f_{i,j}$
Note that if you coarse-grain a net flux, it does not necessarily have a net
flux property anymore. If want to make sure you get a netflux,
use to_netflux(coarsegrain(F,sets)).
Parameters
----------
F : (n, n) ndarray
Matrix of flux values between pairs of states.
sets : list of array-like of ints
The sets of states onto which the flux is coarse-grained.
]
variable[nnew] assign[=] call[name[len], parameter[name[sets]]]
variable[Fc] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b2631c00>, <ast.Name object at 0x7da1b2631b10>]]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], binary_operation[name[nnew] - constant[1]]]]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[binary_operation[name[i] + constant[1]], name[nnew]]]] begin[:]
variable[I] assign[=] call[name[list], parameter[call[name[sets]][name[i]]]]
variable[J] assign[=] call[name[list], parameter[call[name[sets]][name[j]]]]
call[name[Fc]][tuple[[<ast.Name object at 0x7da1b2630be0>, <ast.Name object at 0x7da1b26326b0>]]] assign[=] call[name[np].sum, parameter[call[call[name[F]][tuple[[<ast.Name object at 0x7da1b2632e00>, <ast.Slice object at 0x7da1b2631c60>]]]][tuple[[<ast.Slice object at 0x7da1b2631330>, <ast.Name object at 0x7da1b2632470>]]]]]
call[name[Fc]][tuple[[<ast.Name object at 0x7da1b2630a30>, <ast.Name object at 0x7da1b2631fc0>]]] assign[=] call[name[np].sum, parameter[call[call[name[F]][tuple[[<ast.Name object at 0x7da1b2633250>, <ast.Slice object at 0x7da1b2631150>]]]][tuple[[<ast.Slice object at 0x7da1b2632320>, <ast.Name object at 0x7da1b2631ea0>]]]]]
return[name[Fc]] | keyword[def] identifier[coarsegrain] ( identifier[F] , identifier[sets] ):
literal[string]
identifier[nnew] = identifier[len] ( identifier[sets] )
identifier[Fc] = identifier[np] . identifier[zeros] (( identifier[nnew] , identifier[nnew] ))
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[nnew] - literal[int] ):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[i] + literal[int] , identifier[nnew] ):
identifier[I] = identifier[list] ( identifier[sets] [ identifier[i] ])
identifier[J] = identifier[list] ( identifier[sets] [ identifier[j] ])
identifier[Fc] [ identifier[i] , identifier[j] ]= identifier[np] . identifier[sum] ( identifier[F] [ identifier[I] ,:][:, identifier[J] ])
identifier[Fc] [ identifier[j] , identifier[i] ]= identifier[np] . identifier[sum] ( identifier[F] [ identifier[J] ,:][:, identifier[I] ])
keyword[return] identifier[Fc] | def coarsegrain(F, sets):
"""Coarse-grains the flux to the given sets
$fc_{i,j} = \\sum_{i \\in I,j \\in J} f_{i,j}$
Note that if you coarse-grain a net flux, it does not necessarily have a net
flux property anymore. If want to make sure you get a netflux,
use to_netflux(coarsegrain(F,sets)).
Parameters
----------
F : (n, n) ndarray
Matrix of flux values between pairs of states.
sets : list of array-like of ints
The sets of states onto which the flux is coarse-grained.
"""
nnew = len(sets)
Fc = np.zeros((nnew, nnew))
for i in range(0, nnew - 1):
for j in range(i + 1, nnew):
I = list(sets[i])
J = list(sets[j])
Fc[i, j] = np.sum(F[I, :][:, J])
Fc[j, i] = np.sum(F[J, :][:, I]) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
return Fc |
def symmetrise(matrix, tri='upper'):
"""
Will copy the selected (upper or lower) triangle of a square matrix
to the opposite side, so that the matrix is symmetrical.
Alters in place.
"""
if tri == 'upper':
tri_fn = np.triu_indices
else:
tri_fn = np.tril_indices
size = matrix.shape[0]
matrix[tri_fn(size)[::-1]] = matrix[tri_fn(size)]
return matrix | def function[symmetrise, parameter[matrix, tri]]:
constant[
Will copy the selected (upper or lower) triangle of a square matrix
to the opposite side, so that the matrix is symmetrical.
Alters in place.
]
if compare[name[tri] equal[==] constant[upper]] begin[:]
variable[tri_fn] assign[=] name[np].triu_indices
variable[size] assign[=] call[name[matrix].shape][constant[0]]
call[name[matrix]][call[call[name[tri_fn], parameter[name[size]]]][<ast.Slice object at 0x7da18bcc9c30>]] assign[=] call[name[matrix]][call[name[tri_fn], parameter[name[size]]]]
return[name[matrix]] | keyword[def] identifier[symmetrise] ( identifier[matrix] , identifier[tri] = literal[string] ):
literal[string]
keyword[if] identifier[tri] == literal[string] :
identifier[tri_fn] = identifier[np] . identifier[triu_indices]
keyword[else] :
identifier[tri_fn] = identifier[np] . identifier[tril_indices]
identifier[size] = identifier[matrix] . identifier[shape] [ literal[int] ]
identifier[matrix] [ identifier[tri_fn] ( identifier[size] )[::- literal[int] ]]= identifier[matrix] [ identifier[tri_fn] ( identifier[size] )]
keyword[return] identifier[matrix] | def symmetrise(matrix, tri='upper'):
"""
Will copy the selected (upper or lower) triangle of a square matrix
to the opposite side, so that the matrix is symmetrical.
Alters in place.
"""
if tri == 'upper':
tri_fn = np.triu_indices # depends on [control=['if'], data=[]]
else:
tri_fn = np.tril_indices
size = matrix.shape[0]
matrix[tri_fn(size)[::-1]] = matrix[tri_fn(size)]
return matrix |
def change_exteditor(self):
"""Change external editor path"""
path, valid = QInputDialog.getText(self, _('External editor'),
_('External editor executable path:'),
QLineEdit.Normal,
self.get_option('external_editor/path'))
if valid:
self.set_option('external_editor/path', to_text_string(path)) | def function[change_exteditor, parameter[self]]:
constant[Change external editor path]
<ast.Tuple object at 0x7da1b1f44a90> assign[=] call[name[QInputDialog].getText, parameter[name[self], call[name[_], parameter[constant[External editor]]], call[name[_], parameter[constant[External editor executable path:]]], name[QLineEdit].Normal, call[name[self].get_option, parameter[constant[external_editor/path]]]]]
if name[valid] begin[:]
call[name[self].set_option, parameter[constant[external_editor/path], call[name[to_text_string], parameter[name[path]]]]] | keyword[def] identifier[change_exteditor] ( identifier[self] ):
literal[string]
identifier[path] , identifier[valid] = identifier[QInputDialog] . identifier[getText] ( identifier[self] , identifier[_] ( literal[string] ),
identifier[_] ( literal[string] ),
identifier[QLineEdit] . identifier[Normal] ,
identifier[self] . identifier[get_option] ( literal[string] ))
keyword[if] identifier[valid] :
identifier[self] . identifier[set_option] ( literal[string] , identifier[to_text_string] ( identifier[path] )) | def change_exteditor(self):
"""Change external editor path"""
(path, valid) = QInputDialog.getText(self, _('External editor'), _('External editor executable path:'), QLineEdit.Normal, self.get_option('external_editor/path'))
if valid:
self.set_option('external_editor/path', to_text_string(path)) # depends on [control=['if'], data=[]] |
def delete(self, paths, recurse=False):
''' Delete paths
:param paths: Paths to delete
:type paths: list
:param recurse: Recursive delete (use with care!)
:type recurse: boolean
:returns: a generator that yields dictionaries
.. note:: Recursive deletion uses the NameNode recursive deletion functionality
instead of letting the client recurse. Hadoops client recurses
by itself and thus showing all files and directories that are
deleted. Snakebite doesn't.
'''
if not isinstance(paths, list):
raise InvalidInputException("Paths should be a list")
if not paths:
raise InvalidInputException("delete: no path given")
processor = lambda path, node, recurse=recurse: self._handle_delete(path, node, recurse)
for item in self._find_items(paths, processor, include_toplevel=True):
if item:
yield item | def function[delete, parameter[self, paths, recurse]]:
constant[ Delete paths
:param paths: Paths to delete
:type paths: list
:param recurse: Recursive delete (use with care!)
:type recurse: boolean
:returns: a generator that yields dictionaries
.. note:: Recursive deletion uses the NameNode recursive deletion functionality
instead of letting the client recurse. Hadoops client recurses
by itself and thus showing all files and directories that are
deleted. Snakebite doesn't.
]
if <ast.UnaryOp object at 0x7da1b08fb970> begin[:]
<ast.Raise object at 0x7da1b08f8520>
if <ast.UnaryOp object at 0x7da1b08faad0> begin[:]
<ast.Raise object at 0x7da1b08fab90>
variable[processor] assign[=] <ast.Lambda object at 0x7da1b08fbcd0>
for taget[name[item]] in starred[call[name[self]._find_items, parameter[name[paths], name[processor]]]] begin[:]
if name[item] begin[:]
<ast.Yield object at 0x7da1b08fab30> | keyword[def] identifier[delete] ( identifier[self] , identifier[paths] , identifier[recurse] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[paths] , identifier[list] ):
keyword[raise] identifier[InvalidInputException] ( literal[string] )
keyword[if] keyword[not] identifier[paths] :
keyword[raise] identifier[InvalidInputException] ( literal[string] )
identifier[processor] = keyword[lambda] identifier[path] , identifier[node] , identifier[recurse] = identifier[recurse] : identifier[self] . identifier[_handle_delete] ( identifier[path] , identifier[node] , identifier[recurse] )
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[_find_items] ( identifier[paths] , identifier[processor] , identifier[include_toplevel] = keyword[True] ):
keyword[if] identifier[item] :
keyword[yield] identifier[item] | def delete(self, paths, recurse=False):
""" Delete paths
:param paths: Paths to delete
:type paths: list
:param recurse: Recursive delete (use with care!)
:type recurse: boolean
:returns: a generator that yields dictionaries
.. note:: Recursive deletion uses the NameNode recursive deletion functionality
instead of letting the client recurse. Hadoops client recurses
by itself and thus showing all files and directories that are
deleted. Snakebite doesn't.
"""
if not isinstance(paths, list):
raise InvalidInputException('Paths should be a list') # depends on [control=['if'], data=[]]
if not paths:
raise InvalidInputException('delete: no path given') # depends on [control=['if'], data=[]]
processor = lambda path, node, recurse=recurse: self._handle_delete(path, node, recurse)
for item in self._find_items(paths, processor, include_toplevel=True):
if item:
yield item # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] |
def create_custom_trees(cls, obj, options=None):
"""
Returns the appropriate set of customized subtree clones for
an object, suitable for merging with Store.custom_options (i.e
with the ids appropriately offset). Note if an object has no
integer ids a new OptionTree is built.
The id_mapping return value is a list mapping the ids that
need to be matched as set to their new values.
"""
clones, id_mapping = {}, []
obj_ids = cls.get_object_ids(obj)
offset = cls.id_offset()
obj_ids = [None] if len(obj_ids)==0 else obj_ids
for tree_id in obj_ids:
if tree_id is not None and tree_id in Store.custom_options():
original = Store.custom_options()[tree_id]
clone = OptionTree(items = original.items(),
groups = original.groups)
clones[tree_id + offset + 1] = clone
id_mapping.append((tree_id, tree_id + offset + 1))
else:
clone = OptionTree(groups=Store.options().groups)
clones[offset] = clone
id_mapping.append((tree_id, offset))
# Nodes needed to ensure allowed_keywords is respected
for k in Store.options():
if k in [(opt.split('.')[0],) for opt in options]:
group = {grp:Options(
allowed_keywords=opt.allowed_keywords)
for (grp, opt) in
Store.options()[k].groups.items()}
clone[k] = group
return {k:cls.apply_customizations(options, t) if options else t
for k,t in clones.items()}, id_mapping | def function[create_custom_trees, parameter[cls, obj, options]]:
constant[
Returns the appropriate set of customized subtree clones for
an object, suitable for merging with Store.custom_options (i.e
with the ids appropriately offset). Note if an object has no
integer ids a new OptionTree is built.
The id_mapping return value is a list mapping the ids that
need to be matched as set to their new values.
]
<ast.Tuple object at 0x7da2044c1de0> assign[=] tuple[[<ast.Dict object at 0x7da2044c2800>, <ast.List object at 0x7da2044c3af0>]]
variable[obj_ids] assign[=] call[name[cls].get_object_ids, parameter[name[obj]]]
variable[offset] assign[=] call[name[cls].id_offset, parameter[]]
variable[obj_ids] assign[=] <ast.IfExp object at 0x7da2044c25c0>
for taget[name[tree_id]] in starred[name[obj_ids]] begin[:]
if <ast.BoolOp object at 0x7da2044c1ab0> begin[:]
variable[original] assign[=] call[call[name[Store].custom_options, parameter[]]][name[tree_id]]
variable[clone] assign[=] call[name[OptionTree], parameter[]]
call[name[clones]][binary_operation[binary_operation[name[tree_id] + name[offset]] + constant[1]]] assign[=] name[clone]
call[name[id_mapping].append, parameter[tuple[[<ast.Name object at 0x7da2044c24d0>, <ast.BinOp object at 0x7da2044c3cd0>]]]]
for taget[name[k]] in starred[call[name[Store].options, parameter[]]] begin[:]
if compare[name[k] in <ast.ListComp object at 0x7da2044c0d00>] begin[:]
variable[group] assign[=] <ast.DictComp object at 0x7da2044c2230>
call[name[clone]][name[k]] assign[=] name[group]
return[tuple[[<ast.DictComp object at 0x7da2044c28c0>, <ast.Name object at 0x7da18f00ddb0>]]] | keyword[def] identifier[create_custom_trees] ( identifier[cls] , identifier[obj] , identifier[options] = keyword[None] ):
literal[string]
identifier[clones] , identifier[id_mapping] ={},[]
identifier[obj_ids] = identifier[cls] . identifier[get_object_ids] ( identifier[obj] )
identifier[offset] = identifier[cls] . identifier[id_offset] ()
identifier[obj_ids] =[ keyword[None] ] keyword[if] identifier[len] ( identifier[obj_ids] )== literal[int] keyword[else] identifier[obj_ids]
keyword[for] identifier[tree_id] keyword[in] identifier[obj_ids] :
keyword[if] identifier[tree_id] keyword[is] keyword[not] keyword[None] keyword[and] identifier[tree_id] keyword[in] identifier[Store] . identifier[custom_options] ():
identifier[original] = identifier[Store] . identifier[custom_options] ()[ identifier[tree_id] ]
identifier[clone] = identifier[OptionTree] ( identifier[items] = identifier[original] . identifier[items] (),
identifier[groups] = identifier[original] . identifier[groups] )
identifier[clones] [ identifier[tree_id] + identifier[offset] + literal[int] ]= identifier[clone]
identifier[id_mapping] . identifier[append] (( identifier[tree_id] , identifier[tree_id] + identifier[offset] + literal[int] ))
keyword[else] :
identifier[clone] = identifier[OptionTree] ( identifier[groups] = identifier[Store] . identifier[options] (). identifier[groups] )
identifier[clones] [ identifier[offset] ]= identifier[clone]
identifier[id_mapping] . identifier[append] (( identifier[tree_id] , identifier[offset] ))
keyword[for] identifier[k] keyword[in] identifier[Store] . identifier[options] ():
keyword[if] identifier[k] keyword[in] [( identifier[opt] . identifier[split] ( literal[string] )[ literal[int] ],) keyword[for] identifier[opt] keyword[in] identifier[options] ]:
identifier[group] ={ identifier[grp] : identifier[Options] (
identifier[allowed_keywords] = identifier[opt] . identifier[allowed_keywords] )
keyword[for] ( identifier[grp] , identifier[opt] ) keyword[in]
identifier[Store] . identifier[options] ()[ identifier[k] ]. identifier[groups] . identifier[items] ()}
identifier[clone] [ identifier[k] ]= identifier[group]
keyword[return] { identifier[k] : identifier[cls] . identifier[apply_customizations] ( identifier[options] , identifier[t] ) keyword[if] identifier[options] keyword[else] identifier[t]
keyword[for] identifier[k] , identifier[t] keyword[in] identifier[clones] . identifier[items] ()}, identifier[id_mapping] | def create_custom_trees(cls, obj, options=None):
"""
Returns the appropriate set of customized subtree clones for
an object, suitable for merging with Store.custom_options (i.e
with the ids appropriately offset). Note if an object has no
integer ids a new OptionTree is built.
The id_mapping return value is a list mapping the ids that
need to be matched as set to their new values.
"""
(clones, id_mapping) = ({}, [])
obj_ids = cls.get_object_ids(obj)
offset = cls.id_offset()
obj_ids = [None] if len(obj_ids) == 0 else obj_ids
for tree_id in obj_ids:
if tree_id is not None and tree_id in Store.custom_options():
original = Store.custom_options()[tree_id]
clone = OptionTree(items=original.items(), groups=original.groups)
clones[tree_id + offset + 1] = clone
id_mapping.append((tree_id, tree_id + offset + 1)) # depends on [control=['if'], data=[]]
else:
clone = OptionTree(groups=Store.options().groups)
clones[offset] = clone
id_mapping.append((tree_id, offset))
# Nodes needed to ensure allowed_keywords is respected
for k in Store.options():
if k in [(opt.split('.')[0],) for opt in options]:
group = {grp: Options(allowed_keywords=opt.allowed_keywords) for (grp, opt) in Store.options()[k].groups.items()}
clone[k] = group # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=['k']] # depends on [control=['for'], data=['tree_id']]
return ({k: cls.apply_customizations(options, t) if options else t for (k, t) in clones.items()}, id_mapping) |
def get_raw_data(self, times=5):
"""
do some readings and aggregate them using the defined statistics function
:param times: how many measures to aggregate
:type times: int
:return: the aggregate of the measured values
:rtype float
"""
self._validate_measure_count(times)
data_list = []
while len(data_list) < times:
data = self._read()
if data not in [False, -1]:
data_list.append(data)
return data_list | def function[get_raw_data, parameter[self, times]]:
constant[
do some readings and aggregate them using the defined statistics function
:param times: how many measures to aggregate
:type times: int
:return: the aggregate of the measured values
:rtype float
]
call[name[self]._validate_measure_count, parameter[name[times]]]
variable[data_list] assign[=] list[[]]
while compare[call[name[len], parameter[name[data_list]]] less[<] name[times]] begin[:]
variable[data] assign[=] call[name[self]._read, parameter[]]
if compare[name[data] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b0ed9cf0>, <ast.UnaryOp object at 0x7da1b0ed96c0>]]] begin[:]
call[name[data_list].append, parameter[name[data]]]
return[name[data_list]] | keyword[def] identifier[get_raw_data] ( identifier[self] , identifier[times] = literal[int] ):
literal[string]
identifier[self] . identifier[_validate_measure_count] ( identifier[times] )
identifier[data_list] =[]
keyword[while] identifier[len] ( identifier[data_list] )< identifier[times] :
identifier[data] = identifier[self] . identifier[_read] ()
keyword[if] identifier[data] keyword[not] keyword[in] [ keyword[False] ,- literal[int] ]:
identifier[data_list] . identifier[append] ( identifier[data] )
keyword[return] identifier[data_list] | def get_raw_data(self, times=5):
"""
do some readings and aggregate them using the defined statistics function
:param times: how many measures to aggregate
:type times: int
:return: the aggregate of the measured values
:rtype float
"""
self._validate_measure_count(times)
data_list = []
while len(data_list) < times:
data = self._read()
if data not in [False, -1]:
data_list.append(data) # depends on [control=['if'], data=['data']] # depends on [control=['while'], data=[]]
return data_list |
def clean_texmath(txt):
"""
clean tex math string, preserving control sequences
(incluing \n, so also '\nu') inside $ $, while allowing
\n and \t to be meaningful in the text string
"""
s = "%s " % txt
out = []
i = 0
while i < len(s)-1:
if s[i] == '\\' and s[i+1] in ('n', 't'):
if s[i+1] == 'n':
out.append('\n')
elif s[i+1] == 't':
out.append('\t')
i += 1
elif s[i] == '$':
j = s[i+1:].find('$')
if j < 0:
j = len(s)
out.append(s[i:j+2])
i += j+2
else:
out.append(s[i])
i += 1
if i > 5000:
break
return ''.join(out).strip() | def function[clean_texmath, parameter[txt]]:
constant[
clean tex math string, preserving control sequences
(incluing
, so also '
u') inside $ $, while allowing
and to be meaningful in the text string
]
variable[s] assign[=] binary_operation[constant[%s ] <ast.Mod object at 0x7da2590d6920> name[txt]]
variable[out] assign[=] list[[]]
variable[i] assign[=] constant[0]
while compare[name[i] less[<] binary_operation[call[name[len], parameter[name[s]]] - constant[1]]] begin[:]
if <ast.BoolOp object at 0x7da18fe90b80> begin[:]
if compare[call[name[s]][binary_operation[name[i] + constant[1]]] equal[==] constant[n]] begin[:]
call[name[out].append, parameter[constant[
]]]
<ast.AugAssign object at 0x7da18fe90f10>
<ast.AugAssign object at 0x7da207f00460>
if compare[name[i] greater[>] constant[5000]] begin[:]
break
return[call[call[constant[].join, parameter[name[out]]].strip, parameter[]]] | keyword[def] identifier[clean_texmath] ( identifier[txt] ):
literal[string]
identifier[s] = literal[string] % identifier[txt]
identifier[out] =[]
identifier[i] = literal[int]
keyword[while] identifier[i] < identifier[len] ( identifier[s] )- literal[int] :
keyword[if] identifier[s] [ identifier[i] ]== literal[string] keyword[and] identifier[s] [ identifier[i] + literal[int] ] keyword[in] ( literal[string] , literal[string] ):
keyword[if] identifier[s] [ identifier[i] + literal[int] ]== literal[string] :
identifier[out] . identifier[append] ( literal[string] )
keyword[elif] identifier[s] [ identifier[i] + literal[int] ]== literal[string] :
identifier[out] . identifier[append] ( literal[string] )
identifier[i] += literal[int]
keyword[elif] identifier[s] [ identifier[i] ]== literal[string] :
identifier[j] = identifier[s] [ identifier[i] + literal[int] :]. identifier[find] ( literal[string] )
keyword[if] identifier[j] < literal[int] :
identifier[j] = identifier[len] ( identifier[s] )
identifier[out] . identifier[append] ( identifier[s] [ identifier[i] : identifier[j] + literal[int] ])
identifier[i] += identifier[j] + literal[int]
keyword[else] :
identifier[out] . identifier[append] ( identifier[s] [ identifier[i] ])
identifier[i] += literal[int]
keyword[if] identifier[i] > literal[int] :
keyword[break]
keyword[return] literal[string] . identifier[join] ( identifier[out] ). identifier[strip] () | def clean_texmath(txt):
"""
clean tex math string, preserving control sequences
(incluing
, so also '
u') inside $ $, while allowing
and to be meaningful in the text string
"""
s = '%s ' % txt
out = []
i = 0
while i < len(s) - 1:
if s[i] == '\\' and s[i + 1] in ('n', 't'):
if s[i + 1] == 'n':
out.append('\n') # depends on [control=['if'], data=[]]
elif s[i + 1] == 't':
out.append('\t') # depends on [control=['if'], data=[]]
i += 1 # depends on [control=['if'], data=[]]
elif s[i] == '$':
j = s[i + 1:].find('$')
if j < 0:
j = len(s) # depends on [control=['if'], data=['j']]
out.append(s[i:j + 2])
i += j + 2 # depends on [control=['if'], data=[]]
else:
out.append(s[i])
i += 1
if i > 5000:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['i']]
return ''.join(out).strip() |
def searchAlbums(self, title, **kwargs):
""" Search for an album. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
return self.search(libtype='photoalbum', title=title, **kwargs) | def function[searchAlbums, parameter[self, title]]:
constant[ Search for an album. See :func:`~plexapi.library.LibrarySection.search()` for usage. ]
return[call[name[self].search, parameter[]]] | keyword[def] identifier[searchAlbums] ( identifier[self] , identifier[title] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[search] ( identifier[libtype] = literal[string] , identifier[title] = identifier[title] ,** identifier[kwargs] ) | def searchAlbums(self, title, **kwargs):
""" Search for an album. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
return self.search(libtype='photoalbum', title=title, **kwargs) |
def d2_func(x, y, z):
'''
#=================================================
/poly fit for every SF grid data
#=================================================
'''
X, Y = np.meshgrid(x, y, copy=False)
X = X.flatten()
Y = Y.flatten()
A = np.array([np.ones(len(X)), X, X**2, Y, Y**2, X*Y]).T
Z = np.array(z)
B = Z.flatten()
# print(A.shape,B.shape)
coeff, r, rank, s = np.linalg.lstsq(A, B, rcond=None)
return -coeff[5] | def function[d2_func, parameter[x, y, z]]:
constant[
#=================================================
/poly fit for every SF grid data
#=================================================
]
<ast.Tuple object at 0x7da1b036d960> assign[=] call[name[np].meshgrid, parameter[name[x], name[y]]]
variable[X] assign[=] call[name[X].flatten, parameter[]]
variable[Y] assign[=] call[name[Y].flatten, parameter[]]
variable[A] assign[=] call[name[np].array, parameter[list[[<ast.Call object at 0x7da1b02c4820>, <ast.Name object at 0x7da1b02b52d0>, <ast.BinOp object at 0x7da1b02b5300>, <ast.Name object at 0x7da1b02b5240>, <ast.BinOp object at 0x7da1b02b52a0>, <ast.BinOp object at 0x7da1b02e8520>]]]].T
variable[Z] assign[=] call[name[np].array, parameter[name[z]]]
variable[B] assign[=] call[name[Z].flatten, parameter[]]
<ast.Tuple object at 0x7da1b02c4430> assign[=] call[name[np].linalg.lstsq, parameter[name[A], name[B]]]
return[<ast.UnaryOp object at 0x7da1b02c5450>] | keyword[def] identifier[d2_func] ( identifier[x] , identifier[y] , identifier[z] ):
literal[string]
identifier[X] , identifier[Y] = identifier[np] . identifier[meshgrid] ( identifier[x] , identifier[y] , identifier[copy] = keyword[False] )
identifier[X] = identifier[X] . identifier[flatten] ()
identifier[Y] = identifier[Y] . identifier[flatten] ()
identifier[A] = identifier[np] . identifier[array] ([ identifier[np] . identifier[ones] ( identifier[len] ( identifier[X] )), identifier[X] , identifier[X] ** literal[int] , identifier[Y] , identifier[Y] ** literal[int] , identifier[X] * identifier[Y] ]). identifier[T]
identifier[Z] = identifier[np] . identifier[array] ( identifier[z] )
identifier[B] = identifier[Z] . identifier[flatten] ()
identifier[coeff] , identifier[r] , identifier[rank] , identifier[s] = identifier[np] . identifier[linalg] . identifier[lstsq] ( identifier[A] , identifier[B] , identifier[rcond] = keyword[None] )
keyword[return] - identifier[coeff] [ literal[int] ] | def d2_func(x, y, z):
"""
#=================================================
/poly fit for every SF grid data
#=================================================
"""
(X, Y) = np.meshgrid(x, y, copy=False)
X = X.flatten()
Y = Y.flatten()
A = np.array([np.ones(len(X)), X, X ** 2, Y, Y ** 2, X * Y]).T
Z = np.array(z)
B = Z.flatten()
# print(A.shape,B.shape)
(coeff, r, rank, s) = np.linalg.lstsq(A, B, rcond=None)
return -coeff[5] |
def alias_present(name, FunctionName, Name, FunctionVersion, Description='',
                  region=None, key=None, keyid=None, profile=None):
    '''
    Ensure alias exists.
    name
        The name of the state definition.
    FunctionName
        Name of the function for which you want to create an alias.
    Name
        The name of the alias to be created.
    FunctionVersion
        Function version for which you are creating the alias.
    Description
        A short, user-defined function description. Lambda does not use this value. Assign a meaningful
        description as you see fit.
    region
        Region to connect to.
    key
        Secret key to be used.
    keyid
        Access key to be used.
    profile
        A dict with region, key and keyid, or a pillar key (string) that
        contains a dict with region, key and keyid.
    '''
    ret = {'name': Name,
           'result': True,
           'comment': '',
           'changes': {}
           }

    r = __salt__['boto_lambda.alias_exists'](
        FunctionName=FunctionName, Name=Name, region=region,
        key=key, keyid=keyid, profile=profile)
    if 'error' in r:
        ret['result'] = False
        ret['comment'] = ('Failed to create alias: '
                          '{0}.'.format(r['error']['message']))
        return ret

    if not r.get('exists'):
        # Alias is absent: create it (or only report what would happen
        # when running in test mode).
        if __opts__['test']:
            ret['comment'] = 'Alias {0} is set to be created.'.format(Name)
            ret['result'] = None
            return ret
        r = __salt__['boto_lambda.create_alias'](FunctionName, Name,
                                                 FunctionVersion, Description,
                                                 region, key, keyid, profile)
        if not r.get('created'):
            ret['result'] = False
            ret['comment'] = ('Failed to create alias: '
                              '{0}.'.format(r['error']['message']))
            return ret
        _describe = __salt__['boto_lambda.describe_alias'](
            FunctionName, Name, region=region, key=key,
            keyid=keyid, profile=profile)
        ret['changes']['old'] = {'alias': None}
        ret['changes']['new'] = _describe
        ret['comment'] = 'Alias {0} created.'.format(Name)
        return ret

    # Alias already exists: check whether its configuration drifted.
    ret['comment'] = os.linesep.join(
        [ret['comment'], 'Alias {0} is present.'.format(Name)])
    ret['changes'] = {}
    _describe = __salt__['boto_lambda.describe_alias'](
        FunctionName, Name, region=region, key=key, keyid=keyid,
        profile=profile)['alias']

    need_update = False
    options = {'FunctionVersion': FunctionVersion,
               'Description': Description}
    # BUGFIX: the loop variable used to be named ``key``, shadowing the AWS
    # secret-key parameter; the ``update_alias`` call below then received an
    # option name ('FunctionVersion' or 'Description') as its secret key.
    for opt, val in options.items():
        if _describe[opt] != val:
            need_update = True
            ret['changes'].setdefault('old', {})[opt] = _describe[opt]
            ret['changes'].setdefault('new', {})[opt] = val
    if need_update:
        ret['comment'] = os.linesep.join(
            [ret['comment'], 'Alias config to be modified'])
        if __opts__['test']:
            ret['comment'] = 'Alias {0} set to be modified.'.format(Name)
            ret['result'] = None
            return ret
        _r = __salt__['boto_lambda.update_alias'](
            FunctionName=FunctionName, Name=Name,
            FunctionVersion=FunctionVersion, Description=Description,
            region=region, key=key, keyid=keyid, profile=profile)
        if not _r.get('updated'):
            ret['result'] = False
            ret['comment'] = ('Failed to update alias: '
                              '{0}.'.format(_r['error']['message']))
            ret['changes'] = {}
    return ret
constant[
Ensure alias exists.
name
The name of the state definition.
FunctionName
Name of the function for which you want to create an alias.
Name
The name of the alias to be created.
FunctionVersion
Function version for which you are creating the alias.
Description
A short, user-defined function description. Lambda does not use this value. Assign a meaningful
description as you see fit.
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used.
profile
A dict with region, key and keyid, or a pillar key (string) that
contains a dict with region, key and keyid.
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1ff15d0>, <ast.Constant object at 0x7da1b1ff1600>, <ast.Constant object at 0x7da1b1ff0d30>, <ast.Constant object at 0x7da1b1ff1120>], [<ast.Name object at 0x7da1b1ff30a0>, <ast.Constant object at 0x7da1b1ff15a0>, <ast.Constant object at 0x7da1b1ff0d00>, <ast.Dict object at 0x7da1b1ff1030>]]
variable[r] assign[=] call[call[name[__salt__]][constant[boto_lambda.alias_exists]], parameter[]]
if compare[constant[error] in name[r]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] call[constant[Failed to create alias: {0}.].format, parameter[call[call[name[r]][constant[error]]][constant[message]]]]
return[name[ret]]
if <ast.UnaryOp object at 0x7da1b2007a30> begin[:]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[Alias {0} is set to be created.].format, parameter[name[Name]]]
call[name[ret]][constant[result]] assign[=] constant[None]
return[name[ret]]
variable[r] assign[=] call[call[name[__salt__]][constant[boto_lambda.create_alias]], parameter[name[FunctionName], name[Name], name[FunctionVersion], name[Description], name[region], name[key], name[keyid], name[profile]]]
if <ast.UnaryOp object at 0x7da1b2007c10> begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] call[constant[Failed to create alias: {0}.].format, parameter[call[call[name[r]][constant[error]]][constant[message]]]]
return[name[ret]]
variable[_describe] assign[=] call[call[name[__salt__]][constant[boto_lambda.describe_alias]], parameter[name[FunctionName], name[Name]]]
call[call[name[ret]][constant[changes]]][constant[old]] assign[=] dictionary[[<ast.Constant object at 0x7da1b2007370>], [<ast.Constant object at 0x7da1b2007550>]]
call[call[name[ret]][constant[changes]]][constant[new]] assign[=] name[_describe]
call[name[ret]][constant[comment]] assign[=] call[constant[Alias {0} created.].format, parameter[name[Name]]]
return[name[ret]]
call[name[ret]][constant[comment]] assign[=] call[name[os].linesep.join, parameter[list[[<ast.Subscript object at 0x7da1b1f2cc10>, <ast.Call object at 0x7da1b1f2cb50>]]]]
call[name[ret]][constant[changes]] assign[=] dictionary[[], []]
variable[_describe] assign[=] call[call[call[name[__salt__]][constant[boto_lambda.describe_alias]], parameter[name[FunctionName], name[Name]]]][constant[alias]]
variable[need_update] assign[=] constant[False]
variable[options] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f2cca0>, <ast.Constant object at 0x7da1b1f2ca60>], [<ast.Name object at 0x7da1b1f2c130>, <ast.Name object at 0x7da1b1f2c820>]]
for taget[tuple[[<ast.Name object at 0x7da1b1f2c7c0>, <ast.Name object at 0x7da1b1c179d0>]]] in starred[call[name[six].iteritems, parameter[name[options]]]] begin[:]
if compare[call[name[_describe]][name[key]] not_equal[!=] name[val]] begin[:]
variable[need_update] assign[=] constant[True]
call[call[call[name[ret]][constant[changes]].setdefault, parameter[constant[old], dictionary[[], []]]]][name[key]] assign[=] call[name[_describe]][name[key]]
call[call[call[name[ret]][constant[changes]].setdefault, parameter[constant[new], dictionary[[], []]]]][name[key]] assign[=] name[val]
if name[need_update] begin[:]
call[name[ret]][constant[comment]] assign[=] call[name[os].linesep.join, parameter[list[[<ast.Subscript object at 0x7da1b2096bc0>, <ast.Constant object at 0x7da1b20951b0>]]]]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[Alias {0} set to be modified.].format, parameter[name[Name]]]
call[name[ret]][constant[result]] assign[=] constant[None]
return[name[ret]]
variable[_r] assign[=] call[call[name[__salt__]][constant[boto_lambda.update_alias]], parameter[]]
if <ast.UnaryOp object at 0x7da1b2097130> begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] call[constant[Failed to update alias: {0}.].format, parameter[call[call[name[_r]][constant[error]]][constant[message]]]]
call[name[ret]][constant[changes]] assign[=] dictionary[[], []]
return[name[ret]] | keyword[def] identifier[alias_present] ( identifier[name] , identifier[FunctionName] , identifier[Name] , identifier[FunctionVersion] , identifier[Description] = literal[string] ,
identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[Name] ,
literal[string] : keyword[True] ,
literal[string] : literal[string] ,
literal[string] :{}
}
identifier[r] = identifier[__salt__] [ literal[string] ](
identifier[FunctionName] = identifier[FunctionName] , identifier[Name] = identifier[Name] , identifier[region] = identifier[region] ,
identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
keyword[if] literal[string] keyword[in] identifier[r] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]=( literal[string]
literal[string] . identifier[format] ( identifier[r] [ literal[string] ][ literal[string] ]))
keyword[return] identifier[ret]
keyword[if] keyword[not] identifier[r] . identifier[get] ( literal[string] ):
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[Name] )
identifier[ret] [ literal[string] ]= keyword[None]
keyword[return] identifier[ret]
identifier[r] = identifier[__salt__] [ literal[string] ]( identifier[FunctionName] , identifier[Name] ,
identifier[FunctionVersion] , identifier[Description] ,
identifier[region] , identifier[key] , identifier[keyid] , identifier[profile] )
keyword[if] keyword[not] identifier[r] . identifier[get] ( literal[string] ):
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]=( literal[string]
literal[string] . identifier[format] ( identifier[r] [ literal[string] ][ literal[string] ]))
keyword[return] identifier[ret]
identifier[_describe] = identifier[__salt__] [ literal[string] ](
identifier[FunctionName] , identifier[Name] , identifier[region] = identifier[region] , identifier[key] = identifier[key] ,
identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
identifier[ret] [ literal[string] ][ literal[string] ]={ literal[string] : keyword[None] }
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[_describe]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[Name] )
keyword[return] identifier[ret]
identifier[ret] [ literal[string] ]= identifier[os] . identifier[linesep] . identifier[join] (
[ identifier[ret] [ literal[string] ], literal[string] . identifier[format] ( identifier[Name] )])
identifier[ret] [ literal[string] ]={}
identifier[_describe] = identifier[__salt__] [ literal[string] ](
identifier[FunctionName] , identifier[Name] , identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] ,
identifier[profile] = identifier[profile] )[ literal[string] ]
identifier[need_update] = keyword[False]
identifier[options] ={ literal[string] : identifier[FunctionVersion] ,
literal[string] : identifier[Description] }
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[six] . identifier[iteritems] ( identifier[options] ):
keyword[if] identifier[_describe] [ identifier[key] ]!= identifier[val] :
identifier[need_update] = keyword[True]
identifier[ret] [ literal[string] ]. identifier[setdefault] ( literal[string] ,{})[ identifier[key] ]= identifier[_describe] [ identifier[key] ]
identifier[ret] [ literal[string] ]. identifier[setdefault] ( literal[string] ,{})[ identifier[key] ]= identifier[val]
keyword[if] identifier[need_update] :
identifier[ret] [ literal[string] ]= identifier[os] . identifier[linesep] . identifier[join] (
[ identifier[ret] [ literal[string] ], literal[string] ])
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[Name] )
identifier[ret] [ literal[string] ]= keyword[None]
keyword[return] identifier[ret]
identifier[_r] = identifier[__salt__] [ literal[string] ](
identifier[FunctionName] = identifier[FunctionName] , identifier[Name] = identifier[Name] ,
identifier[FunctionVersion] = identifier[FunctionVersion] , identifier[Description] = identifier[Description] ,
identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
keyword[if] keyword[not] identifier[_r] . identifier[get] ( literal[string] ):
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]=( literal[string]
literal[string] . identifier[format] ( identifier[_r] [ literal[string] ][ literal[string] ]))
identifier[ret] [ literal[string] ]={}
keyword[return] identifier[ret] | def alias_present(name, FunctionName, Name, FunctionVersion, Description='', region=None, key=None, keyid=None, profile=None):
"""
Ensure alias exists.
name
The name of the state definition.
FunctionName
Name of the function for which you want to create an alias.
Name
The name of the alias to be created.
FunctionVersion
Function version for which you are creating the alias.
Description
A short, user-defined function description. Lambda does not use this value. Assign a meaningful
description as you see fit.
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used.
profile
A dict with region, key and keyid, or a pillar key (string) that
contains a dict with region, key and keyid.
"""
ret = {'name': Name, 'result': True, 'comment': '', 'changes': {}}
r = __salt__['boto_lambda.alias_exists'](FunctionName=FunctionName, Name=Name, region=region, key=key, keyid=keyid, profile=profile)
if 'error' in r:
ret['result'] = False
ret['comment'] = 'Failed to create alias: {0}.'.format(r['error']['message'])
return ret # depends on [control=['if'], data=['r']]
if not r.get('exists'):
if __opts__['test']:
ret['comment'] = 'Alias {0} is set to be created.'.format(Name)
ret['result'] = None
return ret # depends on [control=['if'], data=[]]
r = __salt__['boto_lambda.create_alias'](FunctionName, Name, FunctionVersion, Description, region, key, keyid, profile)
if not r.get('created'):
ret['result'] = False
ret['comment'] = 'Failed to create alias: {0}.'.format(r['error']['message'])
return ret # depends on [control=['if'], data=[]]
_describe = __salt__['boto_lambda.describe_alias'](FunctionName, Name, region=region, key=key, keyid=keyid, profile=profile)
ret['changes']['old'] = {'alias': None}
ret['changes']['new'] = _describe
ret['comment'] = 'Alias {0} created.'.format(Name)
return ret # depends on [control=['if'], data=[]]
ret['comment'] = os.linesep.join([ret['comment'], 'Alias {0} is present.'.format(Name)])
ret['changes'] = {}
_describe = __salt__['boto_lambda.describe_alias'](FunctionName, Name, region=region, key=key, keyid=keyid, profile=profile)['alias']
need_update = False
options = {'FunctionVersion': FunctionVersion, 'Description': Description}
for (key, val) in six.iteritems(options):
if _describe[key] != val:
need_update = True
ret['changes'].setdefault('old', {})[key] = _describe[key]
ret['changes'].setdefault('new', {})[key] = val # depends on [control=['if'], data=['val']] # depends on [control=['for'], data=[]]
if need_update:
ret['comment'] = os.linesep.join([ret['comment'], 'Alias config to be modified'])
if __opts__['test']:
ret['comment'] = 'Alias {0} set to be modified.'.format(Name)
ret['result'] = None
return ret # depends on [control=['if'], data=[]]
_r = __salt__['boto_lambda.update_alias'](FunctionName=FunctionName, Name=Name, FunctionVersion=FunctionVersion, Description=Description, region=region, key=key, keyid=keyid, profile=profile)
if not _r.get('updated'):
ret['result'] = False
ret['comment'] = 'Failed to update alias: {0}.'.format(_r['error']['message'])
ret['changes'] = {} # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return ret |
def save_token(self, token, request, *args, **kwargs):
    """Persist *token* via the token-type-specific persistence hook.

    Only Bearer tokens are supported, so this simply delegates to
    ``save_bearer_token``.

    :param token: A (Bearer) token dict.
    :param request: OAuthlib request.
    :type request: oauthlib.common.Request
    """
    persist = self.save_bearer_token
    return persist(token, request, *args, **kwargs)
constant[Persist the token with a token type specific method.
Currently, only save_bearer_token is supported.
:param token: A (Bearer) token dict.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
]
return[call[name[self].save_bearer_token, parameter[name[token], name[request], <ast.Starred object at 0x7da1b179ac80>]]] | keyword[def] identifier[save_token] ( identifier[self] , identifier[token] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[save_bearer_token] ( identifier[token] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ) | def save_token(self, token, request, *args, **kwargs):
"""Persist the token with a token type specific method.
Currently, only save_bearer_token is supported.
:param token: A (Bearer) token dict.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
"""
return self.save_bearer_token(token, request, *args, **kwargs) |
def to_pycode(self):
    """Create a python code object from the more abstract
    codetransformer.Code object.

    Walks ``self.instrs`` and emits raw bytecode, translating each
    instruction's abstract argument (a constant, name, local variable,
    free/cell variable, or jump target) into the little-endian integer
    index that CPython stores in the instruction stream.

    Returns
    -------
    co : CodeType
        The python code object.
    """
    # Bind the argument-resolution tables locally; the loop below
    # indexes into these repeatedly.
    consts = self.consts
    names = self.names
    varnames = self.varnames
    freevars = self.freevars
    cellvars = self.cellvars
    # NOTE(review): ``argsize`` and ``WORDCODE`` are module-level
    # constants — presumably argsize == 2 with wordcode on CPython 3.6+;
    # confirm against the module header.
    bc = bytearray()
    for instr in self.instrs:
        bc.append(instr.opcode)  # Write the opcode byte.
        if isinstance(instr, LOAD_CONST):
            # Resolve the constant index.
            bc.extend(consts.index(instr.arg).to_bytes(argsize, 'little'))
        elif instr.uses_name:
            # Resolve the name index.
            bc.extend(names.index(instr.arg).to_bytes(argsize, 'little'))
        elif instr.uses_varname:
            # Resolve the local variable index.
            bc.extend(
                varnames.index(instr.arg).to_bytes(argsize, 'little'),
            )
        elif instr.uses_free:
            # uses_free is really "uses freevars **or** cellvars".
            try:
                # look for the name in cellvars
                bc.extend(
                    cellvars.index(instr.arg).to_bytes(argsize, 'little'),
                )
            except ValueError:
                # fall back to freevars, incrementing the length of
                # cellvars (CPython numbers cell slots first, then
                # free-variable slots).
                bc.extend(
                    (freevars.index(instr.arg) + len(cellvars)).to_bytes(
                        argsize,
                        'little',
                    )
                )
        elif instr.absjmp:
            # Resolve the absolute jump target.
            bc.extend(
                self.bytecode_offset(instr.arg).to_bytes(
                    argsize,
                    'little',
                ),
            )
        elif instr.reljmp:
            # Resolve the relative jump target.
            # We do this by subtracting the current instruction's
            # sparse index from the sparse index of the argument.
            # We then subtract argsize + 1 more bytes to account for the
            # space the current instruction itself takes up.
            bytecode_offset = self.bytecode_offset
            bc.extend((
                bytecode_offset(instr.arg) -
                bytecode_offset(instr) -
                argsize -
                1
            ).to_bytes(argsize, 'little',))
        elif instr.have_arg:
            # Write any other arg here.
            bc.extend(instr.arg.to_bytes(argsize, 'little'))
        elif WORDCODE:
            # with wordcode, all instructions are padded to 2 bytes,
            # even those that take no argument.
            bc.append(0)
    # Assemble the final immutable code object from the resolved
    # bytecode plus the metadata carried on ``self``.
    return CodeType(
        self.argcount,
        self.kwonlyargcount,
        len(varnames),
        self.stacksize,
        self.py_flags,
        bytes(bc),
        consts,
        names,
        varnames,
        self.filename,
        self.name,
        self.firstlineno,
        self.py_lnotab,
        freevars,
        cellvars,
    )
constant[Create a python code object from the more abstract
codetransfomer.Code object.
Returns
-------
co : CodeType
The python code object.
]
variable[consts] assign[=] name[self].consts
variable[names] assign[=] name[self].names
variable[varnames] assign[=] name[self].varnames
variable[freevars] assign[=] name[self].freevars
variable[cellvars] assign[=] name[self].cellvars
variable[bc] assign[=] call[name[bytearray], parameter[]]
for taget[name[instr]] in starred[name[self].instrs] begin[:]
call[name[bc].append, parameter[name[instr].opcode]]
if call[name[isinstance], parameter[name[instr], name[LOAD_CONST]]] begin[:]
call[name[bc].extend, parameter[call[call[name[consts].index, parameter[name[instr].arg]].to_bytes, parameter[name[argsize], constant[little]]]]]
return[call[name[CodeType], parameter[name[self].argcount, name[self].kwonlyargcount, call[name[len], parameter[name[varnames]]], name[self].stacksize, name[self].py_flags, call[name[bytes], parameter[name[bc]]], name[consts], name[names], name[varnames], name[self].filename, name[self].name, name[self].firstlineno, name[self].py_lnotab, name[freevars], name[cellvars]]]] | keyword[def] identifier[to_pycode] ( identifier[self] ):
literal[string]
identifier[consts] = identifier[self] . identifier[consts]
identifier[names] = identifier[self] . identifier[names]
identifier[varnames] = identifier[self] . identifier[varnames]
identifier[freevars] = identifier[self] . identifier[freevars]
identifier[cellvars] = identifier[self] . identifier[cellvars]
identifier[bc] = identifier[bytearray] ()
keyword[for] identifier[instr] keyword[in] identifier[self] . identifier[instrs] :
identifier[bc] . identifier[append] ( identifier[instr] . identifier[opcode] )
keyword[if] identifier[isinstance] ( identifier[instr] , identifier[LOAD_CONST] ):
identifier[bc] . identifier[extend] ( identifier[consts] . identifier[index] ( identifier[instr] . identifier[arg] ). identifier[to_bytes] ( identifier[argsize] , literal[string] ))
keyword[elif] identifier[instr] . identifier[uses_name] :
identifier[bc] . identifier[extend] ( identifier[names] . identifier[index] ( identifier[instr] . identifier[arg] ). identifier[to_bytes] ( identifier[argsize] , literal[string] ))
keyword[elif] identifier[instr] . identifier[uses_varname] :
identifier[bc] . identifier[extend] (
identifier[varnames] . identifier[index] ( identifier[instr] . identifier[arg] ). identifier[to_bytes] ( identifier[argsize] , literal[string] ),
)
keyword[elif] identifier[instr] . identifier[uses_free] :
keyword[try] :
identifier[bc] . identifier[extend] (
identifier[cellvars] . identifier[index] ( identifier[instr] . identifier[arg] ). identifier[to_bytes] ( identifier[argsize] , literal[string] ),
)
keyword[except] identifier[ValueError] :
identifier[bc] . identifier[extend] (
( identifier[freevars] . identifier[index] ( identifier[instr] . identifier[arg] )+ identifier[len] ( identifier[cellvars] )). identifier[to_bytes] (
identifier[argsize] ,
literal[string] ,
)
)
keyword[elif] identifier[instr] . identifier[absjmp] :
identifier[bc] . identifier[extend] (
identifier[self] . identifier[bytecode_offset] ( identifier[instr] . identifier[arg] ). identifier[to_bytes] (
identifier[argsize] ,
literal[string] ,
),
)
keyword[elif] identifier[instr] . identifier[reljmp] :
identifier[bytecode_offset] = identifier[self] . identifier[bytecode_offset]
identifier[bc] . identifier[extend] ((
identifier[bytecode_offset] ( identifier[instr] . identifier[arg] )-
identifier[bytecode_offset] ( identifier[instr] )-
identifier[argsize] -
literal[int]
). identifier[to_bytes] ( identifier[argsize] , literal[string] ,))
keyword[elif] identifier[instr] . identifier[have_arg] :
identifier[bc] . identifier[extend] ( identifier[instr] . identifier[arg] . identifier[to_bytes] ( identifier[argsize] , literal[string] ))
keyword[elif] identifier[WORDCODE] :
identifier[bc] . identifier[append] ( literal[int] )
keyword[return] identifier[CodeType] (
identifier[self] . identifier[argcount] ,
identifier[self] . identifier[kwonlyargcount] ,
identifier[len] ( identifier[varnames] ),
identifier[self] . identifier[stacksize] ,
identifier[self] . identifier[py_flags] ,
identifier[bytes] ( identifier[bc] ),
identifier[consts] ,
identifier[names] ,
identifier[varnames] ,
identifier[self] . identifier[filename] ,
identifier[self] . identifier[name] ,
identifier[self] . identifier[firstlineno] ,
identifier[self] . identifier[py_lnotab] ,
identifier[freevars] ,
identifier[cellvars] ,
) | def to_pycode(self):
"""Create a python code object from the more abstract
codetransfomer.Code object.
Returns
-------
co : CodeType
The python code object.
"""
consts = self.consts
names = self.names
varnames = self.varnames
freevars = self.freevars
cellvars = self.cellvars
bc = bytearray()
for instr in self.instrs:
bc.append(instr.opcode) # Write the opcode byte.
if isinstance(instr, LOAD_CONST):
# Resolve the constant index.
bc.extend(consts.index(instr.arg).to_bytes(argsize, 'little')) # depends on [control=['if'], data=[]]
elif instr.uses_name:
# Resolve the name index.
bc.extend(names.index(instr.arg).to_bytes(argsize, 'little')) # depends on [control=['if'], data=[]]
elif instr.uses_varname:
# Resolve the local variable index.
bc.extend(varnames.index(instr.arg).to_bytes(argsize, 'little')) # depends on [control=['if'], data=[]]
elif instr.uses_free:
# uses_free is really "uses freevars **or** cellvars".
try:
# look for the name in cellvars
bc.extend(cellvars.index(instr.arg).to_bytes(argsize, 'little')) # depends on [control=['try'], data=[]]
except ValueError:
# fall back to freevars, incrementing the length of
# cellvars.
bc.extend((freevars.index(instr.arg) + len(cellvars)).to_bytes(argsize, 'little')) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif instr.absjmp:
# Resolve the absolute jump target.
bc.extend(self.bytecode_offset(instr.arg).to_bytes(argsize, 'little')) # depends on [control=['if'], data=[]]
elif instr.reljmp:
# Resolve the relative jump target.
# We do this by subtracting the curren't instructions's
# sparse index from the sparse index of the argument.
# We then subtract argsize - 1 to account for the bytes the
# current instruction takes up.
bytecode_offset = self.bytecode_offset
bc.extend((bytecode_offset(instr.arg) - bytecode_offset(instr) - argsize - 1).to_bytes(argsize, 'little')) # depends on [control=['if'], data=[]]
elif instr.have_arg:
# Write any other arg here.
bc.extend(instr.arg.to_bytes(argsize, 'little')) # depends on [control=['if'], data=[]]
elif WORDCODE:
# with wordcode, all instructions are padded to 2 bytes
bc.append(0) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['instr']]
return CodeType(self.argcount, self.kwonlyargcount, len(varnames), self.stacksize, self.py_flags, bytes(bc), consts, names, varnames, self.filename, self.name, self.firstlineno, self.py_lnotab, freevars, cellvars) |
def members(self):
    """ Children of the collection's item
    :rtype: [Collection]
    """
    # Every subject whose DTS ``parent`` is this node is a child.
    children = self.graph.subjects(RDF_NAMESPACES.DTS.parent, self.asNode())
    return [self.children_class(child) for child in children]
constant[ Children of the collection's item
:rtype: [Collection]
]
return[call[name[list], parameter[<ast.ListComp object at 0x7da1b26ad2d0>]]] | keyword[def] identifier[members] ( identifier[self] ):
literal[string]
keyword[return] identifier[list] (
[
identifier[self] . identifier[children_class] ( identifier[child] )
keyword[for] identifier[child] keyword[in] identifier[self] . identifier[graph] . identifier[subjects] ( identifier[RDF_NAMESPACES] . identifier[DTS] . identifier[parent] , identifier[self] . identifier[asNode] ())
]
) | def members(self):
""" Children of the collection's item
:rtype: [Collection]
"""
return list([self.children_class(child) for child in self.graph.subjects(RDF_NAMESPACES.DTS.parent, self.asNode())]) |
def url(self) -> str:
    """
    Get URL for the message
    :return: str
    """
    # Public message links only exist for supergroups and channels,
    # and only when the chat has a public @username.
    linkable = (ChatType.SUPER_GROUP, ChatType.CHANNEL)
    if self.chat.type not in linkable:
        raise TypeError('Invalid chat type!')
    if not self.chat.username:
        raise TypeError('This chat does not have @username')
    return f"https://t.me/{self.chat.username}/{self.message_id}"
constant[
Get URL for the message
:return: str
]
if compare[name[self].chat.type <ast.NotIn object at 0x7da2590d7190> list[[<ast.Attribute object at 0x7da1b17b9690>, <ast.Attribute object at 0x7da1b17b8940>]]] begin[:]
<ast.Raise object at 0x7da1b17baec0>
return[<ast.JoinedStr object at 0x7da1b17b80a0>] | keyword[def] identifier[url] ( identifier[self] )-> identifier[str] :
literal[string]
keyword[if] identifier[self] . identifier[chat] . identifier[type] keyword[not] keyword[in] [ identifier[ChatType] . identifier[SUPER_GROUP] , identifier[ChatType] . identifier[CHANNEL] ]:
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[elif] keyword[not] identifier[self] . identifier[chat] . identifier[username] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[return] literal[string] | def url(self) -> str:
"""
Get URL for the message
:return: str
"""
if self.chat.type not in [ChatType.SUPER_GROUP, ChatType.CHANNEL]:
raise TypeError('Invalid chat type!') # depends on [control=['if'], data=[]]
elif not self.chat.username:
raise TypeError('This chat does not have @username') # depends on [control=['if'], data=[]]
return f'https://t.me/{self.chat.username}/{self.message_id}' |
def check_sensors():
    """
    collect and check all available sensors
    """
    descriptions = walk_data(sess, oid_description, helper)[0]
    states = walk_data(sess, oid_status, helper)[0]
    # Pair each sensor description with its status value, e.g.
    # ('Fan Sensor', '2'), ('Power Supply Sensor', '4').
    # We are doomed if the lists differ in length, but that should
    # never happen ... hopefully.
    for description, status in zip(descriptions, states):
        # translate the value to human readable
        try:
            status_string = senor_status_table[status]
        except KeyError:
            # if we receive an invalid value, we don't want to crash...
            helper.exit(summary="received an undefined value from device: " + status, exit_code=unknown, perfdata='')
        # for each sensor the summary is added like: Fan Sensor: good
        helper.add_summary("%s: %s" % (description, status_string))
        # set the status
        if status == "2":
            helper.status(critical)
        if status == "3":
            helper.status(warning)
constant[
collect and check all available sensors
]
variable[all_sensors] assign[=] call[call[name[walk_data], parameter[name[sess], name[oid_description], name[helper]]]][constant[0]]
variable[all_status] assign[=] call[call[name[walk_data], parameter[name[sess], name[oid_status], name[helper]]]][constant[0]]
variable[zipped] assign[=] call[name[zip], parameter[name[all_sensors], name[all_status]]]
for taget[name[sensor]] in starred[name[zipped]] begin[:]
variable[description] assign[=] call[name[sensor]][constant[0]]
variable[status] assign[=] call[name[sensor]][constant[1]]
<ast.Try object at 0x7da1b1c631c0>
call[name[helper].add_summary, parameter[binary_operation[constant[%s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1c633d0>, <ast.Name object at 0x7da1b1c62980>]]]]]
if compare[name[status] equal[==] constant[2]] begin[:]
call[name[helper].status, parameter[name[critical]]]
if compare[name[status] equal[==] constant[3]] begin[:]
call[name[helper].status, parameter[name[warning]]] | keyword[def] identifier[check_sensors] ():
literal[string]
identifier[all_sensors] = identifier[walk_data] ( identifier[sess] , identifier[oid_description] , identifier[helper] )[ literal[int] ]
identifier[all_status] = identifier[walk_data] ( identifier[sess] , identifier[oid_status] , identifier[helper] )[ literal[int] ]
identifier[zipped] = identifier[zip] ( identifier[all_sensors] , identifier[all_status] )
keyword[for] identifier[sensor] keyword[in] identifier[zipped] :
identifier[description] = identifier[sensor] [ literal[int] ]
identifier[status] = identifier[sensor] [ literal[int] ]
keyword[try] :
identifier[status_string] = identifier[senor_status_table] [ identifier[status] ]
keyword[except] identifier[KeyError] :
identifier[helper] . identifier[exit] ( identifier[summary] = literal[string] + identifier[status] , identifier[exit_code] = identifier[unknown] , identifier[perfdata] = literal[string] )
identifier[helper] . identifier[add_summary] ( literal[string] %( identifier[description] , identifier[status_string] ))
keyword[if] identifier[status] == literal[string] :
identifier[helper] . identifier[status] ( identifier[critical] )
keyword[if] identifier[status] == literal[string] :
identifier[helper] . identifier[status] ( identifier[warning] ) | def check_sensors():
"""
collect and check all available sensors
"""
all_sensors = walk_data(sess, oid_description, helper)[0]
all_status = walk_data(sess, oid_status, helper)[0]
# here we zip all index and descriptions to have a list like
# [('Fan Sensor', '2'), ('Power Supply Sensor', '4')]
# we are doomed if the lists do not have the same length ... but that should never happen ... hopefully
zipped = zip(all_sensors, all_status)
for sensor in zipped:
description = sensor[0]
status = sensor[1]
# translate the value to human readable
try:
status_string = senor_status_table[status] # depends on [control=['try'], data=[]]
except KeyError: # if we receive an invalid value, we don't want to crash...
helper.exit(summary='received an undefined value from device: ' + status, exit_code=unknown, perfdata='') # depends on [control=['except'], data=[]]
# for each sensor the summary is added like: Fan Sensor: good
helper.add_summary('%s: %s' % (description, status_string))
# set the status
if status == '2':
helper.status(critical) # depends on [control=['if'], data=[]]
if status == '3':
helper.status(warning) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sensor']] |
def plot(image, overlay=None, blend=False,
         alpha=1, cmap='Greys_r', overlay_cmap='jet', overlay_alpha=0.9,
         cbar=False, cbar_length=0.8, cbar_dx=0., cbar_vertical=True,
         axis=0, nslices=12, slices=None, ncol=None, slice_buffer=None, black_bg=True,
         bg_thresh_quant=0.01, bg_val_quant=0.99, domain_image_map=None, crop=False, scale=False,
         reverse=False, title=None, title_fontsize=20, title_dx=0., title_dy=0.,
         filename=None, dpi=500, figsize=1.5, reorient=True):
    """
    Plot an ANTsImage.
    By default, images will be reoriented to 'LAI' orientation before plotting.
    So, if axis == 0, the images will be ordered from the
    left side of the brain to the right side of the brain. If axis == 1,
    the images will be ordered from the anterior (front) of the brain to
    the posterior (back) of the brain. And if axis == 2, the images will
    be ordered from the inferior (bottom) of the brain to the superior (top)
    of the brain.
    ANTsR function: `plot.antsImage`
    Arguments
    ---------
    image : ANTsImage
        image to plot
    overlay : ANTsImage
        image to overlay on base image
    blend : boolean
        if True and an overlay is given, the overlay is alpha-blended into
        the base image (image*alpha + overlay*(1-alpha)) and then no separate
        overlay is drawn
    alpha : float
        transparency of the base image; when `blend` is True and alpha == 1,
        a value of 0.5 is used for the blend instead
    cmap : string
        colormap to use for base image. See matplotlib.
    overlay_cmap : string
        colormap to use for overlay images, if applicable. See matplotlib.
    overlay_alpha : float
        level of transparency for any overlays. Smaller value means
        the overlay is more transparent. See matplotlib.
    cbar : boolean
        if True, a colorbar is added for the last image drawn
    cbar_length : float
        fraction of the figure edge the colorbar spans (3D plots only)
    cbar_dx : float
        offset applied to the colorbar position (3D plots only)
    cbar_vertical : boolean
        if True the colorbar is vertical, otherwise horizontal
    axis : integer
        which axis to plot along if image is 3D
    nslices : integer
        number of slices to plot if image is 3D
    slices : list or tuple of integers
        specific slice indices to plot if image is 3D.
        If given, this will override `nslices`.
        This can be absolute array indices (e.g. (80,100,120)), or
        this can be relative array indices (e.g. (0.4,0.5,0.6))
    ncol : integer
        Number of columns to have on the plot if image is 3D.
    slice_buffer : integer
        how many slices to buffer when finding the non-zero slices of
        a 3D images. So, if slice_buffer = 10, then the first slice
        in a 3D image will be the first non-zero slice index plus 10 more
        slices.
    black_bg : boolean
        if True, the background of the image(s) will be black.
        if False, the background of the image(s) will be determined by the
        values `bg_thresh_quant` and `bg_val_quant`.
    bg_thresh_quant : float
        if white_bg=True, the background will be determined by thresholding
        the image at the `bg_thresh` quantile value and setting the background
        intensity to the `bg_val` quantile value.
        This value should be in [0, 1] - somewhere around 0.01 is recommended.
            - equal to 1 will threshold the entire image
            - equal to 0 will threshold none of the image
    bg_val_quant : float
        if white_bg=True, the background will be determined by thresholding
        the image at the `bg_thresh` quantile value and setting the background
        intensity to the `bg_val` quantile value.
        This value should be in [0, 1]
            - equal to 1 is pure white
            - equal to 0 is pure black
            - somewhere in between is gray
    domain_image_map : ANTsImage
        this input ANTsImage or list of ANTsImage types contains a reference image
        `domain_image` and optional reference mapping named `domainMap`.
        If supplied, the image(s) to be plotted will be mapped to the domain
        image space before plotting - useful for non-standard image orientations.
    crop : boolean
        if true, the image(s) will be cropped to their bounding boxes, resulting
        in a potentially smaller image size.
        if false, the image(s) will not be cropped
    scale : boolean or 2-tuple
        if true, nothing will happen to intensities of image(s) and overlay(s)
        if false, dynamic range will be maximized when visualizing overlays
        if 2-tuple, the image will be dynamically scaled between these quantiles
    reverse : boolean
        if true, the order in which the slices are plotted will be reversed.
        This is useful if you want to plot from the front of the brain first
        to the back of the brain, or vice-versa
    title : string
        add a title to the plot
    title_fontsize : integer
        font size of the title
    title_dx : float
        horizontal offset of the title from figure center
    title_dy : float
        vertical offset of the title from its default height
    filename : string
        if given, the resulting image will be saved to this file
    dpi : integer
        determines resolution of image if saved to file. Higher values
        result in higher resolution images, but at a cost of having a
        larger file size
    figsize : float
        per-tile scale factor for the 3D grid figure size
    reorient : boolean
        if True (default), 3D images are reoriented to 'LAI' before plotting
    Example
    -------
    >>> import ants
    >>> import numpy as np
    >>> img = ants.image_read(ants.get_data('r16'))
    >>> segs = img.kmeans_segmentation(k=3)['segmentation']
    >>> ants.plot(img, segs*(segs==1), crop=True)
    >>> ants.plot(img, segs*(segs==1), crop=False)
    >>> mni = ants.image_read(ants.get_data('mni'))
    >>> segs = mni.kmeans_segmentation(k=3)['segmentation']
    >>> ants.plot(mni, segs*(segs==1), crop=False)
    """
    # map anatomical-plane names to numeric axes
    # NOTE(review): 'saggittal' is a misspelling of 'sagittal'; the correctly
    # spelled name is NOT accepted here (it is a runtime-matched string).
    if (axis == 'x') or (axis == 'saggittal'):
        axis = 0
    if (axis == 'y') or (axis == 'coronal'):
        axis = 1
    if (axis == 'z') or (axis == 'axial'):
        axis = 2
    # small 2D-array orientation helpers used when laying out slices
    def mirror_matrix(x):
        return x[::-1,:]
    def rotate270_matrix(x):
        return mirror_matrix(x.T)
    def rotate180_matrix(x):
        return x[::-1,:]
    def rotate90_matrix(x):
        return x.T
    # NOTE(review): flip_matrix is defined but never used in this function
    def flip_matrix(x):
        return mirror_matrix(rotate180_matrix(x))
    def reorient_slice(x, axis):
        # axial slices (axis == 2) get an extra rotation + mirror so they
        # display in the conventional radiological layout
        if (axis != 2):
            x = rotate90_matrix(x)
        if (axis == 2):
            x = rotate270_matrix(x)
            x = mirror_matrix(x)
        return x
    # need this hack because of a weird NaN warning from matplotlib with overlays
    # NOTE(review): this globally disables warnings and 'default' at the end
    # does not restore any pre-existing filter state; if the function raises,
    # warnings stay ignored. A warnings.catch_warnings() context would be safer.
    warnings.simplefilter('ignore')
    # handle `image` argument
    if isinstance(image, str):
        image = iio2.image_read(image)
    if not isinstance(image, iio.ANTsImage):
        raise ValueError('image argument must be an ANTsImage')
    if (image.pixeltype not in {'float', 'double'}) or (image.is_rgb):
        scale = False # turn off scaling if image is discrete
    # handle `overlay` argument
    if overlay is not None:
        if isinstance(overlay, str):
            overlay = iio2.image_read(overlay)
        if not isinstance(overlay, iio.ANTsImage):
            raise ValueError('overlay argument must be an ANTsImage')
        # bring the overlay into the base image's physical space if needed
        if not iio.image_physical_space_consistency(image, overlay):
            overlay = reg.resample_image_to_target(overlay, image, interp_type='linear')
        if blend:
            # fold the overlay into the base image and drop the overlay
            if alpha == 1:
                alpha = 0.5
            image = image*alpha + overlay*(1-alpha)
            overlay = None
            alpha = 1.
    # handle `domain_image_map` argument
    if domain_image_map is not None:
        if isinstance(domain_image_map, iio.ANTsImage):
            # identity affine transform used only to resample into the domain space
            tx = tio2.new_ants_transform(precision='float', transform_type='AffineTransform',
                                        dimension=image.dimension)
            image = tio.apply_ants_transform_to_image(tx, image, domain_image_map)
            if overlay is not None:
                overlay = tio.apply_ants_transform_to_image(tx, overlay,
                                                            domain_image_map,
                                                            interpolation='linear')
        elif isinstance(domain_image_map, (list, tuple)):
            # expect an image and transformation
            if len(domain_image_map) != 2:
                raise ValueError('domain_image_map list or tuple must have length == 2')
            dimg = domain_image_map[0]
            if not isinstance(dimg, iio.ANTsImage):
                raise ValueError('domain_image_map first entry should be ANTsImage')
            tx = domain_image_map[1]
            image = reg.apply_transforms(dimg, image, transform_list=tx)
            if overlay is not None:
                overlay = reg.apply_transforms(dimg, overlay, transform_list=tx,
                                               interpolator='linear')
    ## single-channel images ##
    if image.components == 1:
        # potentially crop image
        if crop:
            plotmask = image.get_mask(cleanup=0)
            # an empty mask would crop everything away; make it all-ones instead
            if plotmask.max() == 0:
                plotmask += 1
            image = image.crop_image(plotmask)
            if overlay is not None:
                overlay = overlay.crop_image(plotmask)
        # potentially find dynamic range
        if scale == True:
            vmin, vmax = image.quantile((0.05,0.95))
        elif isinstance(scale, (list,tuple)):
            if len(scale) != 2:
                raise ValueError('scale argument must be boolean or list/tuple with two values')
            vmin, vmax = image.quantile(scale)
        else:
            # let matplotlib pick the intensity range
            vmin = None
            vmax = None
        # Plot 2D image
        if image.dimension == 2:
            img_arr = image.numpy()
            img_arr = rotate90_matrix(img_arr)
            if not black_bg:
                img_arr[img_arr<image.quantile(bg_thresh_quant)] = image.quantile(bg_val_quant)
            if overlay is not None:
                ov_arr = overlay.numpy()
                ov_arr = rotate90_matrix(ov_arr)
                # zero-valued overlay voxels become NaN so matplotlib leaves them transparent
                ov_arr[np.abs(ov_arr) == 0] = np.nan
            fig = plt.figure()
            if title is not None:
                fig.suptitle(title, fontsize=title_fontsize, x=0.5+title_dx, y=0.95+title_dy)
            ax = plt.subplot(111)
            # plot main image
            im = ax.imshow(img_arr, cmap=cmap,
                           alpha=alpha,
                           vmin=vmin, vmax=vmax)
            if overlay is not None:
                im = ax.imshow(ov_arr,
                               alpha=overlay_alpha,
                               cmap=overlay_cmap)
            if cbar:
                cbar_orient = 'vertical' if cbar_vertical else 'horizontal'
                fig.colorbar(im, orientation=cbar_orient)
            plt.axis('off')
        # Plot 3D image
        elif image.dimension == 3:
            # resample image if spacing is very unbalanced
            spacing = [s for i,s in enumerate(image.spacing) if i != axis]
            was_resampled = False
            if (max(spacing) / min(spacing)) > 3.:
                was_resampled = True
                new_spacing = (1,1,1)
                image = image.resample_image(tuple(new_spacing))
                if overlay is not None:
                    overlay = overlay.resample_image(tuple(new_spacing))
            if reorient:
                image = image.reorient_image2('LAI')
            img_arr = image.numpy()
            # reorder dims so that chosen axis is first
            img_arr = np.rollaxis(img_arr, axis)
            if overlay is not None:
                if reorient:
                    overlay = overlay.reorient_image2('LAI')
                ov_arr = overlay.numpy()
                # NaN-out zeros (transparent), then match the axis reordering
                ov_arr[np.abs(ov_arr) == 0] = np.nan
                ov_arr = np.rollaxis(ov_arr, axis)
            if slices is None:
                # auto-select evenly spaced slices within the non-empty extent
                if not isinstance(slice_buffer, (list, tuple)):
                    if slice_buffer is None:
                        slice_buffer = (int(img_arr.shape[1]*0.1), int(img_arr.shape[2]*0.1))
                    else:
                        slice_buffer = (slice_buffer, slice_buffer)
                # slices whose total intensity exceeds a small epsilon count as non-empty
                nonzero = np.where(img_arr.sum(axis=(1,2)) > 0.01)[0]
                min_idx = nonzero[0] + slice_buffer[0]
                max_idx = nonzero[-1] - slice_buffer[1]
                slice_idxs = np.linspace(min_idx, max_idx, nslices).astype('int')
                if reverse:
                    slice_idxs = np.array(list(reversed(slice_idxs)))
            else:
                if isinstance(slices, (int,float)):
                    slices = [slices]
                # if all slices are less than 1, infer that they are relative slices
                if sum([s > 1 for s in slices]) == 0:
                    slices = [int(s*img_arr.shape[0]) for s in slices]
                slice_idxs = slices
                nslices = len(slices)
            if was_resampled:
                # re-calculate slices to account for new image shape
                slice_idxs = np.unique(np.array([int(s*(image.shape[axis]/img_arr.shape[0])) for s in slice_idxs]))
            # only have one row if nslices <= 6 and user didnt specify ncol
            if ncol is None:
                if (nslices <= 6):
                    ncol = nslices
                else:
                    ncol = int(round(math.sqrt(nslices)))
            # calculate grid size
            nrow = math.ceil(nslices / ncol)
            xdim = img_arr.shape[2]
            ydim = img_arr.shape[1]
            dim_ratio = ydim/xdim
            fig = plt.figure(figsize=((ncol+1)*figsize*dim_ratio, (nrow+1)*figsize))
            if title is not None:
                fig.suptitle(title, fontsize=title_fontsize, x=0.5+title_dx, y=0.95+title_dy)
            # zero-gap grid so slices tile seamlessly
            gs = gridspec.GridSpec(nrow, ncol,
                     wspace=0.0, hspace=0.0,
                     top=1.-0.5/(nrow+1), bottom=0.5/(nrow+1),
                     left=0.5/(ncol+1), right=1-0.5/(ncol+1))
            slice_idx_idx = 0
            for i in range(nrow):
                for j in range(ncol):
                    if slice_idx_idx < len(slice_idxs):
                        imslice = img_arr[slice_idxs[slice_idx_idx]]
                        imslice = reorient_slice(imslice, axis)
                        if not black_bg:
                            imslice[imslice<image.quantile(bg_thresh_quant)] = image.quantile(bg_val_quant)
                    else:
                        # pad unused grid cells with blank slices
                        imslice = np.zeros_like(img_arr[0])
                        imslice = reorient_slice(imslice, axis)
                    ax = plt.subplot(gs[i,j])
                    im = ax.imshow(imslice, cmap=cmap,
                                   vmin=vmin, vmax=vmax)
                    if overlay is not None:
                        if slice_idx_idx < len(slice_idxs):
                            ovslice = ov_arr[slice_idxs[slice_idx_idx]]
                            ovslice = reorient_slice(ovslice, axis)
                            im = ax.imshow(ovslice, alpha=overlay_alpha, cmap=overlay_cmap)
                    ax.axis('off')
                    slice_idx_idx += 1
            if cbar:
                # colorbar uses the last-drawn mappable `im`
                cbar_start = (1-cbar_length) / 2
                if cbar_vertical:
                    cax = fig.add_axes([0.9+cbar_dx, cbar_start, 0.03, cbar_length])
                    cbar_orient = 'vertical'
                else:
                    cax = fig.add_axes([cbar_start, 0.08+cbar_dx, cbar_length, 0.03])
                    cbar_orient = 'horizontal'
                fig.colorbar(im, cax=cax, orientation=cbar_orient)
    ## multi-channel images ##
    elif image.components > 1:
        if not image.is_rgb:
            raise ValueError('Multi-component images only supported if they are RGB')
        img_arr = image.numpy()
        # NOTE(review): this indexing assumes a 2D RGB image (H, W, 3) — 3D RGB
        # images would fail here; confirm upstream guarantees.
        img_arr = np.stack([rotate90_matrix(img_arr[:,:,i]) for i in range(3)], axis=-1)
        fig = plt.figure()
        ax = plt.subplot(111)
        # plot main image
        ax.imshow(img_arr, alpha=alpha)
        plt.axis('off')
    if filename is not None:
        filename = os.path.expanduser(filename)
        plt.savefig(filename, dpi=dpi, transparent=True, bbox_inches='tight')
        plt.close(fig)
    else:
        plt.show()
    # turn warnings back to default
    warnings.simplefilter('default')
constant[
Plot an ANTsImage.
By default, images will be reoriented to 'LAI' orientation before plotting.
So, if axis == 0, the images will be ordered from the
left side of the brain to the right side of the brain. If axis == 1,
the images will be ordered from the anterior (front) of the brain to
the posterior (back) of the brain. And if axis == 2, the images will
be ordered from the inferior (bottom) of the brain to the superior (top)
of the brain.
ANTsR function: `plot.antsImage`
Arguments
---------
image : ANTsImage
image to plot
overlay : ANTsImage
image to overlay on base image
cmap : string
colormap to use for base image. See matplotlib.
overlay_cmap : string
colormap to use for overlay images, if applicable. See matplotlib.
overlay_alpha : float
level of transparency for any overlays. Smaller value means
the overlay is more transparent. See matplotlib.
axis : integer
which axis to plot along if image is 3D
nslices : integer
number of slices to plot if image is 3D
slices : list or tuple of integers
specific slice indices to plot if image is 3D.
If given, this will override `nslices`.
This can be absolute array indices (e.g. (80,100,120)), or
this can be relative array indices (e.g. (0.4,0.5,0.6))
ncol : integer
Number of columns to have on the plot if image is 3D.
slice_buffer : integer
how many slices to buffer when finding the non-zero slices of
a 3D images. So, if slice_buffer = 10, then the first slice
in a 3D image will be the first non-zero slice index plus 10 more
slices.
black_bg : boolean
if True, the background of the image(s) will be black.
if False, the background of the image(s) will be determined by the
values `bg_thresh_quant` and `bg_val_quant`.
bg_thresh_quant : float
if white_bg=True, the background will be determined by thresholding
the image at the `bg_thresh` quantile value and setting the background
intensity to the `bg_val` quantile value.
This value should be in [0, 1] - somewhere around 0.01 is recommended.
- equal to 1 will threshold the entire image
- equal to 0 will threshold none of the image
bg_val_quant : float
if white_bg=True, the background will be determined by thresholding
the image at the `bg_thresh` quantile value and setting the background
intensity to the `bg_val` quantile value.
This value should be in [0, 1]
- equal to 1 is pure white
- equal to 0 is pure black
- somewhere in between is gray
domain_image_map : ANTsImage
this input ANTsImage or list of ANTsImage types contains a reference image
`domain_image` and optional reference mapping named `domainMap`.
If supplied, the image(s) to be plotted will be mapped to the domain
image space before plotting - useful for non-standard image orientations.
crop : boolean
if true, the image(s) will be cropped to their bounding boxes, resulting
in a potentially smaller image size.
if false, the image(s) will not be cropped
scale : boolean or 2-tuple
if true, nothing will happen to intensities of image(s) and overlay(s)
if false, dynamic range will be maximized when visualizing overlays
if 2-tuple, the image will be dynamically scaled between these quantiles
reverse : boolean
if true, the order in which the slices are plotted will be reversed.
This is useful if you want to plot from the front of the brain first
to the back of the brain, or vice-versa
title : string
add a title to the plot
filename : string
if given, the resulting image will be saved to this file
dpi : integer
determines resolution of image if saved to file. Higher values
result in higher resolution images, but at a cost of having a
larger file size
Example
-------
>>> import ants
>>> import numpy as np
>>> img = ants.image_read(ants.get_data('r16'))
>>> segs = img.kmeans_segmentation(k=3)['segmentation']
>>> ants.plot(img, segs*(segs==1), crop=True)
>>> ants.plot(img, segs*(segs==1), crop=False)
>>> mni = ants.image_read(ants.get_data('mni'))
>>> segs = mni.kmeans_segmentation(k=3)['segmentation']
>>> ants.plot(mni, segs*(segs==1), crop=False)
]
if <ast.BoolOp object at 0x7da1b140aa40> begin[:]
variable[axis] assign[=] constant[0]
if <ast.BoolOp object at 0x7da1b140a830> begin[:]
variable[axis] assign[=] constant[1]
if <ast.BoolOp object at 0x7da1b140a620> begin[:]
variable[axis] assign[=] constant[2]
def function[mirror_matrix, parameter[x]]:
return[call[name[x]][tuple[[<ast.Slice object at 0x7da1b140a2f0>, <ast.Slice object at 0x7da1b140a260>]]]]
def function[rotate270_matrix, parameter[x]]:
return[call[name[mirror_matrix], parameter[name[x].T]]]
def function[rotate180_matrix, parameter[x]]:
return[call[name[x]][tuple[[<ast.Slice object at 0x7da1b1409f00>, <ast.Slice object at 0x7da1b1409e70>]]]]
def function[rotate90_matrix, parameter[x]]:
return[name[x].T]
def function[flip_matrix, parameter[x]]:
return[call[name[mirror_matrix], parameter[call[name[rotate180_matrix], parameter[name[x]]]]]]
def function[reorient_slice, parameter[x, axis]]:
if compare[name[axis] not_equal[!=] constant[2]] begin[:]
variable[x] assign[=] call[name[rotate90_matrix], parameter[name[x]]]
if compare[name[axis] equal[==] constant[2]] begin[:]
variable[x] assign[=] call[name[rotate270_matrix], parameter[name[x]]]
variable[x] assign[=] call[name[mirror_matrix], parameter[name[x]]]
return[name[x]]
call[name[warnings].simplefilter, parameter[constant[ignore]]]
if call[name[isinstance], parameter[name[image], name[str]]] begin[:]
variable[image] assign[=] call[name[iio2].image_read, parameter[name[image]]]
if <ast.UnaryOp object at 0x7da1b14091e0> begin[:]
<ast.Raise object at 0x7da1b14090c0>
if <ast.BoolOp object at 0x7da1b1408fd0> begin[:]
variable[scale] assign[=] constant[False]
if compare[name[overlay] is_not constant[None]] begin[:]
if call[name[isinstance], parameter[name[overlay], name[str]]] begin[:]
variable[overlay] assign[=] call[name[iio2].image_read, parameter[name[overlay]]]
if <ast.UnaryOp object at 0x7da1b1408a60> begin[:]
<ast.Raise object at 0x7da1b1408940>
if <ast.UnaryOp object at 0x7da1b1408850> begin[:]
variable[overlay] assign[=] call[name[reg].resample_image_to_target, parameter[name[overlay], name[image]]]
if name[blend] begin[:]
if compare[name[alpha] equal[==] constant[1]] begin[:]
variable[alpha] assign[=] constant[0.5]
variable[image] assign[=] binary_operation[binary_operation[name[image] * name[alpha]] + binary_operation[name[overlay] * binary_operation[constant[1] - name[alpha]]]]
variable[overlay] assign[=] constant[None]
variable[alpha] assign[=] constant[1.0]
if compare[name[domain_image_map] is_not constant[None]] begin[:]
if call[name[isinstance], parameter[name[domain_image_map], name[iio].ANTsImage]] begin[:]
variable[tx] assign[=] call[name[tio2].new_ants_transform, parameter[]]
variable[image] assign[=] call[name[tio].apply_ants_transform_to_image, parameter[name[tx], name[image], name[domain_image_map]]]
if compare[name[overlay] is_not constant[None]] begin[:]
variable[overlay] assign[=] call[name[tio].apply_ants_transform_to_image, parameter[name[tx], name[overlay], name[domain_image_map]]]
if compare[name[image].components equal[==] constant[1]] begin[:]
if name[crop] begin[:]
variable[plotmask] assign[=] call[name[image].get_mask, parameter[]]
if compare[call[name[plotmask].max, parameter[]] equal[==] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b15ede10>
variable[image] assign[=] call[name[image].crop_image, parameter[name[plotmask]]]
if compare[name[overlay] is_not constant[None]] begin[:]
variable[overlay] assign[=] call[name[overlay].crop_image, parameter[name[plotmask]]]
if compare[name[scale] equal[==] constant[True]] begin[:]
<ast.Tuple object at 0x7da1b15ed990> assign[=] call[name[image].quantile, parameter[tuple[[<ast.Constant object at 0x7da1b15ed840>, <ast.Constant object at 0x7da1b15ed810>]]]]
if compare[name[image].dimension equal[==] constant[2]] begin[:]
variable[img_arr] assign[=] call[name[image].numpy, parameter[]]
variable[img_arr] assign[=] call[name[rotate90_matrix], parameter[name[img_arr]]]
if <ast.UnaryOp object at 0x7da1b15ecf10> begin[:]
call[name[img_arr]][compare[name[img_arr] less[<] call[name[image].quantile, parameter[name[bg_thresh_quant]]]]] assign[=] call[name[image].quantile, parameter[name[bg_val_quant]]]
if compare[name[overlay] is_not constant[None]] begin[:]
variable[ov_arr] assign[=] call[name[overlay].numpy, parameter[]]
variable[ov_arr] assign[=] call[name[rotate90_matrix], parameter[name[ov_arr]]]
call[name[ov_arr]][compare[call[name[np].abs, parameter[name[ov_arr]]] equal[==] constant[0]]] assign[=] name[np].nan
variable[fig] assign[=] call[name[plt].figure, parameter[]]
if compare[name[title] is_not constant[None]] begin[:]
call[name[fig].suptitle, parameter[name[title]]]
variable[ax] assign[=] call[name[plt].subplot, parameter[constant[111]]]
variable[im] assign[=] call[name[ax].imshow, parameter[name[img_arr]]]
if compare[name[overlay] is_not constant[None]] begin[:]
variable[im] assign[=] call[name[ax].imshow, parameter[name[ov_arr]]]
if name[cbar] begin[:]
variable[cbar_orient] assign[=] <ast.IfExp object at 0x7da1b15ebb80>
call[name[fig].colorbar, parameter[name[im]]]
call[name[plt].axis, parameter[constant[off]]]
if compare[name[filename] is_not constant[None]] begin[:]
variable[filename] assign[=] call[name[os].path.expanduser, parameter[name[filename]]]
call[name[plt].savefig, parameter[name[filename]]]
call[name[plt].close, parameter[name[fig]]]
call[name[warnings].simplefilter, parameter[constant[default]]] | keyword[def] identifier[plot] ( identifier[image] , identifier[overlay] = keyword[None] , identifier[blend] = keyword[False] ,
identifier[alpha] = literal[int] , identifier[cmap] = literal[string] , identifier[overlay_cmap] = literal[string] , identifier[overlay_alpha] = literal[int] ,
identifier[cbar] = keyword[False] , identifier[cbar_length] = literal[int] , identifier[cbar_dx] = literal[int] , identifier[cbar_vertical] = keyword[True] ,
identifier[axis] = literal[int] , identifier[nslices] = literal[int] , identifier[slices] = keyword[None] , identifier[ncol] = keyword[None] , identifier[slice_buffer] = keyword[None] , identifier[black_bg] = keyword[True] ,
identifier[bg_thresh_quant] = literal[int] , identifier[bg_val_quant] = literal[int] , identifier[domain_image_map] = keyword[None] , identifier[crop] = keyword[False] , identifier[scale] = keyword[False] ,
identifier[reverse] = keyword[False] , identifier[title] = keyword[None] , identifier[title_fontsize] = literal[int] , identifier[title_dx] = literal[int] , identifier[title_dy] = literal[int] ,
identifier[filename] = keyword[None] , identifier[dpi] = literal[int] , identifier[figsize] = literal[int] , identifier[reorient] = keyword[True] ):
literal[string]
keyword[if] ( identifier[axis] == literal[string] ) keyword[or] ( identifier[axis] == literal[string] ):
identifier[axis] = literal[int]
keyword[if] ( identifier[axis] == literal[string] ) keyword[or] ( identifier[axis] == literal[string] ):
identifier[axis] = literal[int]
keyword[if] ( identifier[axis] == literal[string] ) keyword[or] ( identifier[axis] == literal[string] ):
identifier[axis] = literal[int]
keyword[def] identifier[mirror_matrix] ( identifier[x] ):
keyword[return] identifier[x] [::- literal[int] ,:]
keyword[def] identifier[rotate270_matrix] ( identifier[x] ):
keyword[return] identifier[mirror_matrix] ( identifier[x] . identifier[T] )
keyword[def] identifier[rotate180_matrix] ( identifier[x] ):
keyword[return] identifier[x] [::- literal[int] ,:]
keyword[def] identifier[rotate90_matrix] ( identifier[x] ):
keyword[return] identifier[x] . identifier[T]
keyword[def] identifier[flip_matrix] ( identifier[x] ):
keyword[return] identifier[mirror_matrix] ( identifier[rotate180_matrix] ( identifier[x] ))
keyword[def] identifier[reorient_slice] ( identifier[x] , identifier[axis] ):
keyword[if] ( identifier[axis] != literal[int] ):
identifier[x] = identifier[rotate90_matrix] ( identifier[x] )
keyword[if] ( identifier[axis] == literal[int] ):
identifier[x] = identifier[rotate270_matrix] ( identifier[x] )
identifier[x] = identifier[mirror_matrix] ( identifier[x] )
keyword[return] identifier[x]
identifier[warnings] . identifier[simplefilter] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[image] , identifier[str] ):
identifier[image] = identifier[iio2] . identifier[image_read] ( identifier[image] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[image] , identifier[iio] . identifier[ANTsImage] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] ( identifier[image] . identifier[pixeltype] keyword[not] keyword[in] { literal[string] , literal[string] }) keyword[or] ( identifier[image] . identifier[is_rgb] ):
identifier[scale] = keyword[False]
keyword[if] identifier[overlay] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[overlay] , identifier[str] ):
identifier[overlay] = identifier[iio2] . identifier[image_read] ( identifier[overlay] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[overlay] , identifier[iio] . identifier[ANTsImage] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[iio] . identifier[image_physical_space_consistency] ( identifier[image] , identifier[overlay] ):
identifier[overlay] = identifier[reg] . identifier[resample_image_to_target] ( identifier[overlay] , identifier[image] , identifier[interp_type] = literal[string] )
keyword[if] identifier[blend] :
keyword[if] identifier[alpha] == literal[int] :
identifier[alpha] = literal[int]
identifier[image] = identifier[image] * identifier[alpha] + identifier[overlay] *( literal[int] - identifier[alpha] )
identifier[overlay] = keyword[None]
identifier[alpha] = literal[int]
keyword[if] identifier[domain_image_map] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[domain_image_map] , identifier[iio] . identifier[ANTsImage] ):
identifier[tx] = identifier[tio2] . identifier[new_ants_transform] ( identifier[precision] = literal[string] , identifier[transform_type] = literal[string] ,
identifier[dimension] = identifier[image] . identifier[dimension] )
identifier[image] = identifier[tio] . identifier[apply_ants_transform_to_image] ( identifier[tx] , identifier[image] , identifier[domain_image_map] )
keyword[if] identifier[overlay] keyword[is] keyword[not] keyword[None] :
identifier[overlay] = identifier[tio] . identifier[apply_ants_transform_to_image] ( identifier[tx] , identifier[overlay] ,
identifier[domain_image_map] ,
identifier[interpolation] = literal[string] )
keyword[elif] identifier[isinstance] ( identifier[domain_image_map] ,( identifier[list] , identifier[tuple] )):
keyword[if] identifier[len] ( identifier[domain_image_map] )!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[dimg] = identifier[domain_image_map] [ literal[int] ]
keyword[if] keyword[not] identifier[isinstance] ( identifier[dimg] , identifier[iio] . identifier[ANTsImage] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[tx] = identifier[domain_image_map] [ literal[int] ]
identifier[image] = identifier[reg] . identifier[apply_transforms] ( identifier[dimg] , identifier[image] , identifier[transform_list] = identifier[tx] )
keyword[if] identifier[overlay] keyword[is] keyword[not] keyword[None] :
identifier[overlay] = identifier[reg] . identifier[apply_transforms] ( identifier[dimg] , identifier[overlay] , identifier[transform_list] = identifier[tx] ,
identifier[interpolator] = literal[string] )
keyword[if] identifier[image] . identifier[components] == literal[int] :
keyword[if] identifier[crop] :
identifier[plotmask] = identifier[image] . identifier[get_mask] ( identifier[cleanup] = literal[int] )
keyword[if] identifier[plotmask] . identifier[max] ()== literal[int] :
identifier[plotmask] += literal[int]
identifier[image] = identifier[image] . identifier[crop_image] ( identifier[plotmask] )
keyword[if] identifier[overlay] keyword[is] keyword[not] keyword[None] :
identifier[overlay] = identifier[overlay] . identifier[crop_image] ( identifier[plotmask] )
keyword[if] identifier[scale] == keyword[True] :
identifier[vmin] , identifier[vmax] = identifier[image] . identifier[quantile] (( literal[int] , literal[int] ))
keyword[elif] identifier[isinstance] ( identifier[scale] ,( identifier[list] , identifier[tuple] )):
keyword[if] identifier[len] ( identifier[scale] )!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[vmin] , identifier[vmax] = identifier[image] . identifier[quantile] ( identifier[scale] )
keyword[else] :
identifier[vmin] = keyword[None]
identifier[vmax] = keyword[None]
keyword[if] identifier[image] . identifier[dimension] == literal[int] :
identifier[img_arr] = identifier[image] . identifier[numpy] ()
identifier[img_arr] = identifier[rotate90_matrix] ( identifier[img_arr] )
keyword[if] keyword[not] identifier[black_bg] :
identifier[img_arr] [ identifier[img_arr] < identifier[image] . identifier[quantile] ( identifier[bg_thresh_quant] )]= identifier[image] . identifier[quantile] ( identifier[bg_val_quant] )
keyword[if] identifier[overlay] keyword[is] keyword[not] keyword[None] :
identifier[ov_arr] = identifier[overlay] . identifier[numpy] ()
identifier[ov_arr] = identifier[rotate90_matrix] ( identifier[ov_arr] )
identifier[ov_arr] [ identifier[np] . identifier[abs] ( identifier[ov_arr] )== literal[int] ]= identifier[np] . identifier[nan]
identifier[fig] = identifier[plt] . identifier[figure] ()
keyword[if] identifier[title] keyword[is] keyword[not] keyword[None] :
identifier[fig] . identifier[suptitle] ( identifier[title] , identifier[fontsize] = identifier[title_fontsize] , identifier[x] = literal[int] + identifier[title_dx] , identifier[y] = literal[int] + identifier[title_dy] )
identifier[ax] = identifier[plt] . identifier[subplot] ( literal[int] )
identifier[im] = identifier[ax] . identifier[imshow] ( identifier[img_arr] , identifier[cmap] = identifier[cmap] ,
identifier[alpha] = identifier[alpha] ,
identifier[vmin] = identifier[vmin] , identifier[vmax] = identifier[vmax] )
keyword[if] identifier[overlay] keyword[is] keyword[not] keyword[None] :
identifier[im] = identifier[ax] . identifier[imshow] ( identifier[ov_arr] ,
identifier[alpha] = identifier[overlay_alpha] ,
identifier[cmap] = identifier[overlay_cmap] )
keyword[if] identifier[cbar] :
identifier[cbar_orient] = literal[string] keyword[if] identifier[cbar_vertical] keyword[else] literal[string]
identifier[fig] . identifier[colorbar] ( identifier[im] , identifier[orientation] = identifier[cbar_orient] )
identifier[plt] . identifier[axis] ( literal[string] )
keyword[elif] identifier[image] . identifier[dimension] == literal[int] :
identifier[spacing] =[ identifier[s] keyword[for] identifier[i] , identifier[s] keyword[in] identifier[enumerate] ( identifier[image] . identifier[spacing] ) keyword[if] identifier[i] != identifier[axis] ]
identifier[was_resampled] = keyword[False]
keyword[if] ( identifier[max] ( identifier[spacing] )/ identifier[min] ( identifier[spacing] ))> literal[int] :
identifier[was_resampled] = keyword[True]
identifier[new_spacing] =( literal[int] , literal[int] , literal[int] )
identifier[image] = identifier[image] . identifier[resample_image] ( identifier[tuple] ( identifier[new_spacing] ))
keyword[if] identifier[overlay] keyword[is] keyword[not] keyword[None] :
identifier[overlay] = identifier[overlay] . identifier[resample_image] ( identifier[tuple] ( identifier[new_spacing] ))
keyword[if] identifier[reorient] :
identifier[image] = identifier[image] . identifier[reorient_image2] ( literal[string] )
identifier[img_arr] = identifier[image] . identifier[numpy] ()
identifier[img_arr] = identifier[np] . identifier[rollaxis] ( identifier[img_arr] , identifier[axis] )
keyword[if] identifier[overlay] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[reorient] :
identifier[overlay] = identifier[overlay] . identifier[reorient_image2] ( literal[string] )
identifier[ov_arr] = identifier[overlay] . identifier[numpy] ()
identifier[ov_arr] [ identifier[np] . identifier[abs] ( identifier[ov_arr] )== literal[int] ]= identifier[np] . identifier[nan]
identifier[ov_arr] = identifier[np] . identifier[rollaxis] ( identifier[ov_arr] , identifier[axis] )
keyword[if] identifier[slices] keyword[is] keyword[None] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[slice_buffer] ,( identifier[list] , identifier[tuple] )):
keyword[if] identifier[slice_buffer] keyword[is] keyword[None] :
identifier[slice_buffer] =( identifier[int] ( identifier[img_arr] . identifier[shape] [ literal[int] ]* literal[int] ), identifier[int] ( identifier[img_arr] . identifier[shape] [ literal[int] ]* literal[int] ))
keyword[else] :
identifier[slice_buffer] =( identifier[slice_buffer] , identifier[slice_buffer] )
identifier[nonzero] = identifier[np] . identifier[where] ( identifier[img_arr] . identifier[sum] ( identifier[axis] =( literal[int] , literal[int] ))> literal[int] )[ literal[int] ]
identifier[min_idx] = identifier[nonzero] [ literal[int] ]+ identifier[slice_buffer] [ literal[int] ]
identifier[max_idx] = identifier[nonzero] [- literal[int] ]- identifier[slice_buffer] [ literal[int] ]
identifier[slice_idxs] = identifier[np] . identifier[linspace] ( identifier[min_idx] , identifier[max_idx] , identifier[nslices] ). identifier[astype] ( literal[string] )
keyword[if] identifier[reverse] :
identifier[slice_idxs] = identifier[np] . identifier[array] ( identifier[list] ( identifier[reversed] ( identifier[slice_idxs] )))
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[slices] ,( identifier[int] , identifier[float] )):
identifier[slices] =[ identifier[slices] ]
keyword[if] identifier[sum] ([ identifier[s] > literal[int] keyword[for] identifier[s] keyword[in] identifier[slices] ])== literal[int] :
identifier[slices] =[ identifier[int] ( identifier[s] * identifier[img_arr] . identifier[shape] [ literal[int] ]) keyword[for] identifier[s] keyword[in] identifier[slices] ]
identifier[slice_idxs] = identifier[slices]
identifier[nslices] = identifier[len] ( identifier[slices] )
keyword[if] identifier[was_resampled] :
identifier[slice_idxs] = identifier[np] . identifier[unique] ( identifier[np] . identifier[array] ([ identifier[int] ( identifier[s] *( identifier[image] . identifier[shape] [ identifier[axis] ]/ identifier[img_arr] . identifier[shape] [ literal[int] ])) keyword[for] identifier[s] keyword[in] identifier[slice_idxs] ]))
keyword[if] identifier[ncol] keyword[is] keyword[None] :
keyword[if] ( identifier[nslices] <= literal[int] ):
identifier[ncol] = identifier[nslices]
keyword[else] :
identifier[ncol] = identifier[int] ( identifier[round] ( identifier[math] . identifier[sqrt] ( identifier[nslices] )))
identifier[nrow] = identifier[math] . identifier[ceil] ( identifier[nslices] / identifier[ncol] )
identifier[xdim] = identifier[img_arr] . identifier[shape] [ literal[int] ]
identifier[ydim] = identifier[img_arr] . identifier[shape] [ literal[int] ]
identifier[dim_ratio] = identifier[ydim] / identifier[xdim]
identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] =(( identifier[ncol] + literal[int] )* identifier[figsize] * identifier[dim_ratio] ,( identifier[nrow] + literal[int] )* identifier[figsize] ))
keyword[if] identifier[title] keyword[is] keyword[not] keyword[None] :
identifier[fig] . identifier[suptitle] ( identifier[title] , identifier[fontsize] = identifier[title_fontsize] , identifier[x] = literal[int] + identifier[title_dx] , identifier[y] = literal[int] + identifier[title_dy] )
identifier[gs] = identifier[gridspec] . identifier[GridSpec] ( identifier[nrow] , identifier[ncol] ,
identifier[wspace] = literal[int] , identifier[hspace] = literal[int] ,
identifier[top] = literal[int] - literal[int] /( identifier[nrow] + literal[int] ), identifier[bottom] = literal[int] /( identifier[nrow] + literal[int] ),
identifier[left] = literal[int] /( identifier[ncol] + literal[int] ), identifier[right] = literal[int] - literal[int] /( identifier[ncol] + literal[int] ))
identifier[slice_idx_idx] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nrow] ):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[ncol] ):
keyword[if] identifier[slice_idx_idx] < identifier[len] ( identifier[slice_idxs] ):
identifier[imslice] = identifier[img_arr] [ identifier[slice_idxs] [ identifier[slice_idx_idx] ]]
identifier[imslice] = identifier[reorient_slice] ( identifier[imslice] , identifier[axis] )
keyword[if] keyword[not] identifier[black_bg] :
identifier[imslice] [ identifier[imslice] < identifier[image] . identifier[quantile] ( identifier[bg_thresh_quant] )]= identifier[image] . identifier[quantile] ( identifier[bg_val_quant] )
keyword[else] :
identifier[imslice] = identifier[np] . identifier[zeros_like] ( identifier[img_arr] [ literal[int] ])
identifier[imslice] = identifier[reorient_slice] ( identifier[imslice] , identifier[axis] )
identifier[ax] = identifier[plt] . identifier[subplot] ( identifier[gs] [ identifier[i] , identifier[j] ])
identifier[im] = identifier[ax] . identifier[imshow] ( identifier[imslice] , identifier[cmap] = identifier[cmap] ,
identifier[vmin] = identifier[vmin] , identifier[vmax] = identifier[vmax] )
keyword[if] identifier[overlay] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[slice_idx_idx] < identifier[len] ( identifier[slice_idxs] ):
identifier[ovslice] = identifier[ov_arr] [ identifier[slice_idxs] [ identifier[slice_idx_idx] ]]
identifier[ovslice] = identifier[reorient_slice] ( identifier[ovslice] , identifier[axis] )
identifier[im] = identifier[ax] . identifier[imshow] ( identifier[ovslice] , identifier[alpha] = identifier[overlay_alpha] , identifier[cmap] = identifier[overlay_cmap] )
identifier[ax] . identifier[axis] ( literal[string] )
identifier[slice_idx_idx] += literal[int]
keyword[if] identifier[cbar] :
identifier[cbar_start] =( literal[int] - identifier[cbar_length] )/ literal[int]
keyword[if] identifier[cbar_vertical] :
identifier[cax] = identifier[fig] . identifier[add_axes] ([ literal[int] + identifier[cbar_dx] , identifier[cbar_start] , literal[int] , identifier[cbar_length] ])
identifier[cbar_orient] = literal[string]
keyword[else] :
identifier[cax] = identifier[fig] . identifier[add_axes] ([ identifier[cbar_start] , literal[int] + identifier[cbar_dx] , identifier[cbar_length] , literal[int] ])
identifier[cbar_orient] = literal[string]
identifier[fig] . identifier[colorbar] ( identifier[im] , identifier[cax] = identifier[cax] , identifier[orientation] = identifier[cbar_orient] )
keyword[elif] identifier[image] . identifier[components] > literal[int] :
keyword[if] keyword[not] identifier[image] . identifier[is_rgb] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[img_arr] = identifier[image] . identifier[numpy] ()
identifier[img_arr] = identifier[np] . identifier[stack] ([ identifier[rotate90_matrix] ( identifier[img_arr] [:,:, identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] )], identifier[axis] =- literal[int] )
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[ax] = identifier[plt] . identifier[subplot] ( literal[int] )
identifier[ax] . identifier[imshow] ( identifier[img_arr] , identifier[alpha] = identifier[alpha] )
identifier[plt] . identifier[axis] ( literal[string] )
keyword[if] identifier[filename] keyword[is] keyword[not] keyword[None] :
identifier[filename] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[filename] )
identifier[plt] . identifier[savefig] ( identifier[filename] , identifier[dpi] = identifier[dpi] , identifier[transparent] = keyword[True] , identifier[bbox_inches] = literal[string] )
identifier[plt] . identifier[close] ( identifier[fig] )
keyword[else] :
identifier[plt] . identifier[show] ()
identifier[warnings] . identifier[simplefilter] ( literal[string] ) | def plot(image, overlay=None, blend=False, alpha=1, cmap='Greys_r', overlay_cmap='jet', overlay_alpha=0.9, cbar=False, cbar_length=0.8, cbar_dx=0.0, cbar_vertical=True, axis=0, nslices=12, slices=None, ncol=None, slice_buffer=None, black_bg=True, bg_thresh_quant=0.01, bg_val_quant=0.99, domain_image_map=None, crop=False, scale=False, reverse=False, title=None, title_fontsize=20, title_dx=0.0, title_dy=0.0, filename=None, dpi=500, figsize=1.5, reorient=True):
"""
Plot an ANTsImage.
By default, images will be reoriented to 'LAI' orientation before plotting.
So, if axis == 0, the images will be ordered from the
left side of the brain to the right side of the brain. If axis == 1,
the images will be ordered from the anterior (front) of the brain to
the posterior (back) of the brain. And if axis == 2, the images will
be ordered from the inferior (bottom) of the brain to the superior (top)
of the brain.
ANTsR function: `plot.antsImage`
Arguments
---------
image : ANTsImage
image to plot
overlay : ANTsImage
image to overlay on base image
cmap : string
colormap to use for base image. See matplotlib.
overlay_cmap : string
colormap to use for overlay images, if applicable. See matplotlib.
overlay_alpha : float
level of transparency for any overlays. Smaller value means
the overlay is more transparent. See matplotlib.
axis : integer
which axis to plot along if image is 3D
nslices : integer
number of slices to plot if image is 3D
slices : list or tuple of integers
specific slice indices to plot if image is 3D.
If given, this will override `nslices`.
This can be absolute array indices (e.g. (80,100,120)), or
this can be relative array indices (e.g. (0.4,0.5,0.6))
ncol : integer
Number of columns to have on the plot if image is 3D.
slice_buffer : integer
how many slices to buffer when finding the non-zero slices of
a 3D images. So, if slice_buffer = 10, then the first slice
in a 3D image will be the first non-zero slice index plus 10 more
slices.
black_bg : boolean
if True, the background of the image(s) will be black.
if False, the background of the image(s) will be determined by the
values `bg_thresh_quant` and `bg_val_quant`.
bg_thresh_quant : float
if white_bg=True, the background will be determined by thresholding
the image at the `bg_thresh` quantile value and setting the background
intensity to the `bg_val` quantile value.
This value should be in [0, 1] - somewhere around 0.01 is recommended.
- equal to 1 will threshold the entire image
- equal to 0 will threshold none of the image
bg_val_quant : float
if white_bg=True, the background will be determined by thresholding
the image at the `bg_thresh` quantile value and setting the background
intensity to the `bg_val` quantile value.
This value should be in [0, 1]
- equal to 1 is pure white
- equal to 0 is pure black
- somewhere in between is gray
domain_image_map : ANTsImage
this input ANTsImage or list of ANTsImage types contains a reference image
`domain_image` and optional reference mapping named `domainMap`.
If supplied, the image(s) to be plotted will be mapped to the domain
image space before plotting - useful for non-standard image orientations.
crop : boolean
if true, the image(s) will be cropped to their bounding boxes, resulting
in a potentially smaller image size.
if false, the image(s) will not be cropped
scale : boolean or 2-tuple
if true, nothing will happen to intensities of image(s) and overlay(s)
if false, dynamic range will be maximized when visualizing overlays
if 2-tuple, the image will be dynamically scaled between these quantiles
reverse : boolean
if true, the order in which the slices are plotted will be reversed.
This is useful if you want to plot from the front of the brain first
to the back of the brain, or vice-versa
title : string
add a title to the plot
filename : string
if given, the resulting image will be saved to this file
dpi : integer
determines resolution of image if saved to file. Higher values
result in higher resolution images, but at a cost of having a
larger file size
Example
-------
>>> import ants
>>> import numpy as np
>>> img = ants.image_read(ants.get_data('r16'))
>>> segs = img.kmeans_segmentation(k=3)['segmentation']
>>> ants.plot(img, segs*(segs==1), crop=True)
>>> ants.plot(img, segs*(segs==1), crop=False)
>>> mni = ants.image_read(ants.get_data('mni'))
>>> segs = mni.kmeans_segmentation(k=3)['segmentation']
>>> ants.plot(mni, segs*(segs==1), crop=False)
"""
if axis == 'x' or axis == 'saggittal':
axis = 0 # depends on [control=['if'], data=[]]
if axis == 'y' or axis == 'coronal':
axis = 1 # depends on [control=['if'], data=[]]
if axis == 'z' or axis == 'axial':
axis = 2 # depends on [control=['if'], data=[]]
def mirror_matrix(x):
return x[::-1, :]
def rotate270_matrix(x):
return mirror_matrix(x.T)
def rotate180_matrix(x):
return x[::-1, :]
def rotate90_matrix(x):
return x.T
def flip_matrix(x):
return mirror_matrix(rotate180_matrix(x))
def reorient_slice(x, axis):
if axis != 2:
x = rotate90_matrix(x) # depends on [control=['if'], data=[]]
if axis == 2:
x = rotate270_matrix(x) # depends on [control=['if'], data=[]]
x = mirror_matrix(x)
return x
# need this hack because of a weird NaN warning from matplotlib with overlays
warnings.simplefilter('ignore')
# handle `image` argument
if isinstance(image, str):
image = iio2.image_read(image) # depends on [control=['if'], data=[]]
if not isinstance(image, iio.ANTsImage):
raise ValueError('image argument must be an ANTsImage') # depends on [control=['if'], data=[]]
if image.pixeltype not in {'float', 'double'} or image.is_rgb:
scale = False # turn off scaling if image is discrete # depends on [control=['if'], data=[]]
# handle `overlay` argument
if overlay is not None:
if isinstance(overlay, str):
overlay = iio2.image_read(overlay) # depends on [control=['if'], data=[]]
if not isinstance(overlay, iio.ANTsImage):
raise ValueError('overlay argument must be an ANTsImage') # depends on [control=['if'], data=[]]
if not iio.image_physical_space_consistency(image, overlay):
overlay = reg.resample_image_to_target(overlay, image, interp_type='linear') # depends on [control=['if'], data=[]]
if blend:
if alpha == 1:
alpha = 0.5 # depends on [control=['if'], data=['alpha']]
image = image * alpha + overlay * (1 - alpha)
overlay = None
alpha = 1.0 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['overlay']]
# handle `domain_image_map` argument
if domain_image_map is not None:
if isinstance(domain_image_map, iio.ANTsImage):
tx = tio2.new_ants_transform(precision='float', transform_type='AffineTransform', dimension=image.dimension)
image = tio.apply_ants_transform_to_image(tx, image, domain_image_map)
if overlay is not None:
overlay = tio.apply_ants_transform_to_image(tx, overlay, domain_image_map, interpolation='linear') # depends on [control=['if'], data=['overlay']] # depends on [control=['if'], data=[]]
elif isinstance(domain_image_map, (list, tuple)):
# expect an image and transformation
if len(domain_image_map) != 2:
raise ValueError('domain_image_map list or tuple must have length == 2') # depends on [control=['if'], data=[]]
dimg = domain_image_map[0]
if not isinstance(dimg, iio.ANTsImage):
raise ValueError('domain_image_map first entry should be ANTsImage') # depends on [control=['if'], data=[]]
tx = domain_image_map[1]
image = reg.apply_transforms(dimg, image, transform_list=tx)
if overlay is not None:
overlay = reg.apply_transforms(dimg, overlay, transform_list=tx, interpolator='linear') # depends on [control=['if'], data=['overlay']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['domain_image_map']]
## single-channel images ##
if image.components == 1:
# potentially crop image
if crop:
plotmask = image.get_mask(cleanup=0)
if plotmask.max() == 0:
plotmask += 1 # depends on [control=['if'], data=[]]
image = image.crop_image(plotmask)
if overlay is not None:
overlay = overlay.crop_image(plotmask) # depends on [control=['if'], data=['overlay']] # depends on [control=['if'], data=[]]
# potentially find dynamic range
if scale == True:
(vmin, vmax) = image.quantile((0.05, 0.95)) # depends on [control=['if'], data=[]]
elif isinstance(scale, (list, tuple)):
if len(scale) != 2:
raise ValueError('scale argument must be boolean or list/tuple with two values') # depends on [control=['if'], data=[]]
(vmin, vmax) = image.quantile(scale) # depends on [control=['if'], data=[]]
else:
vmin = None
vmax = None
# Plot 2D image
if image.dimension == 2:
img_arr = image.numpy()
img_arr = rotate90_matrix(img_arr)
if not black_bg:
img_arr[img_arr < image.quantile(bg_thresh_quant)] = image.quantile(bg_val_quant) # depends on [control=['if'], data=[]]
if overlay is not None:
ov_arr = overlay.numpy()
ov_arr = rotate90_matrix(ov_arr)
ov_arr[np.abs(ov_arr) == 0] = np.nan # depends on [control=['if'], data=['overlay']]
fig = plt.figure()
if title is not None:
fig.suptitle(title, fontsize=title_fontsize, x=0.5 + title_dx, y=0.95 + title_dy) # depends on [control=['if'], data=['title']]
ax = plt.subplot(111)
# plot main image
im = ax.imshow(img_arr, cmap=cmap, alpha=alpha, vmin=vmin, vmax=vmax)
if overlay is not None:
im = ax.imshow(ov_arr, alpha=overlay_alpha, cmap=overlay_cmap) # depends on [control=['if'], data=[]]
if cbar:
cbar_orient = 'vertical' if cbar_vertical else 'horizontal'
fig.colorbar(im, orientation=cbar_orient) # depends on [control=['if'], data=[]]
plt.axis('off') # depends on [control=['if'], data=[]]
# Plot 3D image
elif image.dimension == 3:
# resample image if spacing is very unbalanced
spacing = [s for (i, s) in enumerate(image.spacing) if i != axis]
was_resampled = False
if max(spacing) / min(spacing) > 3.0:
was_resampled = True
new_spacing = (1, 1, 1)
image = image.resample_image(tuple(new_spacing))
if overlay is not None:
overlay = overlay.resample_image(tuple(new_spacing)) # depends on [control=['if'], data=['overlay']] # depends on [control=['if'], data=[]]
if reorient:
image = image.reorient_image2('LAI') # depends on [control=['if'], data=[]]
img_arr = image.numpy()
# reorder dims so that chosen axis is first
img_arr = np.rollaxis(img_arr, axis)
if overlay is not None:
if reorient:
overlay = overlay.reorient_image2('LAI') # depends on [control=['if'], data=[]]
ov_arr = overlay.numpy()
ov_arr[np.abs(ov_arr) == 0] = np.nan
ov_arr = np.rollaxis(ov_arr, axis) # depends on [control=['if'], data=['overlay']]
if slices is None:
if not isinstance(slice_buffer, (list, tuple)):
if slice_buffer is None:
slice_buffer = (int(img_arr.shape[1] * 0.1), int(img_arr.shape[2] * 0.1)) # depends on [control=['if'], data=['slice_buffer']]
else:
slice_buffer = (slice_buffer, slice_buffer) # depends on [control=['if'], data=[]]
nonzero = np.where(img_arr.sum(axis=(1, 2)) > 0.01)[0]
min_idx = nonzero[0] + slice_buffer[0]
max_idx = nonzero[-1] - slice_buffer[1]
slice_idxs = np.linspace(min_idx, max_idx, nslices).astype('int')
if reverse:
slice_idxs = np.array(list(reversed(slice_idxs))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
if isinstance(slices, (int, float)):
slices = [slices] # depends on [control=['if'], data=[]]
# if all slices are less than 1, infer that they are relative slices
if sum([s > 1 for s in slices]) == 0:
slices = [int(s * img_arr.shape[0]) for s in slices] # depends on [control=['if'], data=[]]
slice_idxs = slices
nslices = len(slices)
if was_resampled:
# re-calculate slices to account for new image shape
slice_idxs = np.unique(np.array([int(s * (image.shape[axis] / img_arr.shape[0])) for s in slice_idxs])) # depends on [control=['if'], data=[]]
# only have one row if nslices <= 6 and user didnt specify ncol
if ncol is None:
if nslices <= 6:
ncol = nslices # depends on [control=['if'], data=['nslices']]
else:
ncol = int(round(math.sqrt(nslices))) # depends on [control=['if'], data=['ncol']]
# calculate grid size
nrow = math.ceil(nslices / ncol)
xdim = img_arr.shape[2]
ydim = img_arr.shape[1]
dim_ratio = ydim / xdim
fig = plt.figure(figsize=((ncol + 1) * figsize * dim_ratio, (nrow + 1) * figsize))
if title is not None:
fig.suptitle(title, fontsize=title_fontsize, x=0.5 + title_dx, y=0.95 + title_dy) # depends on [control=['if'], data=['title']]
gs = gridspec.GridSpec(nrow, ncol, wspace=0.0, hspace=0.0, top=1.0 - 0.5 / (nrow + 1), bottom=0.5 / (nrow + 1), left=0.5 / (ncol + 1), right=1 - 0.5 / (ncol + 1))
slice_idx_idx = 0
for i in range(nrow):
for j in range(ncol):
if slice_idx_idx < len(slice_idxs):
imslice = img_arr[slice_idxs[slice_idx_idx]]
imslice = reorient_slice(imslice, axis)
if not black_bg:
imslice[imslice < image.quantile(bg_thresh_quant)] = image.quantile(bg_val_quant) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['slice_idx_idx']]
else:
imslice = np.zeros_like(img_arr[0])
imslice = reorient_slice(imslice, axis)
ax = plt.subplot(gs[i, j])
im = ax.imshow(imslice, cmap=cmap, vmin=vmin, vmax=vmax)
if overlay is not None:
if slice_idx_idx < len(slice_idxs):
ovslice = ov_arr[slice_idxs[slice_idx_idx]]
ovslice = reorient_slice(ovslice, axis)
im = ax.imshow(ovslice, alpha=overlay_alpha, cmap=overlay_cmap) # depends on [control=['if'], data=['slice_idx_idx']] # depends on [control=['if'], data=[]]
ax.axis('off')
slice_idx_idx += 1 # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
if cbar:
cbar_start = (1 - cbar_length) / 2
if cbar_vertical:
cax = fig.add_axes([0.9 + cbar_dx, cbar_start, 0.03, cbar_length])
cbar_orient = 'vertical' # depends on [control=['if'], data=[]]
else:
cax = fig.add_axes([cbar_start, 0.08 + cbar_dx, cbar_length, 0.03])
cbar_orient = 'horizontal'
fig.colorbar(im, cax=cax, orientation=cbar_orient) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
## multi-channel images ##
elif image.components > 1:
if not image.is_rgb:
raise ValueError('Multi-component images only supported if they are RGB') # depends on [control=['if'], data=[]]
img_arr = image.numpy()
img_arr = np.stack([rotate90_matrix(img_arr[:, :, i]) for i in range(3)], axis=-1)
fig = plt.figure()
ax = plt.subplot(111)
# plot main image
ax.imshow(img_arr, alpha=alpha)
plt.axis('off') # depends on [control=['if'], data=[]]
if filename is not None:
filename = os.path.expanduser(filename)
plt.savefig(filename, dpi=dpi, transparent=True, bbox_inches='tight')
plt.close(fig) # depends on [control=['if'], data=['filename']]
else:
plt.show()
# turn warnings back to default
warnings.simplefilter('default') |
def set_template(self, template):
    """Set the template used when generating output.

    :param template: template instance to use for rendering
    :type template: instance of ``templates.BasicTemplate`` (or subclass)
    :raises TypeError: if ``template`` does not inherit from
        ``templates.BasicTemplate``
    """
    if not isinstance(template, templates.BasicTemplate):
        # Guard clause: reject anything that does not honour the
        # BasicTemplate contract before storing it.
        # (Original message read "set_template:Template" -- missing space.)
        raise TypeError('converter#set_template: '
                        'Template must inherit from BasicTemplate')
    self.template = template
constant[ Sets template to be used when generating output
:param template TEmplate instance
:type instance of BasicTemplate
]
if call[name[isinstance], parameter[name[template], name[templates].BasicTemplate]] begin[:]
name[self].template assign[=] name[template] | keyword[def] identifier[set_template] ( identifier[self] , identifier[template] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[template] , identifier[templates] . identifier[BasicTemplate] ):
identifier[self] . identifier[template] = identifier[template]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string]
literal[string] ) | def set_template(self, template):
""" Sets template to be used when generating output
:param template TEmplate instance
:type instance of BasicTemplate
"""
if isinstance(template, templates.BasicTemplate):
self.template = template # depends on [control=['if'], data=[]]
else:
raise TypeError('converter#set_template:Template must inherit from BasicTemplate') |
def _create_response(self, datapath, port, req):
    """Build a reply packet carrying a LACP frame for the given port."""
    src_addr = datapath.ports[port].hw_addr
    eth_header = ethernet.ethernet(
        slow.SLOW_PROTOCOL_MULTICAST, src_addr, ether.ETH_TYPE_SLOW)
    lacp_payload = self._create_lacp(datapath, port, req)
    reply = packet.Packet()
    # Layer order matters: Ethernet header first, then the LACP payload.
    for protocol in (eth_header, lacp_payload):
        reply.add_protocol(protocol)
    reply.serialize()
    return reply
return res_pkt | def function[_create_response, parameter[self, datapath, port, req]]:
constant[create a packet including LACP.]
variable[src] assign[=] call[name[datapath].ports][name[port]].hw_addr
variable[res_ether] assign[=] call[name[ethernet].ethernet, parameter[name[slow].SLOW_PROTOCOL_MULTICAST, name[src], name[ether].ETH_TYPE_SLOW]]
variable[res_lacp] assign[=] call[name[self]._create_lacp, parameter[name[datapath], name[port], name[req]]]
variable[res_pkt] assign[=] call[name[packet].Packet, parameter[]]
call[name[res_pkt].add_protocol, parameter[name[res_ether]]]
call[name[res_pkt].add_protocol, parameter[name[res_lacp]]]
call[name[res_pkt].serialize, parameter[]]
return[name[res_pkt]] | keyword[def] identifier[_create_response] ( identifier[self] , identifier[datapath] , identifier[port] , identifier[req] ):
literal[string]
identifier[src] = identifier[datapath] . identifier[ports] [ identifier[port] ]. identifier[hw_addr]
identifier[res_ether] = identifier[ethernet] . identifier[ethernet] (
identifier[slow] . identifier[SLOW_PROTOCOL_MULTICAST] , identifier[src] , identifier[ether] . identifier[ETH_TYPE_SLOW] )
identifier[res_lacp] = identifier[self] . identifier[_create_lacp] ( identifier[datapath] , identifier[port] , identifier[req] )
identifier[res_pkt] = identifier[packet] . identifier[Packet] ()
identifier[res_pkt] . identifier[add_protocol] ( identifier[res_ether] )
identifier[res_pkt] . identifier[add_protocol] ( identifier[res_lacp] )
identifier[res_pkt] . identifier[serialize] ()
keyword[return] identifier[res_pkt] | def _create_response(self, datapath, port, req):
"""create a packet including LACP."""
src = datapath.ports[port].hw_addr
res_ether = ethernet.ethernet(slow.SLOW_PROTOCOL_MULTICAST, src, ether.ETH_TYPE_SLOW)
res_lacp = self._create_lacp(datapath, port, req)
res_pkt = packet.Packet()
res_pkt.add_protocol(res_ether)
res_pkt.add_protocol(res_lacp)
res_pkt.serialize()
return res_pkt |
def get_objective_bank_nodes(self,
                             objective_bank_id=None,
                             ancestor_levels=None,
                             descendant_levels=None,
                             include_siblings=None):
    """Gets a portion of the hierarchy for the given objective bank.
    arg: includeSiblings (boolean): true to include the siblings
        of the given node, false to omit the siblings
    return: (osid.learning.ObjectiveBankNode) - an objective bank
        node
    raise: NotFound - objectiveBankId not found
    raise: NullArgument - objectiveBankId is null
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    compliance: mandatory - This method must be implemented.
    """
    # NOTE(review): ancestor_levels and include_siblings are accepted but
    # never forwarded to the URL builder -- presumably unsupported by the
    # backing service; confirm before relying on them.
    kwargs = {'alias': objective_bank_id}
    if descendant_levels:
        kwargs['depth'] = descendant_levels
    return self._get_request(self._urls.nodes(**kwargs))
constant[Gets a portion of the hierarchy for the given objective bank.
arg: includeSiblings (boolean): true to include the siblings
of the given node, false to omit the siblings
return: (osid.learning.ObjectiveBankNode) - an objective bank
node
raise: NotFound - objectiveBankId not found
raise: NullArgument - objectiveBankId is null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
compliance: mandatory - This method must be implemented.
]
if name[descendant_levels] begin[:]
variable[url_path] assign[=] call[name[self]._urls.nodes, parameter[]]
return[call[name[self]._get_request, parameter[name[url_path]]]] | keyword[def] identifier[get_objective_bank_nodes] ( identifier[self] ,
identifier[objective_bank_id] = keyword[None] ,
identifier[ancestor_levels] = keyword[None] ,
identifier[descendant_levels] = keyword[None] ,
identifier[include_siblings] = keyword[None] ):
literal[string]
keyword[if] identifier[descendant_levels] :
identifier[url_path] = identifier[self] . identifier[_urls] . identifier[nodes] ( identifier[alias] = identifier[objective_bank_id] , identifier[depth] = identifier[descendant_levels] )
keyword[else] :
identifier[url_path] = identifier[self] . identifier[_urls] . identifier[nodes] ( identifier[alias] = identifier[objective_bank_id] )
keyword[return] identifier[self] . identifier[_get_request] ( identifier[url_path] ) | def get_objective_bank_nodes(self, objective_bank_id=None, ancestor_levels=None, descendant_levels=None, include_siblings=None):
"""Gets a portion of the hierarchy for the given objective bank.
arg: includeSiblings (boolean): true to include the siblings
of the given node, false to omit the siblings
return: (osid.learning.ObjectiveBankNode) - an objective bank
node
raise: NotFound - objectiveBankId not found
raise: NullArgument - objectiveBankId is null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
compliance: mandatory - This method must be implemented.
"""
if descendant_levels:
url_path = self._urls.nodes(alias=objective_bank_id, depth=descendant_levels) # depends on [control=['if'], data=[]]
else:
url_path = self._urls.nodes(alias=objective_bank_id)
return self._get_request(url_path) |
def node_from_nid(self, nid):
    """Return the node in the `Flow` with the given `nid` identifier"""
    match = next((node for node in self.iflat_nodes()
                  if node.node_id == nid), None)
    if match is None:
        raise ValueError("Cannot find node with node id: %s" % nid)
    return match
constant[Return the node in the `Flow` with the given `nid` identifier]
for taget[name[node]] in starred[call[name[self].iflat_nodes, parameter[]]] begin[:]
if compare[name[node].node_id equal[==] name[nid]] begin[:]
return[name[node]]
<ast.Raise object at 0x7da18f811ff0> | keyword[def] identifier[node_from_nid] ( identifier[self] , identifier[nid] ):
literal[string]
keyword[for] identifier[node] keyword[in] identifier[self] . identifier[iflat_nodes] ():
keyword[if] identifier[node] . identifier[node_id] == identifier[nid] : keyword[return] identifier[node]
keyword[raise] identifier[ValueError] ( literal[string] % identifier[nid] ) | def node_from_nid(self, nid):
"""Return the node in the `Flow` with the given `nid` identifier"""
for node in self.iflat_nodes():
if node.node_id == nid:
return node # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']]
raise ValueError('Cannot find node with node id: %s' % nid) |
def _colorize_bars(self, cmap, bars, element, main_range, dim):
"""
Use the given cmap to color the bars, applying the correct
color ranges as necessary.
"""
cmap_range = main_range[1] - main_range[0]
lower_bound = main_range[0]
colors = np.array(element.dimension_values(dim))
colors = (colors - lower_bound) / (cmap_range)
for c, bar in zip(colors, bars):
bar.set_facecolor(cmap(c))
bar.set_clip_on(False) | def function[_colorize_bars, parameter[self, cmap, bars, element, main_range, dim]]:
constant[
Use the given cmap to color the bars, applying the correct
color ranges as necessary.
]
variable[cmap_range] assign[=] binary_operation[call[name[main_range]][constant[1]] - call[name[main_range]][constant[0]]]
variable[lower_bound] assign[=] call[name[main_range]][constant[0]]
variable[colors] assign[=] call[name[np].array, parameter[call[name[element].dimension_values, parameter[name[dim]]]]]
variable[colors] assign[=] binary_operation[binary_operation[name[colors] - name[lower_bound]] / name[cmap_range]]
for taget[tuple[[<ast.Name object at 0x7da20c7cbf40>, <ast.Name object at 0x7da20c7c9d50>]]] in starred[call[name[zip], parameter[name[colors], name[bars]]]] begin[:]
call[name[bar].set_facecolor, parameter[call[name[cmap], parameter[name[c]]]]]
call[name[bar].set_clip_on, parameter[constant[False]]] | keyword[def] identifier[_colorize_bars] ( identifier[self] , identifier[cmap] , identifier[bars] , identifier[element] , identifier[main_range] , identifier[dim] ):
literal[string]
identifier[cmap_range] = identifier[main_range] [ literal[int] ]- identifier[main_range] [ literal[int] ]
identifier[lower_bound] = identifier[main_range] [ literal[int] ]
identifier[colors] = identifier[np] . identifier[array] ( identifier[element] . identifier[dimension_values] ( identifier[dim] ))
identifier[colors] =( identifier[colors] - identifier[lower_bound] )/( identifier[cmap_range] )
keyword[for] identifier[c] , identifier[bar] keyword[in] identifier[zip] ( identifier[colors] , identifier[bars] ):
identifier[bar] . identifier[set_facecolor] ( identifier[cmap] ( identifier[c] ))
identifier[bar] . identifier[set_clip_on] ( keyword[False] ) | def _colorize_bars(self, cmap, bars, element, main_range, dim):
"""
Use the given cmap to color the bars, applying the correct
color ranges as necessary.
"""
cmap_range = main_range[1] - main_range[0]
lower_bound = main_range[0]
colors = np.array(element.dimension_values(dim))
colors = (colors - lower_bound) / cmap_range
for (c, bar) in zip(colors, bars):
bar.set_facecolor(cmap(c))
bar.set_clip_on(False) # depends on [control=['for'], data=[]] |
def _ioctl_cast(n):
"""
Linux ioctl() request parameter is unsigned, whereas on BSD/Darwin it is
signed. Until 2.5 Python exclusively implemented the BSD behaviour,
preventing use of large unsigned int requests like the TTY layer uses
below. So on 2.4, we cast our unsigned to look like signed for Python.
"""
if sys.version_info < (2, 5):
n, = struct.unpack('i', struct.pack('I', n))
return n | def function[_ioctl_cast, parameter[n]]:
constant[
Linux ioctl() request parameter is unsigned, whereas on BSD/Darwin it is
signed. Until 2.5 Python exclusively implemented the BSD behaviour,
preventing use of large unsigned int requests like the TTY layer uses
below. So on 2.4, we cast our unsigned to look like signed for Python.
]
if compare[name[sys].version_info less[<] tuple[[<ast.Constant object at 0x7da1b1d37070>, <ast.Constant object at 0x7da1b1d35d20>]]] begin[:]
<ast.Tuple object at 0x7da1b1d34ca0> assign[=] call[name[struct].unpack, parameter[constant[i], call[name[struct].pack, parameter[constant[I], name[n]]]]]
return[name[n]] | keyword[def] identifier[_ioctl_cast] ( identifier[n] ):
literal[string]
keyword[if] identifier[sys] . identifier[version_info] <( literal[int] , literal[int] ):
identifier[n] ,= identifier[struct] . identifier[unpack] ( literal[string] , identifier[struct] . identifier[pack] ( literal[string] , identifier[n] ))
keyword[return] identifier[n] | def _ioctl_cast(n):
"""
Linux ioctl() request parameter is unsigned, whereas on BSD/Darwin it is
signed. Until 2.5 Python exclusively implemented the BSD behaviour,
preventing use of large unsigned int requests like the TTY layer uses
below. So on 2.4, we cast our unsigned to look like signed for Python.
"""
if sys.version_info < (2, 5):
(n,) = struct.unpack('i', struct.pack('I', n)) # depends on [control=['if'], data=[]]
return n |
def user_view_events(self) -> List[str]:
"""Return event types where use viewed a main object."""
return [event_type for event_type, event in self.items if event.get_event_action()
== event_actions.VIEWED] | def function[user_view_events, parameter[self]]:
constant[Return event types where use viewed a main object.]
return[<ast.ListComp object at 0x7da20c990490>] | keyword[def] identifier[user_view_events] ( identifier[self] )-> identifier[List] [ identifier[str] ]:
literal[string]
keyword[return] [ identifier[event_type] keyword[for] identifier[event_type] , identifier[event] keyword[in] identifier[self] . identifier[items] keyword[if] identifier[event] . identifier[get_event_action] ()
== identifier[event_actions] . identifier[VIEWED] ] | def user_view_events(self) -> List[str]:
"""Return event types where use viewed a main object."""
return [event_type for (event_type, event) in self.items if event.get_event_action() == event_actions.VIEWED] |
def get_learning_objectives(self):
"""Gets the any ``Objectives`` corresponding to this item.
return: (osid.learning.ObjectiveList) - the learning objectives
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.learning.Activity.get_assets_template
if not bool(self._my_map['learningObjectiveIds']):
raise errors.IllegalState('no learningObjectiveIds')
mgr = self._get_provider_manager('LEARNING')
if not mgr.supports_objective_lookup():
raise errors.OperationFailed('Learning does not support Objective lookup')
# What about the Proxy?
lookup_session = mgr.get_objective_lookup_session(proxy=getattr(self, "_proxy", None))
lookup_session.use_federated_objective_bank_view()
return lookup_session.get_objectives_by_ids(self.get_learning_objective_ids()) | def function[get_learning_objectives, parameter[self]]:
constant[Gets the any ``Objectives`` corresponding to this item.
return: (osid.learning.ObjectiveList) - the learning objectives
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
]
if <ast.UnaryOp object at 0x7da207f03a90> begin[:]
<ast.Raise object at 0x7da207f00a60>
variable[mgr] assign[=] call[name[self]._get_provider_manager, parameter[constant[LEARNING]]]
if <ast.UnaryOp object at 0x7da207f01030> begin[:]
<ast.Raise object at 0x7da207f00b20>
variable[lookup_session] assign[=] call[name[mgr].get_objective_lookup_session, parameter[]]
call[name[lookup_session].use_federated_objective_bank_view, parameter[]]
return[call[name[lookup_session].get_objectives_by_ids, parameter[call[name[self].get_learning_objective_ids, parameter[]]]]] | keyword[def] identifier[get_learning_objectives] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[bool] ( identifier[self] . identifier[_my_map] [ literal[string] ]):
keyword[raise] identifier[errors] . identifier[IllegalState] ( literal[string] )
identifier[mgr] = identifier[self] . identifier[_get_provider_manager] ( literal[string] )
keyword[if] keyword[not] identifier[mgr] . identifier[supports_objective_lookup] ():
keyword[raise] identifier[errors] . identifier[OperationFailed] ( literal[string] )
identifier[lookup_session] = identifier[mgr] . identifier[get_objective_lookup_session] ( identifier[proxy] = identifier[getattr] ( identifier[self] , literal[string] , keyword[None] ))
identifier[lookup_session] . identifier[use_federated_objective_bank_view] ()
keyword[return] identifier[lookup_session] . identifier[get_objectives_by_ids] ( identifier[self] . identifier[get_learning_objective_ids] ()) | def get_learning_objectives(self):
"""Gets the any ``Objectives`` corresponding to this item.
return: (osid.learning.ObjectiveList) - the learning objectives
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.learning.Activity.get_assets_template
if not bool(self._my_map['learningObjectiveIds']):
raise errors.IllegalState('no learningObjectiveIds') # depends on [control=['if'], data=[]]
mgr = self._get_provider_manager('LEARNING')
if not mgr.supports_objective_lookup():
raise errors.OperationFailed('Learning does not support Objective lookup') # depends on [control=['if'], data=[]]
# What about the Proxy?
lookup_session = mgr.get_objective_lookup_session(proxy=getattr(self, '_proxy', None))
lookup_session.use_federated_objective_bank_view()
return lookup_session.get_objectives_by_ids(self.get_learning_objective_ids()) |
def cosine(w, A=1, phi=0, offset=0):
''' Return a driver function that can advance a sequence of cosine values.
.. code-block:: none
value = A * cos(w*i + phi) + offset
Args:
w (float) : a frequency for the cosine driver
A (float) : an amplitude for the cosine driver
phi (float) : a phase offset to start the cosine driver with
offset (float) : a global offset to add to the driver values
'''
from math import cos
def f(i):
return A * cos(w*i + phi) + offset
return partial(force, sequence=_advance(f)) | def function[cosine, parameter[w, A, phi, offset]]:
constant[ Return a driver function that can advance a sequence of cosine values.
.. code-block:: none
value = A * cos(w*i + phi) + offset
Args:
w (float) : a frequency for the cosine driver
A (float) : an amplitude for the cosine driver
phi (float) : a phase offset to start the cosine driver with
offset (float) : a global offset to add to the driver values
]
from relative_module[math] import module[cos]
def function[f, parameter[i]]:
return[binary_operation[binary_operation[name[A] * call[name[cos], parameter[binary_operation[binary_operation[name[w] * name[i]] + name[phi]]]]] + name[offset]]]
return[call[name[partial], parameter[name[force]]]] | keyword[def] identifier[cosine] ( identifier[w] , identifier[A] = literal[int] , identifier[phi] = literal[int] , identifier[offset] = literal[int] ):
literal[string]
keyword[from] identifier[math] keyword[import] identifier[cos]
keyword[def] identifier[f] ( identifier[i] ):
keyword[return] identifier[A] * identifier[cos] ( identifier[w] * identifier[i] + identifier[phi] )+ identifier[offset]
keyword[return] identifier[partial] ( identifier[force] , identifier[sequence] = identifier[_advance] ( identifier[f] )) | def cosine(w, A=1, phi=0, offset=0):
""" Return a driver function that can advance a sequence of cosine values.
.. code-block:: none
value = A * cos(w*i + phi) + offset
Args:
w (float) : a frequency for the cosine driver
A (float) : an amplitude for the cosine driver
phi (float) : a phase offset to start the cosine driver with
offset (float) : a global offset to add to the driver values
"""
from math import cos
def f(i):
return A * cos(w * i + phi) + offset
return partial(force, sequence=_advance(f)) |
def _extract_subscription_url(url):
"""Extract the first part of the URL, just after subscription:
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/
"""
match = re.match(r".*/subscriptions/[a-f0-9-]+/", url, re.IGNORECASE)
if not match:
raise ValueError("Unable to extract subscription ID from URL")
return match.group(0) | def function[_extract_subscription_url, parameter[url]]:
constant[Extract the first part of the URL, just after subscription:
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/
]
variable[match] assign[=] call[name[re].match, parameter[constant[.*/subscriptions/[a-f0-9-]+/], name[url], name[re].IGNORECASE]]
if <ast.UnaryOp object at 0x7da18bc71630> begin[:]
<ast.Raise object at 0x7da18bc72aa0>
return[call[name[match].group, parameter[constant[0]]]] | keyword[def] identifier[_extract_subscription_url] ( identifier[url] ):
literal[string]
identifier[match] = identifier[re] . identifier[match] ( literal[string] , identifier[url] , identifier[re] . identifier[IGNORECASE] )
keyword[if] keyword[not] identifier[match] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[match] . identifier[group] ( literal[int] ) | def _extract_subscription_url(url):
"""Extract the first part of the URL, just after subscription:
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/
"""
match = re.match('.*/subscriptions/[a-f0-9-]+/', url, re.IGNORECASE)
if not match:
raise ValueError('Unable to extract subscription ID from URL') # depends on [control=['if'], data=[]]
return match.group(0) |
def verify(self, signature, msg):
'''
Verify the message
'''
if not self.key:
return False
try:
self.key.verify(signature + msg)
except ValueError:
return False
return True | def function[verify, parameter[self, signature, msg]]:
constant[
Verify the message
]
if <ast.UnaryOp object at 0x7da18dc9b730> begin[:]
return[constant[False]]
<ast.Try object at 0x7da2054a51e0>
return[constant[True]] | keyword[def] identifier[verify] ( identifier[self] , identifier[signature] , identifier[msg] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[key] :
keyword[return] keyword[False]
keyword[try] :
identifier[self] . identifier[key] . identifier[verify] ( identifier[signature] + identifier[msg] )
keyword[except] identifier[ValueError] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def verify(self, signature, msg):
"""
Verify the message
"""
if not self.key:
return False # depends on [control=['if'], data=[]]
try:
self.key.verify(signature + msg) # depends on [control=['try'], data=[]]
except ValueError:
return False # depends on [control=['except'], data=[]]
return True |
def start_new_kernel(startup_timeout=60, kernel_name='python', **kwargs):
"""Start a new kernel, and return its Manager and Client"""
logger.debug('Starting new kernel: "%s"' % kernel_name)
km = KernelManager(kernel_name=kernel_name,
kernel_spec_manager=NbvalKernelspecManager())
km.start_kernel(**kwargs)
kc = km.client()
kc.start_channels()
try:
kc.wait_for_ready(timeout=startup_timeout)
except RuntimeError:
logger.exception('Failure starting kernel "%s"', kernel_name)
kc.stop_channels()
km.shutdown_kernel()
raise
return km, kc | def function[start_new_kernel, parameter[startup_timeout, kernel_name]]:
constant[Start a new kernel, and return its Manager and Client]
call[name[logger].debug, parameter[binary_operation[constant[Starting new kernel: "%s"] <ast.Mod object at 0x7da2590d6920> name[kernel_name]]]]
variable[km] assign[=] call[name[KernelManager], parameter[]]
call[name[km].start_kernel, parameter[]]
variable[kc] assign[=] call[name[km].client, parameter[]]
call[name[kc].start_channels, parameter[]]
<ast.Try object at 0x7da1b1e151b0>
return[tuple[[<ast.Name object at 0x7da1b1e16050>, <ast.Name object at 0x7da1b1e11f30>]]] | keyword[def] identifier[start_new_kernel] ( identifier[startup_timeout] = literal[int] , identifier[kernel_name] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] % identifier[kernel_name] )
identifier[km] = identifier[KernelManager] ( identifier[kernel_name] = identifier[kernel_name] ,
identifier[kernel_spec_manager] = identifier[NbvalKernelspecManager] ())
identifier[km] . identifier[start_kernel] (** identifier[kwargs] )
identifier[kc] = identifier[km] . identifier[client] ()
identifier[kc] . identifier[start_channels] ()
keyword[try] :
identifier[kc] . identifier[wait_for_ready] ( identifier[timeout] = identifier[startup_timeout] )
keyword[except] identifier[RuntimeError] :
identifier[logger] . identifier[exception] ( literal[string] , identifier[kernel_name] )
identifier[kc] . identifier[stop_channels] ()
identifier[km] . identifier[shutdown_kernel] ()
keyword[raise]
keyword[return] identifier[km] , identifier[kc] | def start_new_kernel(startup_timeout=60, kernel_name='python', **kwargs):
"""Start a new kernel, and return its Manager and Client"""
logger.debug('Starting new kernel: "%s"' % kernel_name)
km = KernelManager(kernel_name=kernel_name, kernel_spec_manager=NbvalKernelspecManager())
km.start_kernel(**kwargs)
kc = km.client()
kc.start_channels()
try:
kc.wait_for_ready(timeout=startup_timeout) # depends on [control=['try'], data=[]]
except RuntimeError:
logger.exception('Failure starting kernel "%s"', kernel_name)
kc.stop_channels()
km.shutdown_kernel()
raise # depends on [control=['except'], data=[]]
return (km, kc) |
def SHR(cpu, dest, src):
"""
Shift logical right.
The shift arithmetic right (SAR) and shift logical right (SHR)
instructions shift the bits of the destination operand to the right
(toward less significant bit locations). For each shift count, the
least significant bit of the destination operand is shifted into the CF
flag, and the most significant bit is either set or cleared depending
on the instruction type. The SHR instruction clears the most
significant bit.
:param cpu: current CPU.
:param dest: destination operand.
:param src: count operand.
"""
OperandSize = dest.size
count = Operators.ZEXTEND(src.read() & (OperandSize - 1), OperandSize)
value = dest.read()
res = dest.write(value >> count) # UNSIGNED Operators.UDIV2 !! TODO Check
MASK = (1 << OperandSize) - 1
SIGN_MASK = 1 << (OperandSize - 1)
if issymbolic(count):
cpu.CF = Operators.ITE(count != 0,
((value >> Operators.ZEXTEND(count - 1, OperandSize)) & 1) != 0,
cpu.CF)
else:
if count != 0:
cpu.CF = Operators.EXTRACT(value, count - 1, 1) != 0
cpu.ZF = Operators.ITE(count != 0, res == 0, cpu.ZF)
cpu.SF = Operators.ITE(count != 0, (res & SIGN_MASK) != 0, cpu.SF)
# OF is only defined for count == 1, but in practice (unit tests from real cpu) it's calculated for count != 0
cpu.OF = Operators.ITE(count != 0, ((value >> (OperandSize - 1)) & 0x1) == 1, cpu.OF)
cpu.PF = Operators.ITE(count != 0, cpu._calculate_parity_flag(res), cpu.PF) | def function[SHR, parameter[cpu, dest, src]]:
constant[
Shift logical right.
The shift arithmetic right (SAR) and shift logical right (SHR)
instructions shift the bits of the destination operand to the right
(toward less significant bit locations). For each shift count, the
least significant bit of the destination operand is shifted into the CF
flag, and the most significant bit is either set or cleared depending
on the instruction type. The SHR instruction clears the most
significant bit.
:param cpu: current CPU.
:param dest: destination operand.
:param src: count operand.
]
variable[OperandSize] assign[=] name[dest].size
variable[count] assign[=] call[name[Operators].ZEXTEND, parameter[binary_operation[call[name[src].read, parameter[]] <ast.BitAnd object at 0x7da2590d6b60> binary_operation[name[OperandSize] - constant[1]]], name[OperandSize]]]
variable[value] assign[=] call[name[dest].read, parameter[]]
variable[res] assign[=] call[name[dest].write, parameter[binary_operation[name[value] <ast.RShift object at 0x7da2590d6a40> name[count]]]]
variable[MASK] assign[=] binary_operation[binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> name[OperandSize]] - constant[1]]
variable[SIGN_MASK] assign[=] binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> binary_operation[name[OperandSize] - constant[1]]]
if call[name[issymbolic], parameter[name[count]]] begin[:]
name[cpu].CF assign[=] call[name[Operators].ITE, parameter[compare[name[count] not_equal[!=] constant[0]], compare[binary_operation[binary_operation[name[value] <ast.RShift object at 0x7da2590d6a40> call[name[Operators].ZEXTEND, parameter[binary_operation[name[count] - constant[1]], name[OperandSize]]]] <ast.BitAnd object at 0x7da2590d6b60> constant[1]] not_equal[!=] constant[0]], name[cpu].CF]]
name[cpu].ZF assign[=] call[name[Operators].ITE, parameter[compare[name[count] not_equal[!=] constant[0]], compare[name[res] equal[==] constant[0]], name[cpu].ZF]]
name[cpu].SF assign[=] call[name[Operators].ITE, parameter[compare[name[count] not_equal[!=] constant[0]], compare[binary_operation[name[res] <ast.BitAnd object at 0x7da2590d6b60> name[SIGN_MASK]] not_equal[!=] constant[0]], name[cpu].SF]]
name[cpu].OF assign[=] call[name[Operators].ITE, parameter[compare[name[count] not_equal[!=] constant[0]], compare[binary_operation[binary_operation[name[value] <ast.RShift object at 0x7da2590d6a40> binary_operation[name[OperandSize] - constant[1]]] <ast.BitAnd object at 0x7da2590d6b60> constant[1]] equal[==] constant[1]], name[cpu].OF]]
name[cpu].PF assign[=] call[name[Operators].ITE, parameter[compare[name[count] not_equal[!=] constant[0]], call[name[cpu]._calculate_parity_flag, parameter[name[res]]], name[cpu].PF]] | keyword[def] identifier[SHR] ( identifier[cpu] , identifier[dest] , identifier[src] ):
literal[string]
identifier[OperandSize] = identifier[dest] . identifier[size]
identifier[count] = identifier[Operators] . identifier[ZEXTEND] ( identifier[src] . identifier[read] ()&( identifier[OperandSize] - literal[int] ), identifier[OperandSize] )
identifier[value] = identifier[dest] . identifier[read] ()
identifier[res] = identifier[dest] . identifier[write] ( identifier[value] >> identifier[count] )
identifier[MASK] =( literal[int] << identifier[OperandSize] )- literal[int]
identifier[SIGN_MASK] = literal[int] <<( identifier[OperandSize] - literal[int] )
keyword[if] identifier[issymbolic] ( identifier[count] ):
identifier[cpu] . identifier[CF] = identifier[Operators] . identifier[ITE] ( identifier[count] != literal[int] ,
(( identifier[value] >> identifier[Operators] . identifier[ZEXTEND] ( identifier[count] - literal[int] , identifier[OperandSize] ))& literal[int] )!= literal[int] ,
identifier[cpu] . identifier[CF] )
keyword[else] :
keyword[if] identifier[count] != literal[int] :
identifier[cpu] . identifier[CF] = identifier[Operators] . identifier[EXTRACT] ( identifier[value] , identifier[count] - literal[int] , literal[int] )!= literal[int]
identifier[cpu] . identifier[ZF] = identifier[Operators] . identifier[ITE] ( identifier[count] != literal[int] , identifier[res] == literal[int] , identifier[cpu] . identifier[ZF] )
identifier[cpu] . identifier[SF] = identifier[Operators] . identifier[ITE] ( identifier[count] != literal[int] ,( identifier[res] & identifier[SIGN_MASK] )!= literal[int] , identifier[cpu] . identifier[SF] )
identifier[cpu] . identifier[OF] = identifier[Operators] . identifier[ITE] ( identifier[count] != literal[int] ,(( identifier[value] >>( identifier[OperandSize] - literal[int] ))& literal[int] )== literal[int] , identifier[cpu] . identifier[OF] )
identifier[cpu] . identifier[PF] = identifier[Operators] . identifier[ITE] ( identifier[count] != literal[int] , identifier[cpu] . identifier[_calculate_parity_flag] ( identifier[res] ), identifier[cpu] . identifier[PF] ) | def SHR(cpu, dest, src):
"""
Shift logical right.
The shift arithmetic right (SAR) and shift logical right (SHR)
instructions shift the bits of the destination operand to the right
(toward less significant bit locations). For each shift count, the
least significant bit of the destination operand is shifted into the CF
flag, and the most significant bit is either set or cleared depending
on the instruction type. The SHR instruction clears the most
significant bit.
:param cpu: current CPU.
:param dest: destination operand.
:param src: count operand.
"""
OperandSize = dest.size
count = Operators.ZEXTEND(src.read() & OperandSize - 1, OperandSize)
value = dest.read()
res = dest.write(value >> count) # UNSIGNED Operators.UDIV2 !! TODO Check
MASK = (1 << OperandSize) - 1
SIGN_MASK = 1 << OperandSize - 1
if issymbolic(count):
cpu.CF = Operators.ITE(count != 0, value >> Operators.ZEXTEND(count - 1, OperandSize) & 1 != 0, cpu.CF) # depends on [control=['if'], data=[]]
elif count != 0:
cpu.CF = Operators.EXTRACT(value, count - 1, 1) != 0 # depends on [control=['if'], data=['count']]
cpu.ZF = Operators.ITE(count != 0, res == 0, cpu.ZF)
cpu.SF = Operators.ITE(count != 0, res & SIGN_MASK != 0, cpu.SF)
# OF is only defined for count == 1, but in practice (unit tests from real cpu) it's calculated for count != 0
cpu.OF = Operators.ITE(count != 0, value >> OperandSize - 1 & 1 == 1, cpu.OF)
cpu.PF = Operators.ITE(count != 0, cpu._calculate_parity_flag(res), cpu.PF) |
async def _kill_it_with_fire(self, container_id):
"""
Kill a container, with fire.
"""
if container_id in self._watching:
self._watching.remove(container_id)
self._container_had_error.add(container_id)
try:
await self._docker_interface.kill_container(container_id)
except:
pass | <ast.AsyncFunctionDef object at 0x7da20e9b1e10> | keyword[async] keyword[def] identifier[_kill_it_with_fire] ( identifier[self] , identifier[container_id] ):
literal[string]
keyword[if] identifier[container_id] keyword[in] identifier[self] . identifier[_watching] :
identifier[self] . identifier[_watching] . identifier[remove] ( identifier[container_id] )
identifier[self] . identifier[_container_had_error] . identifier[add] ( identifier[container_id] )
keyword[try] :
keyword[await] identifier[self] . identifier[_docker_interface] . identifier[kill_container] ( identifier[container_id] )
keyword[except] :
keyword[pass] | async def _kill_it_with_fire(self, container_id):
"""
Kill a container, with fire.
"""
if container_id in self._watching:
self._watching.remove(container_id)
self._container_had_error.add(container_id)
try:
await self._docker_interface.kill_container(container_id) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['container_id']] |
def third_prediction(self,singular_value):
"""get the omitted parameter contribution to prediction error variance
at a singular value. used to construct error variance dataframe
Parameters
----------
singular_value : int
singular value to calc third term at
Returns
-------
dict : dict
dictionary of ("third",prediction_names),error variance
"""
if not self.predictions:
raise Exception("ErrVar.third(): not predictions are set")
if self.__need_omitted is False:
zero_preds = {}
for pred in self.predictions_iter:
zero_preds[("third", pred.col_names[0])] = 0.0
return zero_preds
self.log("calc third term prediction @" + str(singular_value))
mn = min(self.jco.shape)
try:
mn = min(self.pst.npar_adj, self.pst.nnz_obs)
except:
pass
if singular_value > mn:
inf_pred = {}
for pred in self.predictions_iter:
inf_pred[("third",pred.col_names[0])] = 1.0E+35
return inf_pred
else:
results = {}
for prediction,omitted_prediction in \
zip(self.predictions_iter, self.omitted_predictions):
# comes out as row vector, but needs to be a column vector
p = ((prediction.T * self.G(singular_value) * self.omitted_jco)
- omitted_prediction.T).T
result = float((p.T * self.omitted_parcov * p).x)
results[("third", prediction.col_names[0])] = result
self.log("calc third term prediction @" + str(singular_value))
return results | def function[third_prediction, parameter[self, singular_value]]:
constant[get the omitted parameter contribution to prediction error variance
at a singular value. used to construct error variance dataframe
Parameters
----------
singular_value : int
singular value to calc third term at
Returns
-------
dict : dict
dictionary of ("third",prediction_names),error variance
]
if <ast.UnaryOp object at 0x7da1b1dd61d0> begin[:]
<ast.Raise object at 0x7da1b1dd4880>
if compare[name[self].__need_omitted is constant[False]] begin[:]
variable[zero_preds] assign[=] dictionary[[], []]
for taget[name[pred]] in starred[name[self].predictions_iter] begin[:]
call[name[zero_preds]][tuple[[<ast.Constant object at 0x7da1b1dd6890>, <ast.Subscript object at 0x7da1b1dd76a0>]]] assign[=] constant[0.0]
return[name[zero_preds]]
call[name[self].log, parameter[binary_operation[constant[calc third term prediction @] + call[name[str], parameter[name[singular_value]]]]]]
variable[mn] assign[=] call[name[min], parameter[name[self].jco.shape]]
<ast.Try object at 0x7da1b1dd4040>
if compare[name[singular_value] greater[>] name[mn]] begin[:]
variable[inf_pred] assign[=] dictionary[[], []]
for taget[name[pred]] in starred[name[self].predictions_iter] begin[:]
call[name[inf_pred]][tuple[[<ast.Constant object at 0x7da1b1dd5600>, <ast.Subscript object at 0x7da1b1dd5750>]]] assign[=] constant[1e+35]
return[name[inf_pred]] | keyword[def] identifier[third_prediction] ( identifier[self] , identifier[singular_value] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[predictions] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] identifier[self] . identifier[__need_omitted] keyword[is] keyword[False] :
identifier[zero_preds] ={}
keyword[for] identifier[pred] keyword[in] identifier[self] . identifier[predictions_iter] :
identifier[zero_preds] [( literal[string] , identifier[pred] . identifier[col_names] [ literal[int] ])]= literal[int]
keyword[return] identifier[zero_preds]
identifier[self] . identifier[log] ( literal[string] + identifier[str] ( identifier[singular_value] ))
identifier[mn] = identifier[min] ( identifier[self] . identifier[jco] . identifier[shape] )
keyword[try] :
identifier[mn] = identifier[min] ( identifier[self] . identifier[pst] . identifier[npar_adj] , identifier[self] . identifier[pst] . identifier[nnz_obs] )
keyword[except] :
keyword[pass]
keyword[if] identifier[singular_value] > identifier[mn] :
identifier[inf_pred] ={}
keyword[for] identifier[pred] keyword[in] identifier[self] . identifier[predictions_iter] :
identifier[inf_pred] [( literal[string] , identifier[pred] . identifier[col_names] [ literal[int] ])]= literal[int]
keyword[return] identifier[inf_pred]
keyword[else] :
identifier[results] ={}
keyword[for] identifier[prediction] , identifier[omitted_prediction] keyword[in] identifier[zip] ( identifier[self] . identifier[predictions_iter] , identifier[self] . identifier[omitted_predictions] ):
identifier[p] =(( identifier[prediction] . identifier[T] * identifier[self] . identifier[G] ( identifier[singular_value] )* identifier[self] . identifier[omitted_jco] )
- identifier[omitted_prediction] . identifier[T] ). identifier[T]
identifier[result] = identifier[float] (( identifier[p] . identifier[T] * identifier[self] . identifier[omitted_parcov] * identifier[p] ). identifier[x] )
identifier[results] [( literal[string] , identifier[prediction] . identifier[col_names] [ literal[int] ])]= identifier[result]
identifier[self] . identifier[log] ( literal[string] + identifier[str] ( identifier[singular_value] ))
keyword[return] identifier[results] | def third_prediction(self, singular_value):
"""get the omitted parameter contribution to prediction error variance
at a singular value. used to construct error variance dataframe
Parameters
----------
singular_value : int
singular value to calc third term at
Returns
-------
dict : dict
dictionary of ("third",prediction_names),error variance
"""
if not self.predictions:
raise Exception('ErrVar.third(): not predictions are set') # depends on [control=['if'], data=[]]
if self.__need_omitted is False:
zero_preds = {}
for pred in self.predictions_iter:
zero_preds['third', pred.col_names[0]] = 0.0 # depends on [control=['for'], data=['pred']]
return zero_preds # depends on [control=['if'], data=[]]
self.log('calc third term prediction @' + str(singular_value))
mn = min(self.jco.shape)
try:
mn = min(self.pst.npar_adj, self.pst.nnz_obs) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
if singular_value > mn:
inf_pred = {}
for pred in self.predictions_iter:
inf_pred['third', pred.col_names[0]] = 1e+35 # depends on [control=['for'], data=['pred']]
return inf_pred # depends on [control=['if'], data=[]]
else:
results = {}
for (prediction, omitted_prediction) in zip(self.predictions_iter, self.omitted_predictions):
# comes out as row vector, but needs to be a column vector
p = (prediction.T * self.G(singular_value) * self.omitted_jco - omitted_prediction.T).T
result = float((p.T * self.omitted_parcov * p).x)
results['third', prediction.col_names[0]] = result # depends on [control=['for'], data=[]]
self.log('calc third term prediction @' + str(singular_value))
return results |
def get_proxy_field(self, instance):
"""Get the proxied field of this field
"""
proxy_object = self.get_proxy_object(instance)
if not proxy_object:
return None
return proxy_object.getField(self.name) | def function[get_proxy_field, parameter[self, instance]]:
constant[Get the proxied field of this field
]
variable[proxy_object] assign[=] call[name[self].get_proxy_object, parameter[name[instance]]]
if <ast.UnaryOp object at 0x7da20e955a80> begin[:]
return[constant[None]]
return[call[name[proxy_object].getField, parameter[name[self].name]]] | keyword[def] identifier[get_proxy_field] ( identifier[self] , identifier[instance] ):
literal[string]
identifier[proxy_object] = identifier[self] . identifier[get_proxy_object] ( identifier[instance] )
keyword[if] keyword[not] identifier[proxy_object] :
keyword[return] keyword[None]
keyword[return] identifier[proxy_object] . identifier[getField] ( identifier[self] . identifier[name] ) | def get_proxy_field(self, instance):
"""Get the proxied field of this field
"""
proxy_object = self.get_proxy_object(instance)
if not proxy_object:
return None # depends on [control=['if'], data=[]]
return proxy_object.getField(self.name) |
def triangulate(vertices):
    """Triangulate a closed polygon given by 3-D vertices.

    Parameters
    ----------
    vertices : array-like
        The vertices, shape (n, 3); the z coordinate of the output is the
        mean z of the input.

    Returns
    -------
    vertices : array-like
        The triangulated vertices.
    triangles : array-like
        The triangles.
    """
    pts = np.asarray(vertices)
    count = len(pts)
    z_mean = pts[:, 2].mean()
    xy = pts[:, :2]
    # Boundary segments (0,1), (1,2), ..., closing with (count-1, 0).
    segments = np.repeat(np.arange(count + 1), 2)[1:-1]
    segments[-2:] = count - 1, 0
    # Prefer the compiled triangulator when the 'triangle' package is present.
    triangulator = _triangulate_cpp if _TRIANGLE_AVAILABLE else _triangulate_python
    xy, triangles = triangulator(xy, segments)
    out = np.empty((len(xy), 3))
    out[:, :2] = xy
    out[:, 2] = z_mean
    return out, triangles
constant[Triangulate a set of vertices
Parameters
----------
vertices : array-like
The vertices.
Returns
-------
vertices : array-like
The vertices.
tringles : array-like
The triangles.
]
variable[n] assign[=] call[name[len], parameter[name[vertices]]]
variable[vertices] assign[=] call[name[np].asarray, parameter[name[vertices]]]
variable[zmean] assign[=] call[call[name[vertices]][tuple[[<ast.Slice object at 0x7da1b10ec730>, <ast.Constant object at 0x7da1b10ec310>]]].mean, parameter[]]
variable[vertices_2d] assign[=] call[name[vertices]][tuple[[<ast.Slice object at 0x7da1b10ec070>, <ast.Slice object at 0x7da1b10ee170>]]]
variable[segments] assign[=] call[call[name[np].repeat, parameter[call[name[np].arange, parameter[binary_operation[name[n] + constant[1]]]], constant[2]]]][<ast.Slice object at 0x7da1b10ee530>]
call[name[segments]][<ast.Slice object at 0x7da1b10ec490>] assign[=] tuple[[<ast.BinOp object at 0x7da1b10ee0e0>, <ast.Constant object at 0x7da1b10ec2e0>]]
if name[_TRIANGLE_AVAILABLE] begin[:]
<ast.Tuple object at 0x7da1b10ec820> assign[=] call[name[_triangulate_cpp], parameter[name[vertices_2d], name[segments]]]
variable[vertices] assign[=] call[name[np].empty, parameter[tuple[[<ast.Call object at 0x7da1b10eded0>, <ast.Constant object at 0x7da1b10ed9c0>]]]]
call[name[vertices]][tuple[[<ast.Slice object at 0x7da1b10eca30>, <ast.Slice object at 0x7da1b10ed2d0>]]] assign[=] name[vertices_2d]
call[name[vertices]][tuple[[<ast.Slice object at 0x7da1b10ee500>, <ast.Constant object at 0x7da1b0f2a5f0>]]] assign[=] name[zmean]
return[tuple[[<ast.Name object at 0x7da1b0f29db0>, <ast.Name object at 0x7da1b0f2a950>]]] | keyword[def] identifier[triangulate] ( identifier[vertices] ):
literal[string]
identifier[n] = identifier[len] ( identifier[vertices] )
identifier[vertices] = identifier[np] . identifier[asarray] ( identifier[vertices] )
identifier[zmean] = identifier[vertices] [:, literal[int] ]. identifier[mean] ()
identifier[vertices_2d] = identifier[vertices] [:,: literal[int] ]
identifier[segments] = identifier[np] . identifier[repeat] ( identifier[np] . identifier[arange] ( identifier[n] + literal[int] ), literal[int] )[ literal[int] :- literal[int] ]
identifier[segments] [- literal[int] :]= identifier[n] - literal[int] , literal[int]
keyword[if] identifier[_TRIANGLE_AVAILABLE] :
identifier[vertices_2d] , identifier[triangles] = identifier[_triangulate_cpp] ( identifier[vertices_2d] , identifier[segments] )
keyword[else] :
identifier[vertices_2d] , identifier[triangles] = identifier[_triangulate_python] ( identifier[vertices_2d] , identifier[segments] )
identifier[vertices] = identifier[np] . identifier[empty] (( identifier[len] ( identifier[vertices_2d] ), literal[int] ))
identifier[vertices] [:,: literal[int] ]= identifier[vertices_2d]
identifier[vertices] [:, literal[int] ]= identifier[zmean]
keyword[return] identifier[vertices] , identifier[triangles] | def triangulate(vertices):
"""Triangulate a set of vertices
Parameters
----------
vertices : array-like
The vertices.
Returns
-------
vertices : array-like
The vertices.
tringles : array-like
The triangles.
"""
n = len(vertices)
vertices = np.asarray(vertices)
zmean = vertices[:, 2].mean()
vertices_2d = vertices[:, :2]
segments = np.repeat(np.arange(n + 1), 2)[1:-1]
segments[-2:] = (n - 1, 0)
if _TRIANGLE_AVAILABLE:
(vertices_2d, triangles) = _triangulate_cpp(vertices_2d, segments) # depends on [control=['if'], data=[]]
else:
(vertices_2d, triangles) = _triangulate_python(vertices_2d, segments)
vertices = np.empty((len(vertices_2d), 3))
vertices[:, :2] = vertices_2d
vertices[:, 2] = zmean
return (vertices, triangles) |
def add_new_resource(self):
    """Handle add new resource requests.

    Resets every editable parameter widget to its default value, switches
    the stacked widget to the resource edit page, and hides the Close
    button while editing is in progress.
    """
    # The scroll area layout holds a single container widget; take item 0.
    # NOTE(review): assumes the layout always has exactly one item -- confirm.
    parameters_widget = [
        self.parameters_scrollarea.layout().itemAt(i) for i in
        range(self.parameters_scrollarea.layout().count())][0].widget()
    # Collect the individual parameter editor widgets in layout order.
    parameter_widgets = [
        parameters_widget.vertical_layout.itemAt(i).widget() for i in
        range(parameters_widget.vertical_layout.count())]
    # Positional reset -- relies on the fixed ordering of widgets in the
    # layout: 0-4 text inputs, 5-7 numeric (default / minimum / maximum),
    # 8 frequency, 9 description template.
    parameter_widgets[0].set_text('')
    parameter_widgets[1].set_text('')
    parameter_widgets[2].set_text('')
    parameter_widgets[3].set_text('')
    parameter_widgets[4].set_text('')
    parameter_widgets[5].set_value(10)
    parameter_widgets[6].set_value(0)
    parameter_widgets[7].set_value(100)
    parameter_widgets[8].set_text(tr('weekly'))
    # The {{ ... }} placeholders are substituted into the translated
    # sentence before it is handed to the description widget.
    parameter_widgets[9].set_text(tr(
        'A displaced person should be provided with '
        '%(default value)s %(unit)s/%(units)s/%(unit abbreviation)s of '
        '%(resource name)s. Though no less than %(minimum allowed)s '
        'and no more than %(maximum allowed)s. This should be provided '
        '%(frequency)s.' % {
            'default value': '{{ Default }}',
            'unit': '{{ Unit }}',
            'units': '{{ Units }}',
            'unit abbreviation': '{{ Unit abbreviation }}',
            'resource name': '{{ Resource name }}',
            'minimum allowed': '{{ Minimum allowed }}',
            'maximum allowed': '{{ Maximum allowed }}',
            'frequency': '{{ Frequency }}'
        }))
    self.stacked_widget.setCurrentWidget(self.resource_edit_page)
    # hide the close button
    self.button_box.button(QDialogButtonBox.Close).setHidden(True)
constant[Handle add new resource requests.
]
variable[parameters_widget] assign[=] call[call[<ast.ListComp object at 0x7da20c6ab070>][constant[0]].widget, parameter[]]
variable[parameter_widgets] assign[=] <ast.ListComp object at 0x7da18ede6140>
call[call[name[parameter_widgets]][constant[0]].set_text, parameter[constant[]]]
call[call[name[parameter_widgets]][constant[1]].set_text, parameter[constant[]]]
call[call[name[parameter_widgets]][constant[2]].set_text, parameter[constant[]]]
call[call[name[parameter_widgets]][constant[3]].set_text, parameter[constant[]]]
call[call[name[parameter_widgets]][constant[4]].set_text, parameter[constant[]]]
call[call[name[parameter_widgets]][constant[5]].set_value, parameter[constant[10]]]
call[call[name[parameter_widgets]][constant[6]].set_value, parameter[constant[0]]]
call[call[name[parameter_widgets]][constant[7]].set_value, parameter[constant[100]]]
call[call[name[parameter_widgets]][constant[8]].set_text, parameter[call[name[tr], parameter[constant[weekly]]]]]
call[call[name[parameter_widgets]][constant[9]].set_text, parameter[call[name[tr], parameter[binary_operation[constant[A displaced person should be provided with %(default value)s %(unit)s/%(units)s/%(unit abbreviation)s of %(resource name)s. Though no less than %(minimum allowed)s and no more than %(maximum allowed)s. This should be provided %(frequency)s.] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da20c6a8bb0>, <ast.Constant object at 0x7da20c6ab760>, <ast.Constant object at 0x7da20c6ab580>, <ast.Constant object at 0x7da20c6a9c60>, <ast.Constant object at 0x7da20c6a9000>, <ast.Constant object at 0x7da20c6a9e70>, <ast.Constant object at 0x7da20c6abcd0>, <ast.Constant object at 0x7da20c6a9060>], [<ast.Constant object at 0x7da20c6a8eb0>, <ast.Constant object at 0x7da20c6a8100>, <ast.Constant object at 0x7da20c6a9db0>, <ast.Constant object at 0x7da20c6a8fd0>, <ast.Constant object at 0x7da20c6a94e0>, <ast.Constant object at 0x7da20c6a97e0>, <ast.Constant object at 0x7da20c6a92a0>, <ast.Constant object at 0x7da20c6a93c0>]]]]]]]
call[name[self].stacked_widget.setCurrentWidget, parameter[name[self].resource_edit_page]]
call[call[name[self].button_box.button, parameter[name[QDialogButtonBox].Close]].setHidden, parameter[constant[True]]] | keyword[def] identifier[add_new_resource] ( identifier[self] ):
literal[string]
identifier[parameters_widget] =[
identifier[self] . identifier[parameters_scrollarea] . identifier[layout] (). identifier[itemAt] ( identifier[i] ) keyword[for] identifier[i] keyword[in]
identifier[range] ( identifier[self] . identifier[parameters_scrollarea] . identifier[layout] (). identifier[count] ())][ literal[int] ]. identifier[widget] ()
identifier[parameter_widgets] =[
identifier[parameters_widget] . identifier[vertical_layout] . identifier[itemAt] ( identifier[i] ). identifier[widget] () keyword[for] identifier[i] keyword[in]
identifier[range] ( identifier[parameters_widget] . identifier[vertical_layout] . identifier[count] ())]
identifier[parameter_widgets] [ literal[int] ]. identifier[set_text] ( literal[string] )
identifier[parameter_widgets] [ literal[int] ]. identifier[set_text] ( literal[string] )
identifier[parameter_widgets] [ literal[int] ]. identifier[set_text] ( literal[string] )
identifier[parameter_widgets] [ literal[int] ]. identifier[set_text] ( literal[string] )
identifier[parameter_widgets] [ literal[int] ]. identifier[set_text] ( literal[string] )
identifier[parameter_widgets] [ literal[int] ]. identifier[set_value] ( literal[int] )
identifier[parameter_widgets] [ literal[int] ]. identifier[set_value] ( literal[int] )
identifier[parameter_widgets] [ literal[int] ]. identifier[set_value] ( literal[int] )
identifier[parameter_widgets] [ literal[int] ]. identifier[set_text] ( identifier[tr] ( literal[string] ))
identifier[parameter_widgets] [ literal[int] ]. identifier[set_text] ( identifier[tr] (
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] %{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}))
identifier[self] . identifier[stacked_widget] . identifier[setCurrentWidget] ( identifier[self] . identifier[resource_edit_page] )
identifier[self] . identifier[button_box] . identifier[button] ( identifier[QDialogButtonBox] . identifier[Close] ). identifier[setHidden] ( keyword[True] ) | def add_new_resource(self):
"""Handle add new resource requests.
"""
parameters_widget = [self.parameters_scrollarea.layout().itemAt(i) for i in range(self.parameters_scrollarea.layout().count())][0].widget()
parameter_widgets = [parameters_widget.vertical_layout.itemAt(i).widget() for i in range(parameters_widget.vertical_layout.count())]
parameter_widgets[0].set_text('')
parameter_widgets[1].set_text('')
parameter_widgets[2].set_text('')
parameter_widgets[3].set_text('')
parameter_widgets[4].set_text('')
parameter_widgets[5].set_value(10)
parameter_widgets[6].set_value(0)
parameter_widgets[7].set_value(100)
parameter_widgets[8].set_text(tr('weekly'))
parameter_widgets[9].set_text(tr('A displaced person should be provided with %(default value)s %(unit)s/%(units)s/%(unit abbreviation)s of %(resource name)s. Though no less than %(minimum allowed)s and no more than %(maximum allowed)s. This should be provided %(frequency)s.' % {'default value': '{{ Default }}', 'unit': '{{ Unit }}', 'units': '{{ Units }}', 'unit abbreviation': '{{ Unit abbreviation }}', 'resource name': '{{ Resource name }}', 'minimum allowed': '{{ Minimum allowed }}', 'maximum allowed': '{{ Maximum allowed }}', 'frequency': '{{ Frequency }}'}))
self.stacked_widget.setCurrentWidget(self.resource_edit_page)
# hide the close button
self.button_box.button(QDialogButtonBox.Close).setHidden(True) |
def iter_chunk_index(num_samples, chunksize):
    """Yield (start, stop) index pairs for chunked iteration.

    Each pair delimits a slice of at most `chunksize` items out of an
    array of size `num_samples`; the final slice may be shorter.
    """
    start = 0
    for size in iter_chunksize(num_samples, chunksize):
        stop = start + size
        yield start, stop
        start = stop
constant[Iterator used to iterate in chunks over an array of size `num_samples`.
At each iteration returns a start and stop index for a slice of size
`chunksize`. In the last iteration the slice may be smaller.
]
variable[i] assign[=] constant[0]
for taget[name[c_size]] in starred[call[name[iter_chunksize], parameter[name[num_samples], name[chunksize]]]] begin[:]
<ast.Yield object at 0x7da18f58fca0>
<ast.AugAssign object at 0x7da18f58e4a0> | keyword[def] identifier[iter_chunk_index] ( identifier[num_samples] , identifier[chunksize] ):
literal[string]
identifier[i] = literal[int]
keyword[for] identifier[c_size] keyword[in] identifier[iter_chunksize] ( identifier[num_samples] , identifier[chunksize] ):
keyword[yield] identifier[i] , identifier[i] + identifier[c_size]
identifier[i] += identifier[c_size] | def iter_chunk_index(num_samples, chunksize):
"""Iterator used to iterate in chunks over an array of size `num_samples`.
At each iteration returns a start and stop index for a slice of size
`chunksize`. In the last iteration the slice may be smaller.
"""
i = 0
for c_size in iter_chunksize(num_samples, chunksize):
yield (i, i + c_size)
i += c_size # depends on [control=['for'], data=['c_size']] |
def nacm_denied_data_writes(self, **kwargs):
    """Build and dispatch a NETCONF config for nacm/denied-data-writes.

    Pops 'denied_data_writes' from kwargs as the leaf text and sends the
    resulting element tree through the 'callback' kwarg, defaulting to
    ``self._callback``.
    """
    config = ET.Element("config")
    acm = ET.SubElement(
        config, "nacm", xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-acm")
    leaf = ET.SubElement(acm, "denied-data-writes")
    leaf.text = kwargs.pop('denied_data_writes')
    dispatch = kwargs.pop('callback', self._callback)
    return dispatch(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[nacm] assign[=] call[name[ET].SubElement, parameter[name[config], constant[nacm]]]
variable[denied_data_writes] assign[=] call[name[ET].SubElement, parameter[name[nacm], constant[denied-data-writes]]]
name[denied_data_writes].text assign[=] call[name[kwargs].pop, parameter[constant[denied_data_writes]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[nacm_denied_data_writes] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[nacm] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[denied_data_writes] = identifier[ET] . identifier[SubElement] ( identifier[nacm] , literal[string] )
identifier[denied_data_writes] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def nacm_denied_data_writes(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
nacm = ET.SubElement(config, 'nacm', xmlns='urn:ietf:params:xml:ns:yang:ietf-netconf-acm')
denied_data_writes = ET.SubElement(nacm, 'denied-data-writes')
denied_data_writes.text = kwargs.pop('denied_data_writes')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def get_sections_2dnt(self, sec2d_go):
    """Return a sections list containing sorted lists of namedtuples."""
    sections = []
    for section_name, gos in sec2d_go:
        sections.append((section_name, self.get_ntgos_sorted(gos)))
    return sections
constant[Return a sections list containing sorted lists of namedtuples.]
return[<ast.ListComp object at 0x7da20c6aaad0>] | keyword[def] identifier[get_sections_2dnt] ( identifier[self] , identifier[sec2d_go] ):
literal[string]
keyword[return] [( identifier[nm] , identifier[self] . identifier[get_ntgos_sorted] ( identifier[gos] )) keyword[for] identifier[nm] , identifier[gos] keyword[in] identifier[sec2d_go] ] | def get_sections_2dnt(self, sec2d_go):
"""Return a sections list containing sorted lists of namedtuples."""
return [(nm, self.get_ntgos_sorted(gos)) for (nm, gos) in sec2d_go] |
def embedding_lookup(self, x, means):
    """Compute nearest neighbors and loss for training the embeddings.

    Args:
      x: Batch of encoder continuous latent states sliced/projected into
        shape [-1, num_blocks, block_dim].
      means: Embedding means.

    Returns:
      The nearest neighbor in one hot form, the nearest neighbor itself,
      the commitment loss, embedding training loss.
    """
    x_means_hot = self.nearest_neighbor(x, means)
    x_means_hot_flat = tf.reshape(
        x_means_hot, [-1, self.hparams.num_blocks, self.hparams.block_v_size])
    # Per-block matmul: move the block axis first, multiply the one-hot
    # codes by the per-block codebooks, then restore batch-major order.
    x_means = tf.matmul(tf.transpose(x_means_hot_flat, perm=[1, 0, 2]), means)
    x_means = tf.transpose(x_means, [1, 0, 2])
    # q_loss trains the codebook (gradient is stopped at the encoder
    # output); e_loss is the commitment term (gradient stopped at the
    # codebook side).
    q_loss = tf.reduce_mean(
        tf.squared_difference(tf.stop_gradient(x), x_means))
    e_loss = tf.reduce_mean(
        tf.squared_difference(x, tf.stop_gradient(x_means)))
    return x_means_hot, x_means, q_loss, e_loss
constant[Compute nearest neighbors and loss for training the embeddings.
Args:
x: Batch of encoder continuous latent states sliced/projected into
shape
[-1, num_blocks, block_dim].
means: Embedding means.
Returns:
The nearest neighbor in one hot form, the nearest neighbor
itself, the
commitment loss, embedding training loss.
]
variable[x_means_hot] assign[=] call[name[self].nearest_neighbor, parameter[name[x], name[means]]]
variable[x_means_hot_flat] assign[=] call[name[tf].reshape, parameter[name[x_means_hot], list[[<ast.UnaryOp object at 0x7da20c7cbc40>, <ast.Attribute object at 0x7da20c7cb190>, <ast.Attribute object at 0x7da20c7c88e0>]]]]
variable[x_means] assign[=] call[name[tf].matmul, parameter[call[name[tf].transpose, parameter[name[x_means_hot_flat]]], name[means]]]
variable[x_means] assign[=] call[name[tf].transpose, parameter[name[x_means], list[[<ast.Constant object at 0x7da20c7c9390>, <ast.Constant object at 0x7da20c7ca230>, <ast.Constant object at 0x7da20c7c8880>]]]]
variable[q_loss] assign[=] call[name[tf].reduce_mean, parameter[call[name[tf].squared_difference, parameter[call[name[tf].stop_gradient, parameter[name[x]]], name[x_means]]]]]
variable[e_loss] assign[=] call[name[tf].reduce_mean, parameter[call[name[tf].squared_difference, parameter[name[x], call[name[tf].stop_gradient, parameter[name[x_means]]]]]]]
return[tuple[[<ast.Name object at 0x7da20c7ca1d0>, <ast.Name object at 0x7da20c7cbdf0>, <ast.Name object at 0x7da20c7c80d0>, <ast.Name object at 0x7da20c7c94e0>]]] | keyword[def] identifier[embedding_lookup] ( identifier[self] , identifier[x] , identifier[means] ):
literal[string]
identifier[x_means_hot] = identifier[self] . identifier[nearest_neighbor] ( identifier[x] , identifier[means] )
identifier[x_means_hot_flat] = identifier[tf] . identifier[reshape] (
identifier[x_means_hot] ,[- literal[int] , identifier[self] . identifier[hparams] . identifier[num_blocks] , identifier[self] . identifier[hparams] . identifier[block_v_size] ])
identifier[x_means] = identifier[tf] . identifier[matmul] ( identifier[tf] . identifier[transpose] ( identifier[x_means_hot_flat] , identifier[perm] =[ literal[int] , literal[int] , literal[int] ]), identifier[means] )
identifier[x_means] = identifier[tf] . identifier[transpose] ( identifier[x_means] ,[ literal[int] , literal[int] , literal[int] ])
identifier[q_loss] = identifier[tf] . identifier[reduce_mean] (
identifier[tf] . identifier[squared_difference] ( identifier[tf] . identifier[stop_gradient] ( identifier[x] ), identifier[x_means] ))
identifier[e_loss] = identifier[tf] . identifier[reduce_mean] (
identifier[tf] . identifier[squared_difference] ( identifier[x] , identifier[tf] . identifier[stop_gradient] ( identifier[x_means] )))
keyword[return] identifier[x_means_hot] , identifier[x_means] , identifier[q_loss] , identifier[e_loss] | def embedding_lookup(self, x, means):
"""Compute nearest neighbors and loss for training the embeddings.
Args:
x: Batch of encoder continuous latent states sliced/projected into
shape
[-1, num_blocks, block_dim].
means: Embedding means.
Returns:
The nearest neighbor in one hot form, the nearest neighbor
itself, the
commitment loss, embedding training loss.
"""
x_means_hot = self.nearest_neighbor(x, means)
x_means_hot_flat = tf.reshape(x_means_hot, [-1, self.hparams.num_blocks, self.hparams.block_v_size])
x_means = tf.matmul(tf.transpose(x_means_hot_flat, perm=[1, 0, 2]), means)
x_means = tf.transpose(x_means, [1, 0, 2])
q_loss = tf.reduce_mean(tf.squared_difference(tf.stop_gradient(x), x_means))
e_loss = tf.reduce_mean(tf.squared_difference(x, tf.stop_gradient(x_means)))
return (x_means_hot, x_means, q_loss, e_loss) |
def GetPixelColor(self, x: int, y: int) -> int:
    """
    Call native `GetPixelColor` if control has a valid native handle,
    otherwise return None.
    Use `self.ToBitmap` if control doesn't have a valid native handle or you get many pixels.
    x: int, internal x position.
    y: int, internal y position.
    Return int, a color value in bgr.
    r = bgr & 0x0000FF
    g = (bgr & 0x00FF00) >> 8
    b = (bgr & 0xFF0000) >> 16
    """
    handle = self.NativeWindowHandle
    if not handle:
        return None
    return GetPixelColor(x, y, handle)
constant[
Call native `GetPixelColor` if control has a valid native handle.
Use `self.ToBitmap` if control doesn't have a valid native handle or you get many pixels.
x: int, internal x position.
y: int, internal y position.
Return int, a color value in bgr.
r = bgr & 0x0000FF
g = (bgr & 0x00FF00) >> 8
b = (bgr & 0xFF0000) >> 16
]
variable[handle] assign[=] name[self].NativeWindowHandle
if name[handle] begin[:]
return[call[name[GetPixelColor], parameter[name[x], name[y], name[handle]]]] | keyword[def] identifier[GetPixelColor] ( identifier[self] , identifier[x] : identifier[int] , identifier[y] : identifier[int] )-> identifier[int] :
literal[string]
identifier[handle] = identifier[self] . identifier[NativeWindowHandle]
keyword[if] identifier[handle] :
keyword[return] identifier[GetPixelColor] ( identifier[x] , identifier[y] , identifier[handle] ) | def GetPixelColor(self, x: int, y: int) -> int:
"""
Call native `GetPixelColor` if control has a valid native handle.
Use `self.ToBitmap` if control doesn't have a valid native handle or you get many pixels.
x: int, internal x position.
y: int, internal y position.
Return int, a color value in bgr.
r = bgr & 0x0000FF
g = (bgr & 0x00FF00) >> 8
b = (bgr & 0xFF0000) >> 16
"""
handle = self.NativeWindowHandle
if handle:
return GetPixelColor(x, y, handle) # depends on [control=['if'], data=[]] |
def ParseOptions(cls, options, configuration_object):
    """Parses and validates options.

    Args:
      options (argparse.Namespace): parser options.
      configuration_object (CLITool): object to be configured by the argument
          helper.

    Raises:
      BadConfigObject: when the configuration object is of the wrong type.
    """
    if not isinstance(configuration_object, tools.CLITool):
        raise errors.BadConfigObject(
            'Configuration object is not an instance of CLITool')

    # Extraction-behavior options are stored as private attributes on the
    # configuration object for later use.
    setattr(configuration_object, '_preferred_year',
            cls._ParseNumericOption(options, 'preferred_year'))
    setattr(configuration_object, '_process_archives',
            getattr(options, 'process_archives', False))
    setattr(configuration_object, '_process_compressed_streams',
            getattr(options, 'process_compressed_streams', True))
constant[Parses and validates options.
Args:
options (argparse.Namespace): parser options.
configuration_object (CLITool): object to be configured by the argument
helper.
Raises:
BadConfigObject: when the configuration object is of the wrong type.
]
if <ast.UnaryOp object at 0x7da20c7950f0> begin[:]
<ast.Raise object at 0x7da20c7950c0>
variable[preferred_year] assign[=] call[name[cls]._ParseNumericOption, parameter[name[options], constant[preferred_year]]]
variable[process_archives] assign[=] call[name[getattr], parameter[name[options], constant[process_archives], constant[False]]]
variable[process_compressed_streams] assign[=] call[name[getattr], parameter[name[options], constant[process_compressed_streams], constant[True]]]
call[name[setattr], parameter[name[configuration_object], constant[_preferred_year], name[preferred_year]]]
call[name[setattr], parameter[name[configuration_object], constant[_process_archives], name[process_archives]]]
call[name[setattr], parameter[name[configuration_object], constant[_process_compressed_streams], name[process_compressed_streams]]] | keyword[def] identifier[ParseOptions] ( identifier[cls] , identifier[options] , identifier[configuration_object] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[configuration_object] , identifier[tools] . identifier[CLITool] ):
keyword[raise] identifier[errors] . identifier[BadConfigObject] (
literal[string] )
identifier[preferred_year] = identifier[cls] . identifier[_ParseNumericOption] ( identifier[options] , literal[string] )
identifier[process_archives] = identifier[getattr] ( identifier[options] , literal[string] , keyword[False] )
identifier[process_compressed_streams] = identifier[getattr] (
identifier[options] , literal[string] , keyword[True] )
identifier[setattr] ( identifier[configuration_object] , literal[string] , identifier[preferred_year] )
identifier[setattr] ( identifier[configuration_object] , literal[string] , identifier[process_archives] )
identifier[setattr] (
identifier[configuration_object] , literal[string] ,
identifier[process_compressed_streams] ) | def ParseOptions(cls, options, configuration_object):
"""Parses and validates options.
Args:
options (argparse.Namespace): parser options.
configuration_object (CLITool): object to be configured by the argument
helper.
Raises:
BadConfigObject: when the configuration object is of the wrong type.
"""
if not isinstance(configuration_object, tools.CLITool):
raise errors.BadConfigObject('Configuration object is not an instance of CLITool') # depends on [control=['if'], data=[]]
preferred_year = cls._ParseNumericOption(options, 'preferred_year')
process_archives = getattr(options, 'process_archives', False)
process_compressed_streams = getattr(options, 'process_compressed_streams', True)
setattr(configuration_object, '_preferred_year', preferred_year)
setattr(configuration_object, '_process_archives', process_archives)
setattr(configuration_object, '_process_compressed_streams', process_compressed_streams) |
def tablestructure(tablename, dataman=True, column=True, subtable=False,
                   sort=False):
    """Print the structure of a table.

    Equivalent to :func:`table.showstructure`, but opens the table for
    you (with ack suppressed) before printing.
    """
    tab = table(tablename, ack=False)
    six.print_(tab.showstructure(dataman, column, subtable, sort))
constant[Print the structure of a table.
It is the same as :func:`table.showstructure`, but without the need to open
the table first.
]
variable[t] assign[=] call[name[table], parameter[name[tablename]]]
call[name[six].print_, parameter[call[name[t].showstructure, parameter[name[dataman], name[column], name[subtable], name[sort]]]]] | keyword[def] identifier[tablestructure] ( identifier[tablename] , identifier[dataman] = keyword[True] , identifier[column] = keyword[True] , identifier[subtable] = keyword[False] ,
identifier[sort] = keyword[False] ):
literal[string]
identifier[t] = identifier[table] ( identifier[tablename] , identifier[ack] = keyword[False] )
identifier[six] . identifier[print_] ( identifier[t] . identifier[showstructure] ( identifier[dataman] , identifier[column] , identifier[subtable] , identifier[sort] )) | def tablestructure(tablename, dataman=True, column=True, subtable=False, sort=False):
"""Print the structure of a table.
It is the same as :func:`table.showstructure`, but without the need to open
the table first.
"""
t = table(tablename, ack=False)
six.print_(t.showstructure(dataman, column, subtable, sort)) |
def update(self, title, unsubscribe_page, confirmed_opt_in,
           confirmation_success_page, unsubscribe_setting="AllClientLists",
           add_unsubscribes_to_supp_list=False, scrub_active_with_supp_list=False):
    """Updates this list."""
    payload = json.dumps({
        "Title": title,
        "UnsubscribePage": unsubscribe_page,
        "ConfirmedOptIn": confirmed_opt_in,
        "ConfirmationSuccessPage": confirmation_success_page,
        "UnsubscribeSetting": unsubscribe_setting,
        "AddUnsubscribesToSuppList": add_unsubscribes_to_supp_list,
        "ScrubActiveWithSuppList": scrub_active_with_supp_list})
    # The API response body is not used by callers of this method.
    response = self._put("/lists/%s.json" % self.list_id, payload)
constant[Updates this list.]
variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da18f720820>, <ast.Constant object at 0x7da18f723bb0>, <ast.Constant object at 0x7da18f7223b0>, <ast.Constant object at 0x7da18f721060>, <ast.Constant object at 0x7da18f7220e0>, <ast.Constant object at 0x7da18f722f20>, <ast.Constant object at 0x7da18f720e20>], [<ast.Name object at 0x7da18f720a30>, <ast.Name object at 0x7da18f722530>, <ast.Name object at 0x7da18f722470>, <ast.Name object at 0x7da18f721900>, <ast.Name object at 0x7da18f722800>, <ast.Name object at 0x7da18f7222c0>, <ast.Name object at 0x7da18f7211b0>]]
variable[response] assign[=] call[name[self]._put, parameter[binary_operation[constant[/lists/%s.json] <ast.Mod object at 0x7da2590d6920> name[self].list_id], call[name[json].dumps, parameter[name[body]]]]] | keyword[def] identifier[update] ( identifier[self] , identifier[title] , identifier[unsubscribe_page] , identifier[confirmed_opt_in] ,
identifier[confirmation_success_page] , identifier[unsubscribe_setting] = literal[string] ,
identifier[add_unsubscribes_to_supp_list] = keyword[False] , identifier[scrub_active_with_supp_list] = keyword[False] ):
literal[string]
identifier[body] ={
literal[string] : identifier[title] ,
literal[string] : identifier[unsubscribe_page] ,
literal[string] : identifier[confirmed_opt_in] ,
literal[string] : identifier[confirmation_success_page] ,
literal[string] : identifier[unsubscribe_setting] ,
literal[string] : identifier[add_unsubscribes_to_supp_list] ,
literal[string] : identifier[scrub_active_with_supp_list] }
identifier[response] = identifier[self] . identifier[_put] ( literal[string] % identifier[self] . identifier[list_id] , identifier[json] . identifier[dumps] ( identifier[body] )) | def update(self, title, unsubscribe_page, confirmed_opt_in, confirmation_success_page, unsubscribe_setting='AllClientLists', add_unsubscribes_to_supp_list=False, scrub_active_with_supp_list=False):
"""Updates this list."""
body = {'Title': title, 'UnsubscribePage': unsubscribe_page, 'ConfirmedOptIn': confirmed_opt_in, 'ConfirmationSuccessPage': confirmation_success_page, 'UnsubscribeSetting': unsubscribe_setting, 'AddUnsubscribesToSuppList': add_unsubscribes_to_supp_list, 'ScrubActiveWithSuppList': scrub_active_with_supp_list}
response = self._put('/lists/%s.json' % self.list_id, json.dumps(body)) |
def _setup_helper():
    """Emit the shell-integration snippet shipped next to this module."""
    here = os.path.abspath(os.path.dirname(__file__))
    with open(os.path.join(here, "helper.sh")) as script:
        contents = script.read()
    click.echo(contents)
constant[Print the shell integration code.]
variable[base] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.dirname, parameter[name[__file__]]]]]
variable[helper] assign[=] call[name[os].path.join, parameter[name[base], constant[helper.sh]]]
with call[name[open], parameter[name[helper]]] begin[:]
call[name[click].echo, parameter[call[name[fh].read, parameter[]]]] | keyword[def] identifier[_setup_helper] ():
literal[string]
identifier[base] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ))
identifier[helper] = identifier[os] . identifier[path] . identifier[join] ( identifier[base] , literal[string] )
keyword[with] identifier[open] ( identifier[helper] ) keyword[as] identifier[fh] :
identifier[click] . identifier[echo] ( identifier[fh] . identifier[read] ()) | def _setup_helper():
"""Print the shell integration code."""
base = os.path.abspath(os.path.dirname(__file__))
helper = os.path.join(base, 'helper.sh')
with open(helper) as fh:
click.echo(fh.read()) # depends on [control=['with'], data=['fh']] |
def add_inline_interface(self, interface_id, second_interface_id,
            logical_interface_ref=None, vlan_id=None, second_vlan_id=None, zone_ref=None,
            second_zone_ref=None, failure_mode='normal', comment=None, **kw):
        """
        Add an inline interface pair. This method is only for IPS or L2FW engine
        types.
        :param str interface_id: interface id of first interface
        :param str second_interface_id: second interface pair id
        :param str, href logical_interface_ref: logical interface by href or name
        :param str vlan_id: vlan ID for first interface in pair
        :param str second_vlan_id: vlan ID for second interface in pair
        :param str, href zone_ref: zone reference by name or href for first interface
        :param str, href second_zone_ref: zone reference by name or href for second interface
        :param str failure_mode: normal or bypass
        :param str comment: optional comment
        :raises EngineCommandFailed: failure creating interface
        :return: None
        """
        # The 'interface' element name differs by engine type: plain firewalls use
        # the caller-supplied kw (if any); IPS/L2FW engines use 'inline_interface'.
        interface_spec = {'interface_id': interface_id, 'second_interface_id': second_interface_id,
            'interface': kw.get('interface') if self._engine.type in ('single_fw', 'fw_cluster')
            else 'inline_interface'}
        # Common per-interface settings, shared by both the update and create paths
        # below (each path mutates this dict differently before use).
        _interface = {'logical_interface_ref': logical_interface_ref,
            'failure_mode': failure_mode, 'zone_ref': zone_ref, 'second_zone_ref': second_zone_ref,
            'comment': comment}
        vlan = {'vlan_id': vlan_id, 'second_vlan_id': second_vlan_id}
        try:
            # Inline pairs are addressed by the combined '<first>-<second>' id.
            inline_id = '{}-{}'.format(interface_id, second_interface_id)
            interface = self._engine.interface.get(inline_id)
            # Pair already exists: fold the VLAN ids into the interface settings
            # and append them as a new sub-interface entry on the existing pair.
            _interface.update(vlan)
            interface_spec.update(interfaces=[_interface])
            interface._add_interface(**interface_spec)
            return interface.update()
        except InterfaceNotFound:
            # Pair does not exist yet: nest the VLAN dict under 'interfaces' and
            # flatten the settings into the spec to build a brand new interface.
            _interface.update(interfaces=[vlan])
            interface_spec.update(_interface)
            interface = Layer2PhysicalInterface(**interface_spec)
            return self._engine.add_interface(interface)
constant[
Add an inline interface pair. This method is only for IPS or L2FW engine
types.
:param str interface_id: interface id of first interface
:param str second_interface_id: second interface pair id
:param str, href logical_interface_ref: logical interface by href or name
:param str vlan_id: vlan ID for first interface in pair
:param str second_vlan_id: vlan ID for second interface in pair
:param str, href zone_ref: zone reference by name or href for first interface
:param str, href second_zone_ref: zone reference by nae or href for second interface
:param str failure_mode: normal or bypass
:param str comment: optional comment
:raises EngineCommandFailed: failure creating interface
:return: None
]
variable[interface_spec] assign[=] dictionary[[<ast.Constant object at 0x7da1b1a2d210>, <ast.Constant object at 0x7da1b1a2eec0>, <ast.Constant object at 0x7da1b1a2f100>], [<ast.Name object at 0x7da1b1a2cf70>, <ast.Name object at 0x7da1b1a2fa00>, <ast.IfExp object at 0x7da1b1a2db10>]]
variable[_interface] assign[=] dictionary[[<ast.Constant object at 0x7da1b1a2d6f0>, <ast.Constant object at 0x7da1b1a2d570>, <ast.Constant object at 0x7da1b1a94fa0>, <ast.Constant object at 0x7da1b1a95060>, <ast.Constant object at 0x7da1b1a96dd0>], [<ast.Name object at 0x7da1b1a96f80>, <ast.Name object at 0x7da1b1a944c0>, <ast.Name object at 0x7da1b1a944f0>, <ast.Name object at 0x7da1b1a967a0>, <ast.Name object at 0x7da1b1a96740>]]
variable[vlan] assign[=] dictionary[[<ast.Constant object at 0x7da1b1a96980>, <ast.Constant object at 0x7da1b1a95390>], [<ast.Name object at 0x7da1b1a96f50>, <ast.Name object at 0x7da1b1a96830>]]
<ast.Try object at 0x7da1b1a96860> | keyword[def] identifier[add_inline_interface] ( identifier[self] , identifier[interface_id] , identifier[second_interface_id] ,
identifier[logical_interface_ref] = keyword[None] , identifier[vlan_id] = keyword[None] , identifier[second_vlan_id] = keyword[None] , identifier[zone_ref] = keyword[None] ,
identifier[second_zone_ref] = keyword[None] , identifier[failure_mode] = literal[string] , identifier[comment] = keyword[None] ,** identifier[kw] ):
literal[string]
identifier[interface_spec] ={ literal[string] : identifier[interface_id] , literal[string] : identifier[second_interface_id] ,
literal[string] : identifier[kw] . identifier[get] ( literal[string] ) keyword[if] identifier[self] . identifier[_engine] . identifier[type] keyword[in] ( literal[string] , literal[string] )
keyword[else] literal[string] }
identifier[_interface] ={ literal[string] : identifier[logical_interface_ref] ,
literal[string] : identifier[failure_mode] , literal[string] : identifier[zone_ref] , literal[string] : identifier[second_zone_ref] ,
literal[string] : identifier[comment] }
identifier[vlan] ={ literal[string] : identifier[vlan_id] , literal[string] : identifier[second_vlan_id] }
keyword[try] :
identifier[inline_id] = literal[string] . identifier[format] ( identifier[interface_id] , identifier[second_interface_id] )
identifier[interface] = identifier[self] . identifier[_engine] . identifier[interface] . identifier[get] ( identifier[inline_id] )
identifier[_interface] . identifier[update] ( identifier[vlan] )
identifier[interface_spec] . identifier[update] ( identifier[interfaces] =[ identifier[_interface] ])
identifier[interface] . identifier[_add_interface] (** identifier[interface_spec] )
keyword[return] identifier[interface] . identifier[update] ()
keyword[except] identifier[InterfaceNotFound] :
identifier[_interface] . identifier[update] ( identifier[interfaces] =[ identifier[vlan] ])
identifier[interface_spec] . identifier[update] ( identifier[_interface] )
identifier[interface] = identifier[Layer2PhysicalInterface] (** identifier[interface_spec] )
keyword[return] identifier[self] . identifier[_engine] . identifier[add_interface] ( identifier[interface] ) | def add_inline_interface(self, interface_id, second_interface_id, logical_interface_ref=None, vlan_id=None, second_vlan_id=None, zone_ref=None, second_zone_ref=None, failure_mode='normal', comment=None, **kw):
"""
Add an inline interface pair. This method is only for IPS or L2FW engine
types.
:param str interface_id: interface id of first interface
:param str second_interface_id: second interface pair id
:param str, href logical_interface_ref: logical interface by href or name
:param str vlan_id: vlan ID for first interface in pair
:param str second_vlan_id: vlan ID for second interface in pair
:param str, href zone_ref: zone reference by name or href for first interface
:param str, href second_zone_ref: zone reference by nae or href for second interface
:param str failure_mode: normal or bypass
:param str comment: optional comment
:raises EngineCommandFailed: failure creating interface
:return: None
"""
interface_spec = {'interface_id': interface_id, 'second_interface_id': second_interface_id, 'interface': kw.get('interface') if self._engine.type in ('single_fw', 'fw_cluster') else 'inline_interface'}
_interface = {'logical_interface_ref': logical_interface_ref, 'failure_mode': failure_mode, 'zone_ref': zone_ref, 'second_zone_ref': second_zone_ref, 'comment': comment}
vlan = {'vlan_id': vlan_id, 'second_vlan_id': second_vlan_id}
try:
inline_id = '{}-{}'.format(interface_id, second_interface_id)
interface = self._engine.interface.get(inline_id)
_interface.update(vlan)
interface_spec.update(interfaces=[_interface])
interface._add_interface(**interface_spec)
return interface.update() # depends on [control=['try'], data=[]]
except InterfaceNotFound:
_interface.update(interfaces=[vlan])
interface_spec.update(_interface)
interface = Layer2PhysicalInterface(**interface_spec)
return self._engine.add_interface(interface) # depends on [control=['except'], data=[]] |
def newline(self):
        """
        Move the cursor to the start of the next line. When the cursor is
        already on the lowest line, the displayed contents scroll up one
        character row instead, discarding the top line.
        """
        self.carriage_return()
        if self._cy + (2 * self._ch) < self._device.height:
            # Room below: simply advance one character row.
            self._cy += self._ch
        else:
            # Simulate a vertical scroll: shift everything up one row...
            region = self._backing_image.crop((0, self._ch, self._device.width,
                                               self._device.height))
            self._backing_image.paste(region, (0, 0))
            # ...then blank the freshly exposed bottom row.
            self._canvas.rectangle((0, region.height, self._device.width,
                                    self._device.height), fill=self.default_bgcolor)
        self.flush()
        if self.animate:
            time.sleep(0.2)
time.sleep(0.2) | def function[newline, parameter[self]]:
constant[
Advances the cursor position ot the left hand side, and to the next
line. If the cursor is on the lowest line, the displayed contents are
scrolled, causing the top line to be lost.
]
call[name[self].carriage_return, parameter[]]
if compare[binary_operation[name[self]._cy + binary_operation[constant[2] * name[self]._ch]] greater_or_equal[>=] name[self]._device.height] begin[:]
variable[copy] assign[=] call[name[self]._backing_image.crop, parameter[tuple[[<ast.Constant object at 0x7da1b07fa560>, <ast.Attribute object at 0x7da1b07fb070>, <ast.Attribute object at 0x7da1b07f8280>, <ast.Attribute object at 0x7da1b07fa680>]]]]
call[name[self]._backing_image.paste, parameter[name[copy], tuple[[<ast.Constant object at 0x7da1b07f9e70>, <ast.Constant object at 0x7da1b07fa650>]]]]
call[name[self]._canvas.rectangle, parameter[tuple[[<ast.Constant object at 0x7da1b07f9f30>, <ast.Attribute object at 0x7da1b07fb4c0>, <ast.Attribute object at 0x7da1b07fafe0>, <ast.Attribute object at 0x7da1b07f8610>]]]]
call[name[self].flush, parameter[]]
if name[self].animate begin[:]
call[name[time].sleep, parameter[constant[0.2]]] | keyword[def] identifier[newline] ( identifier[self] ):
literal[string]
identifier[self] . identifier[carriage_return] ()
keyword[if] identifier[self] . identifier[_cy] +( literal[int] * identifier[self] . identifier[_ch] )>= identifier[self] . identifier[_device] . identifier[height] :
identifier[copy] = identifier[self] . identifier[_backing_image] . identifier[crop] (( literal[int] , identifier[self] . identifier[_ch] , identifier[self] . identifier[_device] . identifier[width] ,
identifier[self] . identifier[_device] . identifier[height] ))
identifier[self] . identifier[_backing_image] . identifier[paste] ( identifier[copy] ,( literal[int] , literal[int] ))
identifier[self] . identifier[_canvas] . identifier[rectangle] (( literal[int] , identifier[copy] . identifier[height] , identifier[self] . identifier[_device] . identifier[width] ,
identifier[self] . identifier[_device] . identifier[height] ), identifier[fill] = identifier[self] . identifier[default_bgcolor] )
keyword[else] :
identifier[self] . identifier[_cy] += identifier[self] . identifier[_ch]
identifier[self] . identifier[flush] ()
keyword[if] identifier[self] . identifier[animate] :
identifier[time] . identifier[sleep] ( literal[int] ) | def newline(self):
"""
Advances the cursor position ot the left hand side, and to the next
line. If the cursor is on the lowest line, the displayed contents are
scrolled, causing the top line to be lost.
"""
self.carriage_return()
if self._cy + 2 * self._ch >= self._device.height:
# Simulate a vertical scroll
copy = self._backing_image.crop((0, self._ch, self._device.width, self._device.height))
self._backing_image.paste(copy, (0, 0))
self._canvas.rectangle((0, copy.height, self._device.width, self._device.height), fill=self.default_bgcolor) # depends on [control=['if'], data=[]]
else:
self._cy += self._ch
self.flush()
if self.animate:
time.sleep(0.2) # depends on [control=['if'], data=[]] |
def _get_batting_order_starting_flg(cls, batter):
"""
get batting order and starting member flg
:param batter: Beautifulsoup object(batter element)
:return: batting order(1-9), starting member flg(True or False)
"""
bo = batter.get('bo', None)
if not bo or len(bo) != 3:
return False, False
batting_order = bo[:1]
starting = True if bo[1:3] == '00' else False
return batting_order, starting | def function[_get_batting_order_starting_flg, parameter[cls, batter]]:
constant[
get batting order and starting member flg
:param batter: Beautifulsoup object(batter element)
:return: batting order(1-9), starting member flg(True or False)
]
variable[bo] assign[=] call[name[batter].get, parameter[constant[bo], constant[None]]]
if <ast.BoolOp object at 0x7da1b25eed10> begin[:]
return[tuple[[<ast.Constant object at 0x7da1b25ec550>, <ast.Constant object at 0x7da1b25ed600>]]]
variable[batting_order] assign[=] call[name[bo]][<ast.Slice object at 0x7da1b25ec040>]
variable[starting] assign[=] <ast.IfExp object at 0x7da1b25eeb00>
return[tuple[[<ast.Name object at 0x7da18eb556c0>, <ast.Name object at 0x7da18eb57640>]]] | keyword[def] identifier[_get_batting_order_starting_flg] ( identifier[cls] , identifier[batter] ):
literal[string]
identifier[bo] = identifier[batter] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] keyword[not] identifier[bo] keyword[or] identifier[len] ( identifier[bo] )!= literal[int] :
keyword[return] keyword[False] , keyword[False]
identifier[batting_order] = identifier[bo] [: literal[int] ]
identifier[starting] = keyword[True] keyword[if] identifier[bo] [ literal[int] : literal[int] ]== literal[string] keyword[else] keyword[False]
keyword[return] identifier[batting_order] , identifier[starting] | def _get_batting_order_starting_flg(cls, batter):
"""
get batting order and starting member flg
:param batter: Beautifulsoup object(batter element)
:return: batting order(1-9), starting member flg(True or False)
"""
bo = batter.get('bo', None)
if not bo or len(bo) != 3:
return (False, False) # depends on [control=['if'], data=[]]
batting_order = bo[:1]
starting = True if bo[1:3] == '00' else False
return (batting_order, starting) |
def add_defined_filter(self, *value):
        """
        Attach a DefinedFilter expression to this query. The filter is
        considered true when the :class:`smc.monitoring.values.Value`
        instance has a value.
        .. seealso:: :class:`smc_monitoring.models.filters.DefinedFilter` for examples.
        :param Value value: single value for the filter. Value is of type
            :class:`smc_monitoring.models.values.Value`.
        :type: list(QueryFilter)
        :rtype: DefinedFilter
        """
        defined = DefinedFilter(*value)
        self.update_filter(defined)
        return defined
constant[
Add a DefinedFilter expression to the query. This filter will be
considered true if the :class:`smc.monitoring.values.Value` instance
has a value.
.. seealso:: :class:`smc_monitoring.models.filters.DefinedFilter` for examples.
:param Value value: single value for the filter. Value is of type
:class:`smc_monitoring.models.values.Value`.
:type: list(QueryFilter)
:rtype: DefinedFilter
]
variable[filt] assign[=] call[name[DefinedFilter], parameter[<ast.Starred object at 0x7da1b1a97d30>]]
call[name[self].update_filter, parameter[name[filt]]]
return[name[filt]] | keyword[def] identifier[add_defined_filter] ( identifier[self] ,* identifier[value] ):
literal[string]
identifier[filt] = identifier[DefinedFilter] (* identifier[value] )
identifier[self] . identifier[update_filter] ( identifier[filt] )
keyword[return] identifier[filt] | def add_defined_filter(self, *value):
"""
Add a DefinedFilter expression to the query. This filter will be
considered true if the :class:`smc.monitoring.values.Value` instance
has a value.
.. seealso:: :class:`smc_monitoring.models.filters.DefinedFilter` for examples.
:param Value value: single value for the filter. Value is of type
:class:`smc_monitoring.models.values.Value`.
:type: list(QueryFilter)
:rtype: DefinedFilter
"""
filt = DefinedFilter(*value)
self.update_filter(filt)
return filt |
def balance(self):
        """
        Access the balance
        :returns: twilio.rest.api.v2010.account.balance.BalanceList
        :rtype: twilio.rest.api.v2010.account.balance.BalanceList
        """
        # Lazily construct the list on first access and memoize it.
        if self._balance is None:
            self._balance = BalanceList(
                self._version,
                account_sid=self._solution['sid'],
            )
        return self._balance
constant[
Access the balance
:returns: twilio.rest.api.v2010.account.balance.BalanceList
:rtype: twilio.rest.api.v2010.account.balance.BalanceList
]
if compare[name[self]._balance is constant[None]] begin[:]
name[self]._balance assign[=] call[name[BalanceList], parameter[name[self]._version]]
return[name[self]._balance] | keyword[def] identifier[balance] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_balance] keyword[is] keyword[None] :
identifier[self] . identifier[_balance] = identifier[BalanceList] ( identifier[self] . identifier[_version] , identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],)
keyword[return] identifier[self] . identifier[_balance] | def balance(self):
"""
Access the balance
:returns: twilio.rest.api.v2010.account.balance.BalanceList
:rtype: twilio.rest.api.v2010.account.balance.BalanceList
"""
if self._balance is None:
self._balance = BalanceList(self._version, account_sid=self._solution['sid']) # depends on [control=['if'], data=[]]
return self._balance |
def convert_dates(self):
        '''
        This function converts the estimated "time_before_present" properties of all nodes
        to numerical dates stored in the "numdate" attribute. This date is further converted
        into a human readable date string in format %Y-%m-%d assuming the usual calendar.
        Returns
        -------
        None
            All manipulations are done in place on the tree
        '''
        from datetime import datetime, timedelta
        now = numeric_date()
        for node in self.tree.find_clades():
            years_bp = self.date2dist.to_years(node.time_before_present)
            # A negative age places the node in the future; that is only
            # expected for nodes already flagged as bad branches.
            if years_bp < 0 and self.real_dates:
                if not hasattr(node, "bad_branch") or node.bad_branch is False:
                    self.logger("ClockTree.convert_dates -- WARNING: The node is later than today, but it is not "
                                "marked as \"BAD\", which indicates the error in the "
                                "likelihood optimization.", 4, warn=True)
                else:
                    self.logger("ClockTree.convert_dates -- WARNING: node which is marked as \"BAD\" optimized "
                                "later than present day", 4, warn=True)
            node.numdate = now - years_bp
            # set the human-readable date
            year = np.floor(node.numdate)
            days = max(0, 365.25 * (node.numdate - year) - 1)
            try:
                # np.floor returns a float; datetime requires an int year.
                # (The original passed the float directly, which raises
                # TypeError and silently forced the approximate branch below.)
                n_date = datetime(int(year), 1, 1) + timedelta(days=days)
                node.date = datetime.strftime(n_date, "%Y-%m-%d")
            except (ValueError, OverflowError):
                # Year outside datetime's supported range: approximate by
                # anchoring at 1900 and substituting the real year; this does
                # not account for leap-year layout of the anchor year.
                n_date = datetime(1900, 1, 1) + timedelta(days=days)
                node.date = "%04d-%02d-%02d" % (year, n_date.month, n_date.day)
constant[
This function converts the estimated "time_before_present" properties of all nodes
to numerical dates stored in the "numdate" attribute. This date is further converted
into a human readable date string in format %Y-%m-%d assuming the usual calendar.
Returns
-------
None
All manipulations are done in place on the tree
]
from relative_module[datetime] import module[datetime], module[timedelta]
variable[now] assign[=] call[name[numeric_date], parameter[]]
for taget[name[node]] in starred[call[name[self].tree.find_clades, parameter[]]] begin[:]
variable[years_bp] assign[=] call[name[self].date2dist.to_years, parameter[name[node].time_before_present]]
if <ast.BoolOp object at 0x7da20cabe050> begin[:]
if <ast.BoolOp object at 0x7da20e957370> begin[:]
call[name[self].logger, parameter[constant[ClockTree.convert_dates -- WARNING: The node is later than today, but it is not marked as "BAD", which indicates the error in the likelihood optimization.], constant[4]]]
name[node].numdate assign[=] binary_operation[name[now] - name[years_bp]]
variable[year] assign[=] call[name[np].floor, parameter[name[node].numdate]]
variable[days] assign[=] call[name[max], parameter[constant[0], binary_operation[binary_operation[constant[365.25] * binary_operation[name[node].numdate - name[year]]] - constant[1]]]]
<ast.Try object at 0x7da20e9546d0> | keyword[def] identifier[convert_dates] ( identifier[self] ):
literal[string]
keyword[from] identifier[datetime] keyword[import] identifier[datetime] , identifier[timedelta]
identifier[now] = identifier[numeric_date] ()
keyword[for] identifier[node] keyword[in] identifier[self] . identifier[tree] . identifier[find_clades] ():
identifier[years_bp] = identifier[self] . identifier[date2dist] . identifier[to_years] ( identifier[node] . identifier[time_before_present] )
keyword[if] identifier[years_bp] < literal[int] keyword[and] identifier[self] . identifier[real_dates] :
keyword[if] keyword[not] identifier[hasattr] ( identifier[node] , literal[string] ) keyword[or] identifier[node] . identifier[bad_branch] keyword[is] keyword[False] :
identifier[self] . identifier[logger] ( literal[string]
literal[string]
literal[string] , literal[int] , identifier[warn] = keyword[True] )
keyword[else] :
identifier[self] . identifier[logger] ( literal[string]
literal[string] , literal[int] , identifier[warn] = keyword[True] )
identifier[node] . identifier[numdate] = identifier[now] - identifier[years_bp]
identifier[year] = identifier[np] . identifier[floor] ( identifier[node] . identifier[numdate] )
identifier[days] = identifier[max] ( literal[int] , literal[int] *( identifier[node] . identifier[numdate] - identifier[year] )- literal[int] )
keyword[try] :
identifier[n_date] = identifier[datetime] ( identifier[year] , literal[int] , literal[int] )+ identifier[timedelta] ( identifier[days] = identifier[days] )
identifier[node] . identifier[date] = identifier[datetime] . identifier[strftime] ( identifier[n_date] , literal[string] )
keyword[except] :
identifier[n_date] = identifier[datetime] ( literal[int] , literal[int] , literal[int] )+ identifier[timedelta] ( identifier[days] = identifier[days] )
identifier[node] . identifier[date] = literal[string] %( identifier[year] , identifier[n_date] . identifier[month] , identifier[n_date] . identifier[day] ) | def convert_dates(self):
"""
This function converts the estimated "time_before_present" properties of all nodes
to numerical dates stored in the "numdate" attribute. This date is further converted
into a human readable date string in format %Y-%m-%d assuming the usual calendar.
Returns
-------
None
All manipulations are done in place on the tree
"""
from datetime import datetime, timedelta
now = numeric_date()
for node in self.tree.find_clades():
years_bp = self.date2dist.to_years(node.time_before_present)
if years_bp < 0 and self.real_dates:
if not hasattr(node, 'bad_branch') or node.bad_branch is False:
self.logger('ClockTree.convert_dates -- WARNING: The node is later than today, but it is not marked as "BAD", which indicates the error in the likelihood optimization.', 4, warn=True) # depends on [control=['if'], data=[]]
else:
self.logger('ClockTree.convert_dates -- WARNING: node which is marked as "BAD" optimized later than present day', 4, warn=True) # depends on [control=['if'], data=[]]
node.numdate = now - years_bp
# set the human-readable date
year = np.floor(node.numdate)
days = max(0, 365.25 * (node.numdate - year) - 1)
try: # datetime will only operate on dates after 1900
n_date = datetime(year, 1, 1) + timedelta(days=days)
node.date = datetime.strftime(n_date, '%Y-%m-%d') # depends on [control=['try'], data=[]]
except:
# this is the approximation not accounting for gap years etc
n_date = datetime(1900, 1, 1) + timedelta(days=days)
node.date = '%04d-%02d-%02d' % (year, n_date.month, n_date.day) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['node']] |
def login(self, **params):
        """
        **login**
        Use the current credentials to get a valid Gett access token.
        Input:
        * A dict of parameters to use for the login attempt (optional)
        Output:
        * ``True``
        Example::
            if client.user.login():
                print "You have %s bytes of storage remaining." % ( client.user.storage_limit - client_user.storage_used )
        """
        # Fall back to the stored credentials when no parameters were given.
        credentials = params or {
            "apikey": self.apikey,
            "email": self.email,
            "password": self.password
        }
        response = GettRequest().post("/users/login", credentials)
        if response.http_status == 200:
            body = response.response
            self._access_token = body['accesstoken']
            self.refresh_token = body['refreshtoken']
            self.access_token_expires = int(time()) + body['expires']
            user = body['user']
            self.userid = user['userid']
            self.fullname = user['fullname']
            self.storage_used = user['storage']['used']
            self.storage_limit = user['storage']['limit']
            return True
constant[
**login**
Use the current credentials to get a valid Gett access token.
Input:
* A dict of parameters to use for the login attempt (optional)
Output:
* ``True``
Example::
if client.user.login():
print "You have %s bytes of storage remaining." % ( client.user.storage_limit - client_user.storage_used )
]
if <ast.UnaryOp object at 0x7da1b1340310> begin[:]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b13400d0>, <ast.Constant object at 0x7da1b1341660>, <ast.Constant object at 0x7da1b1341ae0>], [<ast.Attribute object at 0x7da1b13403d0>, <ast.Attribute object at 0x7da1b1341a80>, <ast.Attribute object at 0x7da1b1343970>]]
variable[response] assign[=] call[call[name[GettRequest], parameter[]].post, parameter[constant[/users/login], name[params]]]
if compare[name[response].http_status equal[==] constant[200]] begin[:]
name[self]._access_token assign[=] call[name[response].response][constant[accesstoken]]
name[self].refresh_token assign[=] call[name[response].response][constant[refreshtoken]]
name[self].access_token_expires assign[=] binary_operation[call[name[int], parameter[call[name[time], parameter[]]]] + call[name[response].response][constant[expires]]]
name[self].userid assign[=] call[call[name[response].response][constant[user]]][constant[userid]]
name[self].fullname assign[=] call[call[name[response].response][constant[user]]][constant[fullname]]
name[self].storage_used assign[=] call[call[call[name[response].response][constant[user]]][constant[storage]]][constant[used]]
name[self].storage_limit assign[=] call[call[call[name[response].response][constant[user]]][constant[storage]]][constant[limit]]
return[constant[True]] | keyword[def] identifier[login] ( identifier[self] ,** identifier[params] ):
literal[string]
keyword[if] keyword[not] identifier[params] :
identifier[params] ={
literal[string] : identifier[self] . identifier[apikey] ,
literal[string] : identifier[self] . identifier[email] ,
literal[string] : identifier[self] . identifier[password]
}
identifier[response] = identifier[GettRequest] (). identifier[post] ( literal[string] , identifier[params] )
keyword[if] identifier[response] . identifier[http_status] == literal[int] :
identifier[self] . identifier[_access_token] = identifier[response] . identifier[response] [ literal[string] ]
identifier[self] . identifier[refresh_token] = identifier[response] . identifier[response] [ literal[string] ]
identifier[self] . identifier[access_token_expires] = identifier[int] ( identifier[time] ())+ identifier[response] . identifier[response] [ literal[string] ]
identifier[self] . identifier[userid] = identifier[response] . identifier[response] [ literal[string] ][ literal[string] ]
identifier[self] . identifier[fullname] = identifier[response] . identifier[response] [ literal[string] ][ literal[string] ]
identifier[self] . identifier[storage_used] = identifier[response] . identifier[response] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[self] . identifier[storage_limit] = identifier[response] . identifier[response] [ literal[string] ][ literal[string] ][ literal[string] ]
keyword[return] keyword[True] | def login(self, **params):
"""
**login**
Use the current credentials to get a valid Gett access token.
Input:
* A dict of parameters to use for the login attempt (optional)
Output:
* ``True``
Example::
if client.user.login():
print "You have %s bytes of storage remaining." % ( client.user.storage_limit - client_user.storage_used )
"""
if not params:
params = {'apikey': self.apikey, 'email': self.email, 'password': self.password} # depends on [control=['if'], data=[]]
response = GettRequest().post('/users/login', params)
if response.http_status == 200:
self._access_token = response.response['accesstoken']
self.refresh_token = response.response['refreshtoken']
self.access_token_expires = int(time()) + response.response['expires']
self.userid = response.response['user']['userid']
self.fullname = response.response['user']['fullname']
self.storage_used = response.response['user']['storage']['used']
self.storage_limit = response.response['user']['storage']['limit']
return True # depends on [control=['if'], data=[]] |
def process(self, metric):
        """
        Process a metric and send it to CloudWatch
        """
        if not boto:
            return
        collector = str(metric.getCollectorPath())
        metricname = str(metric.getMetricPath())
        # Forward the sample for every rule whose collector/metric pair matches.
        for rule in self.rules:
            self.log.debug(
                "Comparing Collector: [%s] with (%s) "
                "and Metric: [%s] with (%s)",
                str(rule['collector']), collector,
                str(rule['metric']), metricname)
            if str(rule['collector']) != collector or \
                    str(rule['metric']) != metricname:
                continue
            if rule['collect_by_instance'] and self.instance_id:
                self.send_metrics_to_cloudwatch(
                    rule, metric, {'InstanceId': self.instance_id})
            if rule['collect_without_dimension']:
                self.send_metrics_to_cloudwatch(rule, metric, {})
constant[
Process a metric and send it to CloudWatch
]
if <ast.UnaryOp object at 0x7da1b19f2980> begin[:]
return[None]
variable[collector] assign[=] call[name[str], parameter[call[name[metric].getCollectorPath, parameter[]]]]
variable[metricname] assign[=] call[name[str], parameter[call[name[metric].getMetricPath, parameter[]]]]
for taget[name[rule]] in starred[name[self].rules] begin[:]
call[name[self].log.debug, parameter[constant[Comparing Collector: [%s] with (%s) and Metric: [%s] with (%s)], call[name[str], parameter[call[name[rule]][constant[collector]]]], name[collector], call[name[str], parameter[call[name[rule]][constant[metric]]]], name[metricname]]]
if <ast.BoolOp object at 0x7da204961930> begin[:]
if <ast.BoolOp object at 0x7da204960490> begin[:]
call[name[self].send_metrics_to_cloudwatch, parameter[name[rule], name[metric], dictionary[[<ast.Constant object at 0x7da204961cc0>], [<ast.Attribute object at 0x7da204961bd0>]]]]
if call[name[rule]][constant[collect_without_dimension]] begin[:]
call[name[self].send_metrics_to_cloudwatch, parameter[name[rule], name[metric], dictionary[[], []]]] | keyword[def] identifier[process] ( identifier[self] , identifier[metric] ):
literal[string]
keyword[if] keyword[not] identifier[boto] :
keyword[return]
identifier[collector] = identifier[str] ( identifier[metric] . identifier[getCollectorPath] ())
identifier[metricname] = identifier[str] ( identifier[metric] . identifier[getMetricPath] ())
keyword[for] identifier[rule] keyword[in] identifier[self] . identifier[rules] :
identifier[self] . identifier[log] . identifier[debug] (
literal[string]
literal[string] ,
identifier[str] ( identifier[rule] [ literal[string] ]),
identifier[collector] ,
identifier[str] ( identifier[rule] [ literal[string] ]),
identifier[metricname]
)
keyword[if] (( identifier[str] ( identifier[rule] [ literal[string] ])== identifier[collector] keyword[and]
identifier[str] ( identifier[rule] [ literal[string] ])== identifier[metricname] )):
keyword[if] identifier[rule] [ literal[string] ] keyword[and] identifier[self] . identifier[instance_id] :
identifier[self] . identifier[send_metrics_to_cloudwatch] (
identifier[rule] ,
identifier[metric] ,
{ literal[string] : identifier[self] . identifier[instance_id] })
keyword[if] identifier[rule] [ literal[string] ]:
identifier[self] . identifier[send_metrics_to_cloudwatch] (
identifier[rule] ,
identifier[metric] ,
{}) | def process(self, metric):
"""
Process a metric and send it to CloudWatch
"""
if not boto:
return # depends on [control=['if'], data=[]]
collector = str(metric.getCollectorPath())
metricname = str(metric.getMetricPath())
# Send the data as ......
for rule in self.rules:
self.log.debug('Comparing Collector: [%s] with (%s) and Metric: [%s] with (%s)', str(rule['collector']), collector, str(rule['metric']), metricname)
if str(rule['collector']) == collector and str(rule['metric']) == metricname:
if rule['collect_by_instance'] and self.instance_id:
self.send_metrics_to_cloudwatch(rule, metric, {'InstanceId': self.instance_id}) # depends on [control=['if'], data=[]]
if rule['collect_without_dimension']:
self.send_metrics_to_cloudwatch(rule, metric, {}) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rule']] |
def close(self):
    """
    Close the job manager.

    After closing, no further jobs are assigned and no new job sets may
    be added; the active job set (if any) and every queued job set are
    cancelled. Calling close() more than once is a no-op.
    """
    if self._closed:
        return
    self._closed = True
    # Cancel the active job set first, then the queued ones in order.
    to_cancel = list(self._js_queue)
    if self._active_js is not None:
        to_cancel.insert(0, self._active_js)
    for job_set in to_cancel:
        job_set.cancel()
constant[
Closes the job manager. No more jobs will be assigned, no more job sets
will be added, and any queued or active job sets will be cancelled.
]
if name[self]._closed begin[:]
return[None]
name[self]._closed assign[=] constant[True]
if compare[name[self]._active_js is_not constant[None]] begin[:]
call[name[self]._active_js.cancel, parameter[]]
for taget[name[js]] in starred[name[self]._js_queue] begin[:]
call[name[js].cancel, parameter[]] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_closed] :
keyword[return]
identifier[self] . identifier[_closed] = keyword[True]
keyword[if] identifier[self] . identifier[_active_js] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_active_js] . identifier[cancel] ()
keyword[for] identifier[js] keyword[in] identifier[self] . identifier[_js_queue] :
identifier[js] . identifier[cancel] () | def close(self):
"""
Closes the job manager. No more jobs will be assigned, no more job sets
will be added, and any queued or active job sets will be cancelled.
"""
if self._closed:
return # depends on [control=['if'], data=[]]
self._closed = True
if self._active_js is not None:
self._active_js.cancel() # depends on [control=['if'], data=[]]
for js in self._js_queue:
js.cancel() # depends on [control=['for'], data=['js']] |
def clean_update_fields(self, index, update_fields):
    """
    Clean the list of update_fields based on the index being updated.

    If any field in the update_fields list is not in the set of properties
    defined by the index mapping for this model, then we ignore it. If
    a field _is_ in the mapping, but the underlying model field is a
    related object, and thereby not directly serializable, then this
    method will raise a ValueError.
    """
    search_fields = get_model_index_properties(self, index)
    clean_fields = [f for f in update_fields if f in search_fields]
    ignore = [f for f in update_fields if f not in search_fields]
    if ignore:
        # Reuse the already-computed list instead of rebuilding the
        # comprehension a second time.
        logger.debug("Ignoring fields from partial update: %s", ignore)
    for f in clean_fields:
        if not self._is_field_serializable(f):
            # Format eagerly: unlike logging calls, ValueError does not
            # interpolate printf-style arguments, so the original two-arg
            # form produced an un-substituted "'%s' ..." message.
            raise ValueError(
                "'%s' cannot be automatically serialized into a search "
                "document property. Please override "
                "as_search_document_update." % f
            )
    return clean_fields
constant[
Clean the list of update_fields based on the index being updated.
If any field in the update_fields list is not in the set of properties
defined by the index mapping for this model, then we ignore it. If
a field _is_ in the mapping, but the underlying model field is a
related object, and thereby not directly serializable, then this
method will raise a ValueError.
]
variable[search_fields] assign[=] call[name[get_model_index_properties], parameter[name[self], name[index]]]
variable[clean_fields] assign[=] <ast.ListComp object at 0x7da1b0faf6a0>
variable[ignore] assign[=] <ast.ListComp object at 0x7da1b0fae2c0>
if name[ignore] begin[:]
call[name[logger].debug, parameter[constant[Ignoring fields from partial update: %s], <ast.ListComp object at 0x7da1b0fac100>]]
for taget[name[f]] in starred[name[clean_fields]] begin[:]
if <ast.UnaryOp object at 0x7da1b0f586d0> begin[:]
<ast.Raise object at 0x7da1b0f5b280>
return[name[clean_fields]] | keyword[def] identifier[clean_update_fields] ( identifier[self] , identifier[index] , identifier[update_fields] ):
literal[string]
identifier[search_fields] = identifier[get_model_index_properties] ( identifier[self] , identifier[index] )
identifier[clean_fields] =[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[update_fields] keyword[if] identifier[f] keyword[in] identifier[search_fields] ]
identifier[ignore] =[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[update_fields] keyword[if] identifier[f] keyword[not] keyword[in] identifier[search_fields] ]
keyword[if] identifier[ignore] :
identifier[logger] . identifier[debug] (
literal[string] ,
[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[update_fields] keyword[if] identifier[f] keyword[not] keyword[in] identifier[search_fields] ],
)
keyword[for] identifier[f] keyword[in] identifier[clean_fields] :
keyword[if] keyword[not] identifier[self] . identifier[_is_field_serializable] ( identifier[f] ):
keyword[raise] identifier[ValueError] (
literal[string] ,
identifier[f] ,
)
keyword[return] identifier[clean_fields] | def clean_update_fields(self, index, update_fields):
"""
Clean the list of update_fields based on the index being updated.
If any field in the update_fields list is not in the set of properties
defined by the index mapping for this model, then we ignore it. If
a field _is_ in the mapping, but the underlying model field is a
related object, and thereby not directly serializable, then this
method will raise a ValueError.
"""
search_fields = get_model_index_properties(self, index)
clean_fields = [f for f in update_fields if f in search_fields]
ignore = [f for f in update_fields if f not in search_fields]
if ignore:
logger.debug('Ignoring fields from partial update: %s', [f for f in update_fields if f not in search_fields]) # depends on [control=['if'], data=[]]
for f in clean_fields:
if not self._is_field_serializable(f):
raise ValueError("'%s' cannot be automatically serialized into a search document property. Please override as_search_document_update.", f) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
return clean_fields |
def _validate_isvalid_composition(self, isvalid_composition, field, value):
"""Checks for valid specification of composition.
Args:
isvalid_composition (bool): flag from schema indicating
composition to be checked.
field (str): 'composition'
value (dict): dictionary of composition
The rule's arguments are validated against this schema:
{'isvalid_composition': {'type': 'bool'}, 'field': {'type': 'str'},
'value': {'type': 'dict'}}
"""
sum_amount = 0.0
if value['kind'] in ['mass fraction', 'mole fraction']:
low_lim = 0.0
up_lim = 1.0
total_amount = 1.0
elif value['kind'] in ['mole percent']:
low_lim = 0.0
up_lim = 100.0
total_amount = 100.0
else:
self._error(field, 'composition kind must be "mole percent", "mass fraction", or '
'"mole fraction"')
return False
for sp in value['species']:
amount = sp['amount'][0]
sum_amount += amount
# Check that amount within bounds, based on kind specified
if amount < low_lim:
self._error(field, 'Species ' + sp['species-name'] + ' ' +
value['kind'] + ' must be greater than {:.1f}'.format(low_lim)
)
elif amount > up_lim:
self._error(field, 'Species ' + sp['species-name'] + ' ' +
value['kind'] + ' must be less than {:.1f}'.format(up_lim)
)
# Make sure mole/mass fraction sum to 1
if not np.isclose(total_amount, sum_amount):
self._error(field, 'Species ' + value['kind'] +
's do not sum to {:.1f}: '.format(total_amount) +
'{:f}'.format(sum_amount)
) | def function[_validate_isvalid_composition, parameter[self, isvalid_composition, field, value]]:
constant[Checks for valid specification of composition.
Args:
isvalid_composition (bool): flag from schema indicating
composition to be checked.
field (str): 'composition'
value (dict): dictionary of composition
The rule's arguments are validated against this schema:
{'isvalid_composition': {'type': 'bool'}, 'field': {'type': 'str'},
'value': {'type': 'dict'}}
]
variable[sum_amount] assign[=] constant[0.0]
if compare[call[name[value]][constant[kind]] in list[[<ast.Constant object at 0x7da1b244ffd0>, <ast.Constant object at 0x7da1b244eb90>]]] begin[:]
variable[low_lim] assign[=] constant[0.0]
variable[up_lim] assign[=] constant[1.0]
variable[total_amount] assign[=] constant[1.0]
for taget[name[sp]] in starred[call[name[value]][constant[species]]] begin[:]
variable[amount] assign[=] call[call[name[sp]][constant[amount]]][constant[0]]
<ast.AugAssign object at 0x7da1b244fbb0>
if compare[name[amount] less[<] name[low_lim]] begin[:]
call[name[self]._error, parameter[name[field], binary_operation[binary_operation[binary_operation[binary_operation[constant[Species ] + call[name[sp]][constant[species-name]]] + constant[ ]] + call[name[value]][constant[kind]]] + call[constant[ must be greater than {:.1f}].format, parameter[name[low_lim]]]]]]
if <ast.UnaryOp object at 0x7da1b245aef0> begin[:]
call[name[self]._error, parameter[name[field], binary_operation[binary_operation[binary_operation[constant[Species ] + call[name[value]][constant[kind]]] + call[constant[s do not sum to {:.1f}: ].format, parameter[name[total_amount]]]] + call[constant[{:f}].format, parameter[name[sum_amount]]]]]] | keyword[def] identifier[_validate_isvalid_composition] ( identifier[self] , identifier[isvalid_composition] , identifier[field] , identifier[value] ):
literal[string]
identifier[sum_amount] = literal[int]
keyword[if] identifier[value] [ literal[string] ] keyword[in] [ literal[string] , literal[string] ]:
identifier[low_lim] = literal[int]
identifier[up_lim] = literal[int]
identifier[total_amount] = literal[int]
keyword[elif] identifier[value] [ literal[string] ] keyword[in] [ literal[string] ]:
identifier[low_lim] = literal[int]
identifier[up_lim] = literal[int]
identifier[total_amount] = literal[int]
keyword[else] :
identifier[self] . identifier[_error] ( identifier[field] , literal[string]
literal[string] )
keyword[return] keyword[False]
keyword[for] identifier[sp] keyword[in] identifier[value] [ literal[string] ]:
identifier[amount] = identifier[sp] [ literal[string] ][ literal[int] ]
identifier[sum_amount] += identifier[amount]
keyword[if] identifier[amount] < identifier[low_lim] :
identifier[self] . identifier[_error] ( identifier[field] , literal[string] + identifier[sp] [ literal[string] ]+ literal[string] +
identifier[value] [ literal[string] ]+ literal[string] . identifier[format] ( identifier[low_lim] )
)
keyword[elif] identifier[amount] > identifier[up_lim] :
identifier[self] . identifier[_error] ( identifier[field] , literal[string] + identifier[sp] [ literal[string] ]+ literal[string] +
identifier[value] [ literal[string] ]+ literal[string] . identifier[format] ( identifier[up_lim] )
)
keyword[if] keyword[not] identifier[np] . identifier[isclose] ( identifier[total_amount] , identifier[sum_amount] ):
identifier[self] . identifier[_error] ( identifier[field] , literal[string] + identifier[value] [ literal[string] ]+
literal[string] . identifier[format] ( identifier[total_amount] )+
literal[string] . identifier[format] ( identifier[sum_amount] )
) | def _validate_isvalid_composition(self, isvalid_composition, field, value):
"""Checks for valid specification of composition.
Args:
isvalid_composition (bool): flag from schema indicating
composition to be checked.
field (str): 'composition'
value (dict): dictionary of composition
The rule's arguments are validated against this schema:
{'isvalid_composition': {'type': 'bool'}, 'field': {'type': 'str'},
'value': {'type': 'dict'}}
"""
sum_amount = 0.0
if value['kind'] in ['mass fraction', 'mole fraction']:
low_lim = 0.0
up_lim = 1.0
total_amount = 1.0 # depends on [control=['if'], data=[]]
elif value['kind'] in ['mole percent']:
low_lim = 0.0
up_lim = 100.0
total_amount = 100.0 # depends on [control=['if'], data=[]]
else:
self._error(field, 'composition kind must be "mole percent", "mass fraction", or "mole fraction"')
return False
for sp in value['species']:
amount = sp['amount'][0]
sum_amount += amount
# Check that amount within bounds, based on kind specified
if amount < low_lim:
self._error(field, 'Species ' + sp['species-name'] + ' ' + value['kind'] + ' must be greater than {:.1f}'.format(low_lim)) # depends on [control=['if'], data=['low_lim']]
elif amount > up_lim:
self._error(field, 'Species ' + sp['species-name'] + ' ' + value['kind'] + ' must be less than {:.1f}'.format(up_lim)) # depends on [control=['if'], data=['up_lim']] # depends on [control=['for'], data=['sp']]
# Make sure mole/mass fraction sum to 1
if not np.isclose(total_amount, sum_amount):
self._error(field, 'Species ' + value['kind'] + 's do not sum to {:.1f}: '.format(total_amount) + '{:f}'.format(sum_amount)) # depends on [control=['if'], data=[]] |
async def fetch_member(self, member_id):
    """|coro|

    Retrieves a :class:`Member` from a guild ID, and a member ID.

    .. note::

        This method is an API call. For general usage, consider :meth:`get_member` instead.

    Parameters
    -----------
    member_id: :class:`int`
        The member's ID to fetch from.

    Raises
    -------
    Forbidden
        You do not have access to the guild.
    HTTPException
        Getting the guild failed.

    Returns
    --------
    :class:`Member`
        The member from the member ID.
    """
    # HTTP GET against the guild-member endpoint, then wrap the raw
    # payload in a Member bound to this guild and the shared state.
    data = await self._state.http.get_member(self.id, member_id)
    return Member(data=data, state=self._state, guild=self)
literal[string]
identifier[data] = keyword[await] identifier[self] . identifier[_state] . identifier[http] . identifier[get_member] ( identifier[self] . identifier[id] , identifier[member_id] )
keyword[return] identifier[Member] ( identifier[data] = identifier[data] , identifier[state] = identifier[self] . identifier[_state] , identifier[guild] = identifier[self] ) | async def fetch_member(self, member_id):
"""|coro|
Retreives a :class:`Member` from a guild ID, and a member ID.
.. note::
This method is an API call. For general usage, consider :meth:`get_member` instead.
Parameters
-----------
member_id: :class:`int`
The member's ID to fetch from.
Raises
-------
Forbidden
You do not have access to the guild.
HTTPException
Getting the guild failed.
Returns
--------
:class:`Member`
The member from the member ID.
"""
data = await self._state.http.get_member(self.id, member_id)
return Member(data=data, state=self._state, guild=self) |
def put(self, request, **resources):
    """ Default PUT method. Uses self form. Allow bulk update.

    :return object: changed instance or raise form's error
    """
    if not self._meta.form:
        return None

    name = self._meta.name
    if name not in resources or not resources[name]:
        raise HttpError(
            "Resource not found.", status=status.HTTP_404_NOT_FOUND)

    updated = UpdatedList()
    for instance in as_tuple(resources.pop(name)):
        form = self._meta.form(data=request.data, instance=instance, **resources)
        if not form.is_valid():
            raise FormError(form)
        updated.append(form.save())

    # A bulk update returns the list; a single update returns the instance.
    if len(updated) > 1:
        return updated
    return updated[-1]
constant[ Default PUT method. Uses self form. Allow bulk update.
:return object: changed instance or raise form's error
]
if <ast.UnaryOp object at 0x7da18c4cf9d0> begin[:]
return[constant[None]]
if <ast.BoolOp object at 0x7da18c4cc6d0> begin[:]
<ast.Raise object at 0x7da18c4cd4b0>
variable[resource] assign[=] call[name[resources].pop, parameter[name[self]._meta.name]]
variable[updated] assign[=] call[name[UpdatedList], parameter[]]
for taget[name[o]] in starred[call[name[as_tuple], parameter[name[resource]]]] begin[:]
variable[form] assign[=] call[name[self]._meta.form, parameter[]]
if <ast.UnaryOp object at 0x7da2044c0850> begin[:]
<ast.Raise object at 0x7da2044c07f0>
call[name[updated].append, parameter[call[name[form].save, parameter[]]]]
return[<ast.IfExp object at 0x7da2044c3c40>] | keyword[def] identifier[put] ( identifier[self] , identifier[request] ,** identifier[resources] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_meta] . identifier[form] :
keyword[return] keyword[None]
keyword[if] keyword[not] identifier[self] . identifier[_meta] . identifier[name] keyword[in] identifier[resources] keyword[or] keyword[not] identifier[resources] [ identifier[self] . identifier[_meta] . identifier[name] ]:
keyword[raise] identifier[HttpError] (
literal[string] , identifier[status] = identifier[status] . identifier[HTTP_404_NOT_FOUND] )
identifier[resource] = identifier[resources] . identifier[pop] ( identifier[self] . identifier[_meta] . identifier[name] )
identifier[updated] = identifier[UpdatedList] ()
keyword[for] identifier[o] keyword[in] identifier[as_tuple] ( identifier[resource] ):
identifier[form] = identifier[self] . identifier[_meta] . identifier[form] ( identifier[data] = identifier[request] . identifier[data] , identifier[instance] = identifier[o] ,** identifier[resources] )
keyword[if] keyword[not] identifier[form] . identifier[is_valid] ():
keyword[raise] identifier[FormError] ( identifier[form] )
identifier[updated] . identifier[append] ( identifier[form] . identifier[save] ())
keyword[return] identifier[updated] keyword[if] identifier[len] ( identifier[updated] )> literal[int] keyword[else] identifier[updated] [- literal[int] ] | def put(self, request, **resources):
""" Default PUT method. Uses self form. Allow bulk update.
:return object: changed instance or raise form's error
"""
if not self._meta.form:
return None # depends on [control=['if'], data=[]]
if not self._meta.name in resources or not resources[self._meta.name]:
raise HttpError('Resource not found.', status=status.HTTP_404_NOT_FOUND) # depends on [control=['if'], data=[]]
resource = resources.pop(self._meta.name)
updated = UpdatedList()
for o in as_tuple(resource):
form = self._meta.form(data=request.data, instance=o, **resources)
if not form.is_valid():
raise FormError(form) # depends on [control=['if'], data=[]]
updated.append(form.save()) # depends on [control=['for'], data=['o']]
return updated if len(updated) > 1 else updated[-1] |
def _get_storage_vol(conn, pool, vol):
'''
Helper function getting a storage volume. Will throw a libvirtError
if the pool or the volume couldn't be found.
'''
pool_obj = conn.storagePoolLookupByName(pool)
return pool_obj.storageVolLookupByName(vol) | def function[_get_storage_vol, parameter[conn, pool, vol]]:
constant[
Helper function getting a storage volume. Will throw a libvirtError
if the pool or the volume couldn't be found.
]
variable[pool_obj] assign[=] call[name[conn].storagePoolLookupByName, parameter[name[pool]]]
return[call[name[pool_obj].storageVolLookupByName, parameter[name[vol]]]] | keyword[def] identifier[_get_storage_vol] ( identifier[conn] , identifier[pool] , identifier[vol] ):
literal[string]
identifier[pool_obj] = identifier[conn] . identifier[storagePoolLookupByName] ( identifier[pool] )
keyword[return] identifier[pool_obj] . identifier[storageVolLookupByName] ( identifier[vol] ) | def _get_storage_vol(conn, pool, vol):
"""
Helper function getting a storage volume. Will throw a libvirtError
if the pool or the volume couldn't be found.
"""
pool_obj = conn.storagePoolLookupByName(pool)
return pool_obj.storageVolLookupByName(vol) |
def add_route(self, gateway, network):
    """
    Add a route to engine. Specify gateway and network.
    If this is the default gateway, use a network address of
    0.0.0.0/0.

    .. note: This will fail if the gateway provided does not have a
        corresponding interface on the network.

    :param str gateway: gateway of an existing interface
    :param str network: network address in cidr format
    :raises EngineCommandFailed: invalid route, possibly no network
    :return: None
    """
    route_spec = {'gateway': gateway, 'network': network}
    self.make_request(
        EngineCommandFailed, method='create',
        resource='add_route', params=route_spec)
'network': network}) | def function[add_route, parameter[self, gateway, network]]:
constant[
Add a route to engine. Specify gateway and network.
If this is the default gateway, use a network address of
0.0.0.0/0.
.. note: This will fail if the gateway provided does not have a
corresponding interface on the network.
:param str gateway: gateway of an existing interface
:param str network: network address in cidr format
:raises EngineCommandFailed: invalid route, possibly no network
:return: None
]
call[name[self].make_request, parameter[name[EngineCommandFailed]]] | keyword[def] identifier[add_route] ( identifier[self] , identifier[gateway] , identifier[network] ):
literal[string]
identifier[self] . identifier[make_request] (
identifier[EngineCommandFailed] ,
identifier[method] = literal[string] ,
identifier[resource] = literal[string] ,
identifier[params] ={ literal[string] : identifier[gateway] ,
literal[string] : identifier[network] }) | def add_route(self, gateway, network):
"""
Add a route to engine. Specify gateway and network.
If this is the default gateway, use a network address of
0.0.0.0/0.
.. note: This will fail if the gateway provided does not have a
corresponding interface on the network.
:param str gateway: gateway of an existing interface
:param str network: network address in cidr format
:raises EngineCommandFailed: invalid route, possibly no network
:return: None
"""
self.make_request(EngineCommandFailed, method='create', resource='add_route', params={'gateway': gateway, 'network': network}) |
def agent_updated(self, context, payload):
    """Deal with agent updated RPC message."""
    try:
        if payload['admin_state_up']:
            #TODO(hareeshp): implement agent updated handling
            pass
    except KeyError as e:
        # Malformed messages lack the 'admin_state_up' key; log instead of
        # propagating so one bad payload cannot break the RPC consumer.
        LOG.error("Invalid payload format for received RPC message "
                  "`agent_updated`. Error is %(error)s. Payload is "
                  "%(payload)s", {'error': e, 'payload': payload})
constant[Deal with agent updated RPC message.]
<ast.Try object at 0x7da18dc988e0> | keyword[def] identifier[agent_updated] ( identifier[self] , identifier[context] , identifier[payload] ):
literal[string]
keyword[try] :
keyword[if] identifier[payload] [ literal[string] ]:
keyword[pass]
keyword[except] identifier[KeyError] keyword[as] identifier[e] :
identifier[LOG] . identifier[error] ( literal[string]
literal[string]
literal[string] ,{ literal[string] : identifier[e] , literal[string] : identifier[payload] }) | def agent_updated(self, context, payload):
"""Deal with agent updated RPC message."""
try:
if payload['admin_state_up']:
#TODO(hareeshp): implement agent updated handling
pass # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyError as e:
LOG.error('Invalid payload format for received RPC message `agent_updated`. Error is %(error)s. Payload is %(payload)s', {'error': e, 'payload': payload}) # depends on [control=['except'], data=['e']] |
def socket_parse(self, astr_destination):
    """
    Parse <astr_destination> for a '<host>:<port>' pattern.

    When a colon is present, the text before it is stored as the remote
    host and the text after it as the port, and datagram (socket) mode
    is switched on; otherwise socket mode is switched off.
    Returns True or False accordingly.
    """
    host, separator, port = astr_destination.partition(':')
    if separator:
        self._b_isSocket = True
        self._socketRemote = host
        self._socketPort = port
    else:
        self._b_isSocket = False
    return self._b_isSocket
constant[
Examines <astr_destination> and if of form <str1>:<str2> assumes
that <str1> is a host to send datagram comms to over port <str2>.
Returns True or False.
]
variable[t_socketInfo] assign[=] call[name[astr_destination].partition, parameter[constant[:]]]
if call[name[len], parameter[call[name[t_socketInfo]][constant[1]]]] begin[:]
name[self]._b_isSocket assign[=] constant[True]
name[self]._socketRemote assign[=] call[name[t_socketInfo]][constant[0]]
name[self]._socketPort assign[=] call[name[t_socketInfo]][constant[2]]
return[name[self]._b_isSocket] | keyword[def] identifier[socket_parse] ( identifier[self] , identifier[astr_destination] ):
literal[string]
identifier[t_socketInfo] = identifier[astr_destination] . identifier[partition] ( literal[string] )
keyword[if] identifier[len] ( identifier[t_socketInfo] [ literal[int] ]):
identifier[self] . identifier[_b_isSocket] = keyword[True]
identifier[self] . identifier[_socketRemote] = identifier[t_socketInfo] [ literal[int] ]
identifier[self] . identifier[_socketPort] = identifier[t_socketInfo] [ literal[int] ]
keyword[else] :
identifier[self] . identifier[_b_isSocket] = keyword[False]
keyword[return] identifier[self] . identifier[_b_isSocket] | def socket_parse(self, astr_destination):
"""
Examines <astr_destination> and if of form <str1>:<str2> assumes
that <str1> is a host to send datagram comms to over port <str2>.
Returns True or False.
"""
t_socketInfo = astr_destination.partition(':')
if len(t_socketInfo[1]):
self._b_isSocket = True
self._socketRemote = t_socketInfo[0]
self._socketPort = t_socketInfo[2] # depends on [control=['if'], data=[]]
else:
self._b_isSocket = False
return self._b_isSocket |
def conditional_probability_alive(self, frequency, recency, T):
    """
    Conditional probability alive.

    Compute the probability that a customer with history (frequency,
    recency, T) is currently alive.

    From https://www.researchgate.net/publication/247219660_Empirical_validation_and_comparison_of_models_for_customer_base_analysis
    Appendix A, eq. (5)

    Parameters
    ----------
    frequency: array or float
        historical frequency of customer.
    recency: array or float
        historical recency of customer.
    T: array or float
        age of the customer.

    Returns
    -------
    array:
        value representing probability of being alive
    """
    r, alpha, a, b = self._unload_params("r", "alpha", "a", "b")
    # Odds of having churned relative to being alive; P(alive) = 1/(1+odds).
    dead_odds = (a / (b + frequency)) * ((alpha + T) / (alpha + recency)) ** (r + frequency)
    return np.atleast_1d(1.0 / (1 + dead_odds))
constant[
Conditional probability alive.
Compute the probability that a customer with history (frequency,
recency, T) is currently alive.
From https://www.researchgate.net/publication/247219660_Empirical_validation_and_comparison_of_models_for_customer_base_analysis
Appendix A, eq. (5)
Parameters
----------
frequency: array or float
historical frequency of customer.
recency: array or float
historical recency of customer.
T: array or float
age of the customer.
Returns
-------
array:
value representing probability of being alive
]
<ast.Tuple object at 0x7da1b1d285e0> assign[=] call[name[self]._unload_params, parameter[constant[r], constant[alpha], constant[a], constant[b]]]
return[call[name[np].atleast_1d, parameter[binary_operation[constant[1.0] / binary_operation[constant[1] + binary_operation[binary_operation[name[a] / binary_operation[name[b] + name[frequency]]] * binary_operation[binary_operation[binary_operation[name[alpha] + name[T]] / binary_operation[name[alpha] + name[recency]]] ** binary_operation[name[r] + name[frequency]]]]]]]]] | keyword[def] identifier[conditional_probability_alive] ( identifier[self] , identifier[frequency] , identifier[recency] , identifier[T] ):
literal[string]
identifier[r] , identifier[alpha] , identifier[a] , identifier[b] = identifier[self] . identifier[_unload_params] ( literal[string] , literal[string] , literal[string] , literal[string] )
keyword[return] identifier[np] . identifier[atleast_1d] ( literal[int] /( literal[int] +( identifier[a] /( identifier[b] + identifier[frequency] ))*(( identifier[alpha] + identifier[T] )/( identifier[alpha] + identifier[recency] ))**( identifier[r] + identifier[frequency] ))) | def conditional_probability_alive(self, frequency, recency, T):
"""
Conditional probability alive.
Compute the probability that a customer with history (frequency,
recency, T) is currently alive.
From https://www.researchgate.net/publication/247219660_Empirical_validation_and_comparison_of_models_for_customer_base_analysis
Appendix A, eq. (5)
Parameters
----------
frequency: array or float
historical frequency of customer.
recency: array or float
historical recency of customer.
T: array or float
age of the customer.
Returns
-------
array:
value representing probability of being alive
"""
(r, alpha, a, b) = self._unload_params('r', 'alpha', 'a', 'b')
return np.atleast_1d(1.0 / (1 + a / (b + frequency) * ((alpha + T) / (alpha + recency)) ** (r + frequency))) |
def iter_elements(element_function, parent_to_parse, **kwargs):
    """
    Applies element_function to each of the sub-elements in parent_to_parse.
    The passed in function must take at least one element, and an optional
    list of kwargs which are relevant to each of the elements in the list:

        def elem_func(each_elem, **kwargs)

    :param element_function: callable applied to each child element
    :param parent_to_parse: element (or parseable source) whose children
        are iterated
    :return: the parsed parent element (possibly None)
    """
    parent = get_element(parent_to_parse)

    # Idiomatic callability test instead of hasattr(f, '__call__').
    if not callable(element_function):
        return parent

    # get_element may return None (nothing to iterate over).
    if parent is not None:
        for child in parent:
            element_function(child, **kwargs)

    return parent
constant[
Applies element_function to each of the sub-elements in parent_to_parse.
The passed in function must take at least one element, and an optional
list of kwargs which are relevant to each of the elements in the list:
def elem_func(each_elem, **kwargs)
]
variable[parent] assign[=] call[name[get_element], parameter[name[parent_to_parse]]]
if <ast.UnaryOp object at 0x7da1b2783130> begin[:]
return[name[parent]]
for taget[name[child]] in starred[<ast.IfExp object at 0x7da1b2780eb0>] begin[:]
call[name[element_function], parameter[name[child]]]
return[name[parent]] | keyword[def] identifier[iter_elements] ( identifier[element_function] , identifier[parent_to_parse] ,** identifier[kwargs] ):
literal[string]
identifier[parent] = identifier[get_element] ( identifier[parent_to_parse] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[element_function] , literal[string] ):
keyword[return] identifier[parent]
keyword[for] identifier[child] keyword[in] ([] keyword[if] identifier[parent] keyword[is] keyword[None] keyword[else] identifier[parent] ):
identifier[element_function] ( identifier[child] ,** identifier[kwargs] )
keyword[return] identifier[parent] | def iter_elements(element_function, parent_to_parse, **kwargs):
"""
Applies element_function to each of the sub-elements in parent_to_parse.
The passed in function must take at least one element, and an optional
list of kwargs which are relevant to each of the elements in the list:
def elem_func(each_elem, **kwargs)
"""
parent = get_element(parent_to_parse)
if not hasattr(element_function, '__call__'):
return parent # depends on [control=['if'], data=[]]
for child in [] if parent is None else parent:
element_function(child, **kwargs) # depends on [control=['for'], data=['child']]
return parent |
def encode(self, word):
    """Return the Phonem code for a word.

    Parameters
    ----------
    word : str
        The word to transform

    Returns
    -------
    str
        The Phonem value

    Examples
    --------
    >>> pe = Phonem()
    >>> pe.encode('Christopher')
    'CRYSDOVR'
    >>> pe.encode('Niall')
    'NYAL'
    >>> pe.encode('Smith')
    'SMYD'
    >>> pe.encode('Schmidt')
    'CMYD'
    """
    word = unicode_normalize('NFC', text_type(word.upper()))

    # Multi-character substitutions first, in table order, then the
    # single-character translation table.
    for pattern, replacement in self._substitutions:
        word = word.replace(pattern, replacement)
    word = word.translate(self._trans)

    # Collapse runs of repeated characters and keep only the letters
    # that belong to the Phonem output alphabet.
    collapsed = self._delete_consecutive_repeats(word)
    return ''.join(ch for ch in collapsed if ch in self._uc_set)
constant[Return the Phonem code for a word.
Parameters
----------
word : str
The word to transform
Returns
-------
str
The Phonem value
Examples
--------
>>> pe = Phonem()
>>> pe.encode('Christopher')
'CRYSDOVR'
>>> pe.encode('Niall')
'NYAL'
>>> pe.encode('Smith')
'SMYD'
>>> pe.encode('Schmidt')
'CMYD'
]
variable[word] assign[=] call[name[unicode_normalize], parameter[constant[NFC], call[name[text_type], parameter[call[name[word].upper, parameter[]]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b006aad0>, <ast.Name object at 0x7da1b006ab00>]]] in starred[name[self]._substitutions] begin[:]
variable[word] assign[=] call[name[word].replace, parameter[name[i], name[j]]]
variable[word] assign[=] call[name[word].translate, parameter[name[self]._trans]]
return[call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b006a5c0>]]] | keyword[def] identifier[encode] ( identifier[self] , identifier[word] ):
literal[string]
identifier[word] = identifier[unicode_normalize] ( literal[string] , identifier[text_type] ( identifier[word] . identifier[upper] ()))
keyword[for] identifier[i] , identifier[j] keyword[in] identifier[self] . identifier[_substitutions] :
identifier[word] = identifier[word] . identifier[replace] ( identifier[i] , identifier[j] )
identifier[word] = identifier[word] . identifier[translate] ( identifier[self] . identifier[_trans] )
keyword[return] literal[string] . identifier[join] (
identifier[c]
keyword[for] identifier[c] keyword[in] identifier[self] . identifier[_delete_consecutive_repeats] ( identifier[word] )
keyword[if] identifier[c] keyword[in] identifier[self] . identifier[_uc_set]
) | def encode(self, word):
"""Return the Phonem code for a word.
Parameters
----------
word : str
The word to transform
Returns
-------
str
The Phonem value
Examples
--------
>>> pe = Phonem()
>>> pe.encode('Christopher')
'CRYSDOVR'
>>> pe.encode('Niall')
'NYAL'
>>> pe.encode('Smith')
'SMYD'
>>> pe.encode('Schmidt')
'CMYD'
"""
word = unicode_normalize('NFC', text_type(word.upper()))
for (i, j) in self._substitutions:
word = word.replace(i, j) # depends on [control=['for'], data=[]]
word = word.translate(self._trans)
return ''.join((c for c in self._delete_consecutive_repeats(word) if c in self._uc_set)) |
def handle_deletions(self):
    """
    Manages handling deletions of objects that were previously managed by the initial data process but no longer
    managed. It does so by mantaining a list of receipts for model objects that are registered for deletion on
    each round of initial data processing. Any receipts that are from previous rounds and not the current
    round will be deleted, along with the model objects they point to.
    """
    # Deduplicate the registered objects by (content type, pk) so each object
    # yields exactly one receipt even if it was registered multiple times.
    deduplicated_objs = {}
    for model in self.model_objs_registered_for_deletion:
        key = '{0}:{1}'.format(
            ContentType.objects.get_for_model(model, for_concrete_model=False),
            model.id
        )
        deduplicated_objs[key] = model

    # Create receipts for every object registered for deletion, all stamped
    # with the same registration time for this round.
    now = timezone.now()
    registered_for_deletion_receipts = [
        RegisteredForDeletionReceipt(
            model_obj_type=ContentType.objects.get_for_model(model_obj, for_concrete_model=False),
            model_obj_id=model_obj.id,
            register_time=now)
        for model_obj in deduplicated_objs.values()
    ]
    # Do a bulk upsert on all of the receipts, updating their registration time.
    RegisteredForDeletionReceipt.objects.bulk_upsert(
        registered_for_deletion_receipts, ['model_obj_type_id', 'model_obj_id'], update_fields=['register_time'])

    # Any receipt whose register_time was not refreshed this round points at a
    # model object that is no longer managed: delete the object (best effort)
    # and then its receipt.
    for receipt in RegisteredForDeletionReceipt.objects.exclude(register_time=now):
        try:
            receipt.model_obj.delete()
        except Exception:  # noqa
            # The model object may no longer be there, its ctype may be invalid, or it might be protected.
            # Regardless, the model object cannot be deleted, so go ahead and delete its receipt.
            # Narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit
            # are not swallowed here.
            pass
        receipt.delete()
constant[
Manages handling deletions of objects that were previously managed by the initial data process but no longer
managed. It does so by mantaining a list of receipts for model objects that are registered for deletion on
each round of initial data processing. Any receipts that are from previous rounds and not the current
round will be deleted.
]
variable[deduplicated_objs] assign[=] dictionary[[], []]
for taget[name[model]] in starred[name[self].model_objs_registered_for_deletion] begin[:]
variable[key] assign[=] call[constant[{0}:{1}].format, parameter[call[name[ContentType].objects.get_for_model, parameter[name[model]]], name[model].id]]
call[name[deduplicated_objs]][name[key]] assign[=] name[model]
variable[now] assign[=] call[name[timezone].now, parameter[]]
variable[registered_for_deletion_receipts] assign[=] <ast.ListComp object at 0x7da18dc04070>
call[name[RegisteredForDeletionReceipt].objects.bulk_upsert, parameter[name[registered_for_deletion_receipts], list[[<ast.Constant object at 0x7da18eb55cf0>, <ast.Constant object at 0x7da20c991cf0>]]]]
for taget[name[receipt]] in starred[call[name[RegisteredForDeletionReceipt].objects.exclude, parameter[]]] begin[:]
<ast.Try object at 0x7da20c990ee0>
call[name[receipt].delete, parameter[]] | keyword[def] identifier[handle_deletions] ( identifier[self] ):
literal[string]
identifier[deduplicated_objs] ={}
keyword[for] identifier[model] keyword[in] identifier[self] . identifier[model_objs_registered_for_deletion] :
identifier[key] = literal[string] . identifier[format] (
identifier[ContentType] . identifier[objects] . identifier[get_for_model] ( identifier[model] , identifier[for_concrete_model] = keyword[False] ),
identifier[model] . identifier[id]
)
identifier[deduplicated_objs] [ identifier[key] ]= identifier[model]
identifier[now] = identifier[timezone] . identifier[now] ()
identifier[registered_for_deletion_receipts] =[
identifier[RegisteredForDeletionReceipt] (
identifier[model_obj_type] = identifier[ContentType] . identifier[objects] . identifier[get_for_model] ( identifier[model_obj] , identifier[for_concrete_model] = keyword[False] ),
identifier[model_obj_id] = identifier[model_obj] . identifier[id] ,
identifier[register_time] = identifier[now] )
keyword[for] identifier[model_obj] keyword[in] identifier[deduplicated_objs] . identifier[values] ()
]
identifier[RegisteredForDeletionReceipt] . identifier[objects] . identifier[bulk_upsert] (
identifier[registered_for_deletion_receipts] ,[ literal[string] , literal[string] ], identifier[update_fields] =[ literal[string] ])
keyword[for] identifier[receipt] keyword[in] identifier[RegisteredForDeletionReceipt] . identifier[objects] . identifier[exclude] ( identifier[register_time] = identifier[now] ):
keyword[try] :
identifier[receipt] . identifier[model_obj] . identifier[delete] ()
keyword[except] :
keyword[pass]
identifier[receipt] . identifier[delete] () | def handle_deletions(self):
"""
Manages handling deletions of objects that were previously managed by the initial data process but no longer
managed. It does so by mantaining a list of receipts for model objects that are registered for deletion on
each round of initial data processing. Any receipts that are from previous rounds and not the current
round will be deleted.
"""
deduplicated_objs = {}
for model in self.model_objs_registered_for_deletion:
key = '{0}:{1}'.format(ContentType.objects.get_for_model(model, for_concrete_model=False), model.id)
deduplicated_objs[key] = model # depends on [control=['for'], data=['model']]
# Create receipts for every object registered for deletion
now = timezone.now()
registered_for_deletion_receipts = [RegisteredForDeletionReceipt(model_obj_type=ContentType.objects.get_for_model(model_obj, for_concrete_model=False), model_obj_id=model_obj.id, register_time=now) for model_obj in deduplicated_objs.values()]
# Do a bulk upsert on all of the receipts, updating their registration time.
RegisteredForDeletionReceipt.objects.bulk_upsert(registered_for_deletion_receipts, ['model_obj_type_id', 'model_obj_id'], update_fields=['register_time'])
# Delete all receipts and their associated model objects that weren't updated
for receipt in RegisteredForDeletionReceipt.objects.exclude(register_time=now):
try:
receipt.model_obj.delete() # depends on [control=['try'], data=[]]
except: # noqa
# The model object may no longer be there, its ctype may be invalid, or it might be protected.
# Regardless, the model object cannot be deleted, so go ahead and delete its receipt.
pass # depends on [control=['except'], data=[]]
receipt.delete() # depends on [control=['for'], data=['receipt']] |
def is_vagrant_plugin_installed(plugin, use_sudo=False):
    """
    Return the list of installed vagrant plugins.

    Runs ``vagrant plugin list`` (optionally via sudo) and parses each output
    line of the form ``<name> (<version>)``.

    :param plugin: plugin name; currently unused, kept for backward
        compatibility with existing callers
    :param use_sudo: run the command with sudo when True
    :return: list of dicts with ``name`` and ``version`` keys
    """
    cmd = 'vagrant plugin list'
    if use_sudo:
        results = sudo(cmd)
    else:
        results = run(cmd)
    installed_plugins = []
    # The command result behaves like a string, so iterate over its lines
    # explicitly; iterating the result directly would yield characters.
    # (assumes run()/sudo() return string-like objects -- TODO confirm)
    for line in results.splitlines():
        # Do not shadow the ``plugin`` parameter with the match object.
        match = re.search(r'^(\S.*) \((.*)\)$', line)
        if match:  # skip header/footer lines that are not plugin entries
            # group(1) is the plugin name and group(2) the version; the
            # previous code used group(0)/group(1), which stored the whole
            # matched line as the name and the name as the version.
            installed_plugins.append({'name': match.group(1),
                                      'version': match.group(2)})
    return installed_plugins
constant[ checks if vagrant plugin is installed ]
variable[cmd] assign[=] constant[vagrant plugin list]
if name[use_sudo] begin[:]
variable[results] assign[=] call[name[sudo], parameter[name[cmd]]]
variable[installed_plugins] assign[=] list[[]]
for taget[name[line]] in starred[name[results]] begin[:]
variable[plugin] assign[=] call[name[re].search, parameter[constant[^(\S.*) \((.*)\)$], name[line]]]
call[name[installed_plugins].append, parameter[dictionary[[<ast.Constant object at 0x7da1b11e2fb0>, <ast.Constant object at 0x7da1b11e1600>], [<ast.Call object at 0x7da1b11e1c60>, <ast.Call object at 0x7da1b11e37f0>]]]]
return[name[installed_plugins]] | keyword[def] identifier[is_vagrant_plugin_installed] ( identifier[plugin] , identifier[use_sudo] = keyword[False] ):
literal[string]
identifier[cmd] = literal[string]
keyword[if] identifier[use_sudo] :
identifier[results] = identifier[sudo] ( identifier[cmd] )
keyword[else] :
identifier[results] = identifier[run] ( identifier[cmd] )
identifier[installed_plugins] =[]
keyword[for] identifier[line] keyword[in] identifier[results] :
identifier[plugin] = identifier[re] . identifier[search] ( literal[string] , identifier[line] )
identifier[installed_plugins] . identifier[append] ({ literal[string] : identifier[plugin] . identifier[group] ( literal[int] ),
literal[string] : identifier[plugin] . identifier[group] ( literal[int] )})
keyword[return] identifier[installed_plugins] | def is_vagrant_plugin_installed(plugin, use_sudo=False):
""" checks if vagrant plugin is installed """
cmd = 'vagrant plugin list'
if use_sudo:
results = sudo(cmd) # depends on [control=['if'], data=[]]
else:
results = run(cmd)
installed_plugins = []
for line in results:
plugin = re.search('^(\\S.*) \\((.*)\\)$', line)
installed_plugins.append({'name': plugin.group(0), 'version': plugin.group(1)})
return installed_plugins # depends on [control=['for'], data=['line']] |
def synchronize(self, graph_data=None):
    """
    Synchronize ``facebook_username``, ``first_name``, ``middle_name``,
    ``last_name`` and ``birthday`` with Facebook.

    :param graph_data: Optional pre-fetched graph data; when omitted, the
        profile is fetched from the Graph API via ``self.graph``.
    """
    profile = graph_data or self.graph.get('me')
    self.facebook_username = profile.get('username')
    self.first_name = profile.get('first_name')
    self.middle_name = profile.get('middle_name')
    self.last_name = profile.get('last_name')
    # ``'birthday' in profile`` replaces ``profile.has_key('birthday')``,
    # which only exists on Python 2; behavior is identical on both versions.
    self.birthday = datetime.strptime(profile['birthday'], '%m/%d/%Y') if 'birthday' in profile else None
    self.email = profile.get('email')
    self.locale = profile.get('locale')
    self.gender = profile.get('gender')
    self.extra_data = profile
    self.save()
constant[
Synchronize ``facebook_username``, ``first_name``, ``middle_name``,
``last_name`` and ``birthday`` with Facebook.
:param graph_data: Optional pre-fetched graph data
]
variable[profile] assign[=] <ast.BoolOp object at 0x7da1b0494490>
name[self].facebook_username assign[=] call[name[profile].get, parameter[constant[username]]]
name[self].first_name assign[=] call[name[profile].get, parameter[constant[first_name]]]
name[self].middle_name assign[=] call[name[profile].get, parameter[constant[middle_name]]]
name[self].last_name assign[=] call[name[profile].get, parameter[constant[last_name]]]
name[self].birthday assign[=] <ast.IfExp object at 0x7da1b040df30>
name[self].email assign[=] call[name[profile].get, parameter[constant[email]]]
name[self].locale assign[=] call[name[profile].get, parameter[constant[locale]]]
name[self].gender assign[=] call[name[profile].get, parameter[constant[gender]]]
name[self].extra_data assign[=] name[profile]
call[name[self].save, parameter[]] | keyword[def] identifier[synchronize] ( identifier[self] , identifier[graph_data] = keyword[None] ):
literal[string]
identifier[profile] = identifier[graph_data] keyword[or] identifier[self] . identifier[graph] . identifier[get] ( literal[string] )
identifier[self] . identifier[facebook_username] = identifier[profile] . identifier[get] ( literal[string] )
identifier[self] . identifier[first_name] = identifier[profile] . identifier[get] ( literal[string] )
identifier[self] . identifier[middle_name] = identifier[profile] . identifier[get] ( literal[string] )
identifier[self] . identifier[last_name] = identifier[profile] . identifier[get] ( literal[string] )
identifier[self] . identifier[birthday] = identifier[datetime] . identifier[strptime] ( identifier[profile] [ literal[string] ], literal[string] ) keyword[if] identifier[profile] . identifier[has_key] ( literal[string] ) keyword[else] keyword[None]
identifier[self] . identifier[email] = identifier[profile] . identifier[get] ( literal[string] )
identifier[self] . identifier[locale] = identifier[profile] . identifier[get] ( literal[string] )
identifier[self] . identifier[gender] = identifier[profile] . identifier[get] ( literal[string] )
identifier[self] . identifier[extra_data] = identifier[profile]
identifier[self] . identifier[save] () | def synchronize(self, graph_data=None):
"""
Synchronize ``facebook_username``, ``first_name``, ``middle_name``,
``last_name`` and ``birthday`` with Facebook.
:param graph_data: Optional pre-fetched graph data
"""
profile = graph_data or self.graph.get('me')
self.facebook_username = profile.get('username')
self.first_name = profile.get('first_name')
self.middle_name = profile.get('middle_name')
self.last_name = profile.get('last_name')
self.birthday = datetime.strptime(profile['birthday'], '%m/%d/%Y') if profile.has_key('birthday') else None
self.email = profile.get('email')
self.locale = profile.get('locale')
self.gender = profile.get('gender')
self.extra_data = profile
self.save() |
def from_id(reddit_session, subreddit_id):
    """Return an edit-only submission object based on the id."""
    permalink = '/comments/{0}'.format(subreddit_id)
    return Submission(reddit_session,
                      {'id': subreddit_id, 'permalink': permalink})
constant[Return an edit-only submission object based on the id.]
variable[pseudo_data] assign[=] dictionary[[<ast.Constant object at 0x7da20e9b0160>, <ast.Constant object at 0x7da20e9b1f30>], [<ast.Name object at 0x7da20e9b1180>, <ast.Call object at 0x7da20e9b1360>]]
return[call[name[Submission], parameter[name[reddit_session], name[pseudo_data]]]] | keyword[def] identifier[from_id] ( identifier[reddit_session] , identifier[subreddit_id] ):
literal[string]
identifier[pseudo_data] ={ literal[string] : identifier[subreddit_id] ,
literal[string] : literal[string] . identifier[format] ( identifier[subreddit_id] )}
keyword[return] identifier[Submission] ( identifier[reddit_session] , identifier[pseudo_data] ) | def from_id(reddit_session, subreddit_id):
"""Return an edit-only submission object based on the id."""
pseudo_data = {'id': subreddit_id, 'permalink': '/comments/{0}'.format(subreddit_id)}
return Submission(reddit_session, pseudo_data) |
def visible(self, visible):
    """When visible changed, do setup or unwatch and call visible_callback"""
    self._visible = visible
    currently_empty = len(self.panel.objects) == 0
    if visible and currently_empty:
        # Becoming visible for the first time: build children and wire up
        # the controls.
        self.setup()
        self.select.visible = True
        self.control_panel.extend(self.controls)
        self.panel.extend(self.children)
    elif not visible and not currently_empty:
        # Being hidden: stop watching and tear the children down.
        self.unwatch()
        self.select.visible = False
        self.control_panel.clear()
        self.search.visible = False
        self.add.visible = False
        self.panel.clear()
    if self.visible_callback:
        self.visible_callback(visible)
constant[When visible changed, do setup or unwatch and call visible_callback]
name[self]._visible assign[=] name[visible]
if <ast.BoolOp object at 0x7da1b17f7dc0> begin[:]
call[name[self].setup, parameter[]]
name[self].select.visible assign[=] constant[True]
call[name[self].control_panel.extend, parameter[name[self].controls]]
call[name[self].panel.extend, parameter[name[self].children]]
if name[self].visible_callback begin[:]
call[name[self].visible_callback, parameter[name[visible]]] | keyword[def] identifier[visible] ( identifier[self] , identifier[visible] ):
literal[string]
identifier[self] . identifier[_visible] = identifier[visible]
keyword[if] identifier[visible] keyword[and] identifier[len] ( identifier[self] . identifier[panel] . identifier[objects] )== literal[int] :
identifier[self] . identifier[setup] ()
identifier[self] . identifier[select] . identifier[visible] = keyword[True]
identifier[self] . identifier[control_panel] . identifier[extend] ( identifier[self] . identifier[controls] )
identifier[self] . identifier[panel] . identifier[extend] ( identifier[self] . identifier[children] )
keyword[elif] keyword[not] identifier[visible] keyword[and] identifier[len] ( identifier[self] . identifier[panel] . identifier[objects] )> literal[int] :
identifier[self] . identifier[unwatch] ()
identifier[self] . identifier[select] . identifier[visible] = keyword[False]
identifier[self] . identifier[control_panel] . identifier[clear] ()
identifier[self] . identifier[search] . identifier[visible] = keyword[False]
identifier[self] . identifier[add] . identifier[visible] = keyword[False]
identifier[self] . identifier[panel] . identifier[clear] ()
keyword[if] identifier[self] . identifier[visible_callback] :
identifier[self] . identifier[visible_callback] ( identifier[visible] ) | def visible(self, visible):
"""When visible changed, do setup or unwatch and call visible_callback"""
self._visible = visible
if visible and len(self.panel.objects) == 0:
self.setup()
self.select.visible = True
self.control_panel.extend(self.controls)
self.panel.extend(self.children) # depends on [control=['if'], data=[]]
elif not visible and len(self.panel.objects) > 0:
self.unwatch()
# do children
self.select.visible = False
self.control_panel.clear()
self.search.visible = False
self.add.visible = False
self.panel.clear() # depends on [control=['if'], data=[]]
if self.visible_callback:
self.visible_callback(visible) # depends on [control=['if'], data=[]] |
def getPropErrorNameFromEnum(self, error):
    """
    returns a string that corresponds with the specified property error. The string will be the name
    of the error enum value for all valid error codes
    """
    # Thin delegation to the underlying OpenVR function table.
    return self.function_table.getPropErrorNameFromEnum(error)
constant[
returns a string that corresponds with the specified property error. The string will be the name
of the error enum value for all valid error codes
]
variable[fn] assign[=] name[self].function_table.getPropErrorNameFromEnum
variable[result] assign[=] call[name[fn], parameter[name[error]]]
return[name[result]] | keyword[def] identifier[getPropErrorNameFromEnum] ( identifier[self] , identifier[error] ):
literal[string]
identifier[fn] = identifier[self] . identifier[function_table] . identifier[getPropErrorNameFromEnum]
identifier[result] = identifier[fn] ( identifier[error] )
keyword[return] identifier[result] | def getPropErrorNameFromEnum(self, error):
"""
returns a string that corresponds with the specified property error. The string will be the name
of the error enum value for all valid error codes
"""
fn = self.function_table.getPropErrorNameFromEnum
result = fn(error)
return result |
def compatible_api_version(server_version):
    '''
    Check if this server API version is compatible to us.
    '''
    # Only the major version matters: we speak API major version 1.
    try:
        major = server_version.split('.')[0]
        if major == '1':
            return True
        logger.error(
            'Server API version (%s) is too new for us. Please update the executor installation.' % server_version)
        return False
    except Exception:
        # Anything unparseable (non-string, None, ...) is treated as unknown.
        logger.error(
            'Cannot understand the server API version (%s). Please update the executor installation.' % server_version)
        return False
constant[
Check if this server API version is compatible to us.
]
<ast.Try object at 0x7da2044c34f0> | keyword[def] identifier[compatible_api_version] ( identifier[server_version] ):
literal[string]
keyword[try] :
identifier[semver] = identifier[server_version] . identifier[split] ( literal[string] )
keyword[if] identifier[semver] [ literal[int] ]!= literal[string] :
identifier[logger] . identifier[error] (
literal[string] % identifier[server_version] )
keyword[return] keyword[False]
keyword[else] :
keyword[return] keyword[True]
keyword[except] identifier[Exception] :
identifier[logger] . identifier[error] (
literal[string] % identifier[server_version] )
keyword[return] keyword[False] | def compatible_api_version(server_version):
"""
Check if this server API version is compatible to us.
"""
try:
semver = server_version.split('.')
if semver[0] != '1':
logger.error('Server API version (%s) is too new for us. Please update the executor installation.' % server_version)
return False # depends on [control=['if'], data=[]]
else:
return True # depends on [control=['try'], data=[]]
except Exception:
logger.error('Cannot understand the server API version (%s). Please update the executor installation.' % server_version)
return False # depends on [control=['except'], data=[]] |
def check_destination(self, location, group):
    '''
    Check destination for the archives.
    :return:
    '''
    # rsync will put a single file named after the group, so the destination
    # directory has to exist (and actually be a directory) beforehand.
    try:
        destination = os.path.join(location, group)
        if os.path.exists(destination):
            if not os.path.isdir(destination):
                raise salt.exceptions.SaltException('Destination "{}" should be directory!'.format(destination))
            log.debug('Archives destination directory %s already exists', destination)
        else:
            os.makedirs(destination)
            log.debug('Created destination directory for archives: %s', destination)
    except OSError as err:
        # Deliberately best-effort: filesystem problems are logged, not raised.
        log.error(err)
constant[
Check destination for the archives.
:return:
]
<ast.Try object at 0x7da20cabcd60> | keyword[def] identifier[check_destination] ( identifier[self] , identifier[location] , identifier[group] ):
literal[string]
keyword[try] :
identifier[destination] = identifier[os] . identifier[path] . identifier[join] ( identifier[location] , identifier[group] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[destination] ) keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[destination] ):
keyword[raise] identifier[salt] . identifier[exceptions] . identifier[SaltException] ( literal[string] . identifier[format] ( identifier[destination] ))
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[destination] ):
identifier[os] . identifier[makedirs] ( identifier[destination] )
identifier[log] . identifier[debug] ( literal[string] , identifier[destination] )
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] , identifier[destination] )
keyword[except] identifier[OSError] keyword[as] identifier[err] :
identifier[log] . identifier[error] ( identifier[err] ) | def check_destination(self, location, group):
"""
Check destination for the archives.
:return:
"""
# Pre-create destination, since rsync will
# put one file named as group
try:
destination = os.path.join(location, group)
if os.path.exists(destination) and (not os.path.isdir(destination)):
raise salt.exceptions.SaltException('Destination "{}" should be directory!'.format(destination)) # depends on [control=['if'], data=[]]
if not os.path.exists(destination):
os.makedirs(destination)
log.debug('Created destination directory for archives: %s', destination) # depends on [control=['if'], data=[]]
else:
log.debug('Archives destination directory %s already exists', destination) # depends on [control=['try'], data=[]]
except OSError as err:
log.error(err) # depends on [control=['except'], data=['err']] |
def render_indirect(self, buffer, mode=None, count=-1, *, first=0) -> None:
    '''
    The render primitive (mode) must be the same as
    the input primitive of the GeometryShader.
    The draw commands are 5 integers: (count, instanceCount, firstIndex, baseVertex, baseInstance).
    Args:
        buffer (Buffer): Indirect drawing commands.
        mode (int): By default :py:data:`TRIANGLES` will be used.
        count (int): The number of draws.
    Keyword Args:
        first (int): The index of the first indirect draw command.
    '''
    render_mode = TRIANGLES if mode is None else mode
    self.mglo.render_indirect(buffer.mglo, render_mode, count, first)
constant[
The render primitive (mode) must be the same as
the input primitive of the GeometryShader.
The draw commands are 5 integers: (count, instanceCount, firstIndex, baseVertex, baseInstance).
Args:
buffer (Buffer): Indirect drawing commands.
mode (int): By default :py:data:`TRIANGLES` will be used.
count (int): The number of draws.
Keyword Args:
first (int): The index of the first indirect draw command.
]
if compare[name[mode] is constant[None]] begin[:]
variable[mode] assign[=] name[TRIANGLES]
call[name[self].mglo.render_indirect, parameter[name[buffer].mglo, name[mode], name[count], name[first]]] | keyword[def] identifier[render_indirect] ( identifier[self] , identifier[buffer] , identifier[mode] = keyword[None] , identifier[count] =- literal[int] ,*, identifier[first] = literal[int] )-> keyword[None] :
literal[string]
keyword[if] identifier[mode] keyword[is] keyword[None] :
identifier[mode] = identifier[TRIANGLES]
identifier[self] . identifier[mglo] . identifier[render_indirect] ( identifier[buffer] . identifier[mglo] , identifier[mode] , identifier[count] , identifier[first] ) | def render_indirect(self, buffer, mode=None, count=-1, *, first=0) -> None:
"""
The render primitive (mode) must be the same as
the input primitive of the GeometryShader.
The draw commands are 5 integers: (count, instanceCount, firstIndex, baseVertex, baseInstance).
Args:
buffer (Buffer): Indirect drawing commands.
mode (int): By default :py:data:`TRIANGLES` will be used.
count (int): The number of draws.
Keyword Args:
first (int): The index of the first indirect draw command.
"""
if mode is None:
mode = TRIANGLES # depends on [control=['if'], data=['mode']]
self.mglo.render_indirect(buffer.mglo, mode, count, first) |
def FZStaeckel(u, v, pot, delta):  # pragma: no cover because unused
    """
    NAME:
       FZStaeckel
    PURPOSE:
       return the vertical force
    INPUT:
       u - confocal u
       v - confocal v
       pot - potential
       delta - focus
    OUTPUT:
       FZ(u,v)
    HISTORY:
       2012-11-30 - Written - Bovy (IAS)
    """
    # Convert the confocal (u, v) coordinates to cylindrical (R, z) and
    # evaluate the vertical force of the potential there.
    cyl_R, cyl_z = bovy_coords.uv_to_Rz(u, v, delta=delta)
    return _evaluatezforces(pot, cyl_R, cyl_z)
constant[
NAME:
FZStaeckel
PURPOSE:
return the vertical force
INPUT:
u - confocal u
v - confocal v
pot - potential
delta - focus
OUTPUT:
FZ(u,v)
HISTORY:
2012-11-30 - Written - Bovy (IAS)
]
<ast.Tuple object at 0x7da18fe906a0> assign[=] call[name[bovy_coords].uv_to_Rz, parameter[name[u], name[v]]]
return[call[name[_evaluatezforces], parameter[name[pot], name[R], name[z]]]] | keyword[def] identifier[FZStaeckel] ( identifier[u] , identifier[v] , identifier[pot] , identifier[delta] ):
literal[string]
identifier[R] , identifier[z] = identifier[bovy_coords] . identifier[uv_to_Rz] ( identifier[u] , identifier[v] , identifier[delta] = identifier[delta] )
keyword[return] identifier[_evaluatezforces] ( identifier[pot] , identifier[R] , identifier[z] ) | def FZStaeckel(u, v, pot, delta): #pragma: no cover because unused
'\n NAME:\n FZStaeckel\n PURPOSE:\n return the vertical force\n INPUT:\n u - confocal u\n v - confocal v\n pot - potential\n delta - focus\n OUTPUT:\n FZ(u,v)\n HISTORY:\n 2012-11-30 - Written - Bovy (IAS)\n '
(R, z) = bovy_coords.uv_to_Rz(u, v, delta=delta)
return _evaluatezforces(pot, R, z) |
def set_properties_from_solution(self, T, P, V, b, delta, epsilon, a_alpha,
da_alpha_dT, d2a_alpha_dT2, quick=True):
r'''Sets all interesting properties which can be calculated from an
EOS alone. Determines which phase the fluid is on its own; for details,
see `phase_identification_parameter`.
The list of properties set is as follows, with all properties suffixed
with '_l' or '_g'.
dP_dT, dP_dV, dV_dT, dV_dP, dT_dV, dT_dP, d2P_dT2, d2P_dV2, d2V_dT2,
d2V_dP2, d2T_dV2, d2T_dP2, d2V_dPdT, d2P_dTdV, d2T_dPdV, H_dep, S_dep,
beta, kappa, Cp_minus_Cv, V_dep, U_dep, G_dep, A_dep, fugacity, phi,
and PIP.
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [Pa]
V : float
Molar volume, [m^3/mol]
b : float
Coefficient calculated by EOS-specific method, [m^3/mol]
delta : float
Coefficient calculated by EOS-specific method, [m^3/mol]
epsilon : float
Coefficient calculated by EOS-specific method, [m^6/mol^2]
a_alpha : float
Coefficient calculated by EOS-specific method, [J^2/mol^2/Pa]
da_alpha_dT : float
Temperature derivative of coefficient calculated by EOS-specific
method, [J^2/mol^2/Pa/K]
d2a_alpha_dT2 : float
Second temperature derivative of coefficient calculated by
EOS-specific method, [J^2/mol^2/Pa/K**2]
quick : bool, optional
Whether to use a SymPy cse-derived expression (3x faster) or
individual formulas
Returns
-------
phase : str
Either 'l' or 'g'
Notes
-----
The individual formulas for the derivatives and excess properties are
as follows. For definitions of `beta`, see `isobaric_expansion`;
for `kappa`, see isothermal_compressibility; for `Cp_minus_Cv`, see
`Cp_minus_Cv`; for `phase_identification_parameter`, see
`phase_identification_parameter`.
First derivatives; in part using the Triple Product Rule [2]_, [3]_:
.. math::
\left(\frac{\partial P}{\partial T}\right)_V = \frac{R}{V - b}
- \frac{a \frac{d \alpha{\left (T \right )}}{d T}}{V^{2} + V \delta
+ \epsilon}
\left(\frac{\partial P}{\partial V}\right)_T = - \frac{R T}{\left(
V - b\right)^{2}} - \frac{a \left(- 2 V - \delta\right) \alpha{
\left (T \right )}}{\left(V^{2} + V \delta + \epsilon\right)^{2}}
\left(\frac{\partial V}{\partial T}\right)_P =-\frac{
\left(\frac{\partial P}{\partial T}\right)_V}{
\left(\frac{\partial P}{\partial V}\right)_T}
\left(\frac{\partial V}{\partial P}\right)_T =-\frac{
\left(\frac{\partial V}{\partial T}\right)_P}{
\left(\frac{\partial P}{\partial T}\right)_V}
\left(\frac{\partial T}{\partial V}\right)_P = \frac{1}
{\left(\frac{\partial V}{\partial T}\right)_P}
\left(\frac{\partial T}{\partial P}\right)_V = \frac{1}
{\left(\frac{\partial P}{\partial T}\right)_V}
Second derivatives with respect to one variable; those of `T` and `V`
use identities shown in [1]_ and verified numerically:
.. math::
\left(\frac{\partial^2 P}{\partial T^2}\right)_V = - \frac{a
\frac{d^{2} \alpha{\left (T \right )}}{d T^{2}}}{V^{2} + V \delta
+ \epsilon}
\left(\frac{\partial^2 P}{\partial V^2}\right)_T = 2 \left(\frac{
R T}{\left(V - b\right)^{3}} - \frac{a \left(2 V + \delta\right)^{
2} \alpha{\left (T \right )}}{\left(V^{2} + V \delta + \epsilon
\right)^{3}} + \frac{a \alpha{\left (T \right )}}{\left(V^{2} + V
\delta + \epsilon\right)^{2}}\right)
\left(\frac{\partial^2 T}{\partial P^2}\right)_V = -\left(\frac{
\partial^2 P}{\partial T^2}\right)_V \left(\frac{\partial P}{
\partial T}\right)^{-3}_V
\left(\frac{\partial^2 V}{\partial P^2}\right)_T = -\left(\frac{
\partial^2 P}{\partial V^2}\right)_T \left(\frac{\partial P}{
\partial V}\right)^{-3}_T
\left(\frac{\partial^2 T}{\partial V^2}\right)_P = -\left[
\left(\frac{\partial^2 P}{\partial V^2}\right)_T
\left(\frac{\partial P}{\partial T}\right)_V
- \left(\frac{\partial P}{\partial V}\right)_T
\left(\frac{\partial^2 P}{\partial T \partial V}\right) \right]
\left(\frac{\partial P}{\partial T}\right)^{-2}_V
+ \left[\left(\frac{\partial^2 P}{\partial T\partial V}\right)
\left(\frac{\partial P}{\partial T}\right)_V
- \left(\frac{\partial P}{\partial V}\right)_T
\left(\frac{\partial^2 P}{\partial T^2}\right)_V\right]
\left(\frac{\partial P}{\partial T}\right)_V^{-3}
\left(\frac{\partial P}{\partial V}\right)_T
\left(\frac{\partial^2 V}{\partial T^2}\right)_P = -\left[
\left(\frac{\partial^2 P}{\partial T^2}\right)_V
\left(\frac{\partial P}{\partial V}\right)_T
- \left(\frac{\partial P}{\partial T}\right)_V
\left(\frac{\partial^2 P}{\partial T \partial V}\right) \right]
\left(\frac{\partial P}{\partial V}\right)^{-2}_T
+ \left[\left(\frac{\partial^2 P}{\partial T\partial V}\right)
\left(\frac{\partial P}{\partial V}\right)_T
- \left(\frac{\partial P}{\partial T}\right)_V
\left(\frac{\partial^2 P}{\partial V^2}\right)_T\right]
\left(\frac{\partial P}{\partial V}\right)_T^{-3}
\left(\frac{\partial P}{\partial T}\right)_V
Second derivatives with respect to the other two variables; those of
`T` and `V` use identities shown in [1]_ and verified numerically:
.. math::
\left(\frac{\partial^2 P}{\partial T \partial V}\right) = - \frac{
R}{\left(V - b\right)^{2}} + \frac{a \left(2 V + \delta\right)
\frac{d \alpha{\left (T \right )}}{d T}}{\left(V^{2} + V \delta
+ \epsilon\right)^{2}}
\left(\frac{\partial^2 T}{\partial P\partial V}\right) =
- \left[\left(\frac{\partial^2 P}{\partial T \partial V}\right)
\left(\frac{\partial P}{\partial T}\right)_V
- \left(\frac{\partial P}{\partial V}\right)_T
\left(\frac{\partial^2 P}{\partial T^2}\right)_V
\right]\left(\frac{\partial P}{\partial T}\right)_V^{-3}
\left(\frac{\partial^2 V}{\partial T\partial P}\right) =
- \left[\left(\frac{\partial^2 P}{\partial T \partial V}\right)
\left(\frac{\partial P}{\partial V}\right)_T
- \left(\frac{\partial P}{\partial T}\right)_V
\left(\frac{\partial^2 P}{\partial V^2}\right)_T
\right]\left(\frac{\partial P}{\partial V}\right)_T^{-3}
Excess properties
.. math::
H_{dep} = \int_{\infty}^V \left[T\frac{\partial P}{\partial T}_V
- P\right]dV + PV - RT= P V - R T + \frac{2}{\sqrt{
\delta^{2} - 4 \epsilon}} \left(T a \frac{d \alpha{\left (T \right
)}}{d T} - a \alpha{\left (T \right )}\right) \operatorname{atanh}
{\left (\frac{2 V + \delta}{\sqrt{\delta^{2} - 4 \epsilon}}
\right)}
S_{dep} = \int_{\infty}^V\left[\frac{\partial P}{\partial T}
- \frac{R}{V}\right] dV + R\log\frac{PV}{RT} = - R \log{\left (V
\right )} + R \log{\left (\frac{P V}{R T} \right )} + R \log{\left
(V - b \right )} + \frac{2 a \frac{d\alpha{\left (T \right )}}{d T}
}{\sqrt{\delta^{2} - 4 \epsilon}} \operatorname{atanh}{\left (\frac
{2 V + \delta}{\sqrt{\delta^{2} - 4 \epsilon}} \right )}
V_{dep} = V - \frac{RT}{P}
U_{dep} = H_{dep} - P V_{dep}
G_{dep} = H_{dep} - T S_{dep}
A_{dep} = U_{dep} - T S_{dep}
\text{fugacity} = P\exp\left(\frac{G_{dep}}{RT}\right)
\phi = \frac{\text{fugacity}}{P}
C_{v, dep} = T\int_\infty^V \left(\frac{\partial^2 P}{\partial
T^2}\right) dV = - T a \left(\sqrt{\frac{1}{\delta^{2} - 4
\epsilon}} \log{\left (V - \frac{\delta^{2}}{2} \sqrt{\frac{1}{
\delta^{2} - 4 \epsilon}} + \frac{\delta}{2} + 2 \epsilon \sqrt{
\frac{1}{\delta^{2} - 4 \epsilon}} \right )} - \sqrt{\frac{1}{
\delta^{2} - 4 \epsilon}} \log{\left (V + \frac{\delta^{2}}{2}
\sqrt{\frac{1}{\delta^{2} - 4 \epsilon}} + \frac{\delta}{2}
- 2 \epsilon \sqrt{\frac{1}{\delta^{2} - 4 \epsilon}} \right )}
\right) \frac{d^{2} \alpha{\left (T \right )} }{d T^{2}}
C_{p, dep} = (C_p-C_v)_{\text{from EOS}} + C_{v, dep} - R
References
----------
.. [1] Thorade, Matthis, and Ali Saadat. "Partial Derivatives of
Thermodynamic State Properties for Dynamic Simulation."
Environmental Earth Sciences 70, no. 8 (April 10, 2013): 3497-3503.
doi:10.1007/s12665-013-2394-z.
.. [2] Poling, Bruce E. The Properties of Gases and Liquids. 5th
edition. New York: McGraw-Hill Professional, 2000.
.. [3] Walas, Stanley M. Phase Equilibria in Chemical Engineering.
Butterworth-Heinemann, 1985.
'''
([dP_dT, dP_dV, dV_dT, dV_dP, dT_dV, dT_dP],
[d2P_dT2, d2P_dV2, d2V_dT2, d2V_dP2, d2T_dV2, d2T_dP2],
[d2V_dPdT, d2P_dTdV, d2T_dPdV],
[H_dep, S_dep, Cv_dep]) = self.derivatives_and_departures(T, P, V, b, delta, epsilon, a_alpha, da_alpha_dT, d2a_alpha_dT2, quick=quick)
beta = dV_dT/V # isobaric_expansion(V, dV_dT)
kappa = -dV_dP/V # isothermal_compressibility(V, dV_dP)
Cp_m_Cv = -T*dP_dT*dP_dT/dP_dV # Cp_minus_Cv(T, dP_dT, dP_dV)
Cp_dep = Cp_m_Cv + Cv_dep - R
V_dep = (V - R*T/P)
U_dep = H_dep - P*V_dep
G_dep = H_dep - T*S_dep
A_dep = U_dep - T*S_dep
fugacity = P*exp(G_dep/(R*T))
phi = fugacity/P
PIP = V*(d2P_dTdV/dP_dT - d2P_dV2/dP_dV) # phase_identification_parameter(V, dP_dT, dP_dV, d2P_dV2, d2P_dTdV)
phase = 'l' if PIP > 1 else 'g' # phase_identification_parameter_phase(PIP)
if phase == 'l':
self.Z_l = self.P*V/(R*self.T)
self.beta_l, self.kappa_l = beta, kappa
self.PIP_l, self.Cp_minus_Cv_l = PIP, Cp_m_Cv
self.dP_dT_l, self.dP_dV_l, self.dV_dT_l = dP_dT, dP_dV, dV_dT
self.dV_dP_l, self.dT_dV_l, self.dT_dP_l = dV_dP, dT_dV, dT_dP
self.d2P_dT2_l, self.d2P_dV2_l = d2P_dT2, d2P_dV2
self.d2V_dT2_l, self.d2V_dP2_l = d2V_dT2, d2V_dP2
self.d2T_dV2_l, self.d2T_dP2_l = d2T_dV2, d2T_dP2
self.d2V_dPdT_l, self.d2P_dTdV_l, self.d2T_dPdV_l = d2V_dPdT, d2P_dTdV, d2T_dPdV
self.H_dep_l, self.S_dep_l, self.V_dep_l = H_dep, S_dep, V_dep,
self.U_dep_l, self.G_dep_l, self.A_dep_l = U_dep, G_dep, A_dep,
self.fugacity_l, self.phi_l = fugacity, phi
self.Cp_dep_l, self.Cv_dep_l = Cp_dep, Cv_dep
else:
self.Z_g = self.P*V/(R*self.T)
self.beta_g, self.kappa_g = beta, kappa
self.PIP_g, self.Cp_minus_Cv_g = PIP, Cp_m_Cv
self.dP_dT_g, self.dP_dV_g, self.dV_dT_g = dP_dT, dP_dV, dV_dT
self.dV_dP_g, self.dT_dV_g, self.dT_dP_g = dV_dP, dT_dV, dT_dP
self.d2P_dT2_g, self.d2P_dV2_g = d2P_dT2, d2P_dV2
self.d2V_dT2_g, self.d2V_dP2_g = d2V_dT2, d2V_dP2
self.d2T_dV2_g, self.d2T_dP2_g = d2T_dV2, d2T_dP2
self.d2V_dPdT_g, self.d2P_dTdV_g, self.d2T_dPdV_g = d2V_dPdT, d2P_dTdV, d2T_dPdV
self.H_dep_g, self.S_dep_g, self.V_dep_g = H_dep, S_dep, V_dep,
self.U_dep_g, self.G_dep_g, self.A_dep_g = U_dep, G_dep, A_dep,
self.fugacity_g, self.phi_g = fugacity, phi
self.Cp_dep_g, self.Cv_dep_g = Cp_dep, Cv_dep
return phase | def function[set_properties_from_solution, parameter[self, T, P, V, b, delta, epsilon, a_alpha, da_alpha_dT, d2a_alpha_dT2, quick]]:
constant[Sets all interesting properties which can be calculated from an
EOS alone. Determines which phase the fluid is on its own; for details,
see `phase_identification_parameter`.
The list of properties set is as follows, with all properties suffixed
with '_l' or '_g'.
dP_dT, dP_dV, dV_dT, dV_dP, dT_dV, dT_dP, d2P_dT2, d2P_dV2, d2V_dT2,
d2V_dP2, d2T_dV2, d2T_dP2, d2V_dPdT, d2P_dTdV, d2T_dPdV, H_dep, S_dep,
beta, kappa, Cp_minus_Cv, V_dep, U_dep, G_dep, A_dep, fugacity, phi,
and PIP.
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [Pa]
V : float
Molar volume, [m^3/mol]
b : float
Coefficient calculated by EOS-specific method, [m^3/mol]
delta : float
Coefficient calculated by EOS-specific method, [m^3/mol]
epsilon : float
Coefficient calculated by EOS-specific method, [m^6/mol^2]
a_alpha : float
Coefficient calculated by EOS-specific method, [J^2/mol^2/Pa]
da_alpha_dT : float
Temperature derivative of coefficient calculated by EOS-specific
method, [J^2/mol^2/Pa/K]
d2a_alpha_dT2 : float
Second temperature derivative of coefficient calculated by
EOS-specific method, [J^2/mol^2/Pa/K**2]
quick : bool, optional
Whether to use a SymPy cse-derived expression (3x faster) or
individual formulas
Returns
-------
phase : str
Either 'l' or 'g'
Notes
-----
The individual formulas for the derivatives and excess properties are
as follows. For definitions of `beta`, see `isobaric_expansion`;
for `kappa`, see isothermal_compressibility; for `Cp_minus_Cv`, see
`Cp_minus_Cv`; for `phase_identification_parameter`, see
`phase_identification_parameter`.
First derivatives; in part using the Triple Product Rule [2]_, [3]_:
.. math::
\left(\frac{\partial P}{\partial T}\right)_V = \frac{R}{V - b}
- \frac{a \frac{d \alpha{\left (T \right )}}{d T}}{V^{2} + V \delta
+ \epsilon}
\left(\frac{\partial P}{\partial V}\right)_T = - \frac{R T}{\left(
V - b\right)^{2}} - \frac{a \left(- 2 V - \delta\right) \alpha{
\left (T \right )}}{\left(V^{2} + V \delta + \epsilon\right)^{2}}
\left(\frac{\partial V}{\partial T}\right)_P =-\frac{
\left(\frac{\partial P}{\partial T}\right)_V}{
\left(\frac{\partial P}{\partial V}\right)_T}
\left(\frac{\partial V}{\partial P}\right)_T =-\frac{
\left(\frac{\partial V}{\partial T}\right)_P}{
\left(\frac{\partial P}{\partial T}\right)_V}
\left(\frac{\partial T}{\partial V}\right)_P = \frac{1}
{\left(\frac{\partial V}{\partial T}\right)_P}
\left(\frac{\partial T}{\partial P}\right)_V = \frac{1}
{\left(\frac{\partial P}{\partial T}\right)_V}
Second derivatives with respect to one variable; those of `T` and `V`
use identities shown in [1]_ and verified numerically:
.. math::
\left(\frac{\partial^2 P}{\partial T^2}\right)_V = - \frac{a
\frac{d^{2} \alpha{\left (T \right )}}{d T^{2}}}{V^{2} + V \delta
+ \epsilon}
\left(\frac{\partial^2 P}{\partial V^2}\right)_T = 2 \left(\frac{
R T}{\left(V - b\right)^{3}} - \frac{a \left(2 V + \delta\right)^{
2} \alpha{\left (T \right )}}{\left(V^{2} + V \delta + \epsilon
\right)^{3}} + \frac{a \alpha{\left (T \right )}}{\left(V^{2} + V
\delta + \epsilon\right)^{2}}\right)
\left(\frac{\partial^2 T}{\partial P^2}\right)_V = -\left(\frac{
\partial^2 P}{\partial T^2}\right)_V \left(\frac{\partial P}{
\partial T}\right)^{-3}_V
\left(\frac{\partial^2 V}{\partial P^2}\right)_T = -\left(\frac{
\partial^2 P}{\partial V^2}\right)_T \left(\frac{\partial P}{
\partial V}\right)^{-3}_T
\left(\frac{\partial^2 T}{\partial V^2}\right)_P = -\left[
\left(\frac{\partial^2 P}{\partial V^2}\right)_T
\left(\frac{\partial P}{\partial T}\right)_V
- \left(\frac{\partial P}{\partial V}\right)_T
\left(\frac{\partial^2 P}{\partial T \partial V}\right) \right]
\left(\frac{\partial P}{\partial T}\right)^{-2}_V
+ \left[\left(\frac{\partial^2 P}{\partial T\partial V}\right)
\left(\frac{\partial P}{\partial T}\right)_V
- \left(\frac{\partial P}{\partial V}\right)_T
\left(\frac{\partial^2 P}{\partial T^2}\right)_V\right]
\left(\frac{\partial P}{\partial T}\right)_V^{-3}
\left(\frac{\partial P}{\partial V}\right)_T
\left(\frac{\partial^2 V}{\partial T^2}\right)_P = -\left[
\left(\frac{\partial^2 P}{\partial T^2}\right)_V
\left(\frac{\partial P}{\partial V}\right)_T
- \left(\frac{\partial P}{\partial T}\right)_V
\left(\frac{\partial^2 P}{\partial T \partial V}\right) \right]
\left(\frac{\partial P}{\partial V}\right)^{-2}_T
+ \left[\left(\frac{\partial^2 P}{\partial T\partial V}\right)
\left(\frac{\partial P}{\partial V}\right)_T
- \left(\frac{\partial P}{\partial T}\right)_V
\left(\frac{\partial^2 P}{\partial V^2}\right)_T\right]
\left(\frac{\partial P}{\partial V}\right)_T^{-3}
\left(\frac{\partial P}{\partial T}\right)_V
Second derivatives with respect to the other two variables; those of
`T` and `V` use identities shown in [1]_ and verified numerically:
.. math::
\left(\frac{\partial^2 P}{\partial T \partial V}\right) = - \frac{
R}{\left(V - b\right)^{2}} + \frac{a \left(2 V + \delta\right)
\frac{d \alpha{\left (T \right )}}{d T}}{\left(V^{2} + V \delta
+ \epsilon\right)^{2}}
\left(\frac{\partial^2 T}{\partial P\partial V}\right) =
- \left[\left(\frac{\partial^2 P}{\partial T \partial V}\right)
\left(\frac{\partial P}{\partial T}\right)_V
- \left(\frac{\partial P}{\partial V}\right)_T
\left(\frac{\partial^2 P}{\partial T^2}\right)_V
\right]\left(\frac{\partial P}{\partial T}\right)_V^{-3}
\left(\frac{\partial^2 V}{\partial T\partial P}\right) =
- \left[\left(\frac{\partial^2 P}{\partial T \partial V}\right)
\left(\frac{\partial P}{\partial V}\right)_T
- \left(\frac{\partial P}{\partial T}\right)_V
\left(\frac{\partial^2 P}{\partial V^2}\right)_T
\right]\left(\frac{\partial P}{\partial V}\right)_T^{-3}
Excess properties
.. math::
H_{dep} = \int_{\infty}^V \left[T\frac{\partial P}{\partial T}_V
- P\right]dV + PV - RT= P V - R T + \frac{2}{\sqrt{
\delta^{2} - 4 \epsilon}} \left(T a \frac{d \alpha{\left (T \right
)}}{d T} - a \alpha{\left (T \right )}\right) \operatorname{atanh}
{\left (\frac{2 V + \delta}{\sqrt{\delta^{2} - 4 \epsilon}}
\right)}
S_{dep} = \int_{\infty}^V\left[\frac{\partial P}{\partial T}
- \frac{R}{V}\right] dV + R\log\frac{PV}{RT} = - R \log{\left (V
\right )} + R \log{\left (\frac{P V}{R T} \right )} + R \log{\left
(V - b \right )} + \frac{2 a \frac{d\alpha{\left (T \right )}}{d T}
}{\sqrt{\delta^{2} - 4 \epsilon}} \operatorname{atanh}{\left (\frac
{2 V + \delta}{\sqrt{\delta^{2} - 4 \epsilon}} \right )}
V_{dep} = V - \frac{RT}{P}
U_{dep} = H_{dep} - P V_{dep}
G_{dep} = H_{dep} - T S_{dep}
A_{dep} = U_{dep} - T S_{dep}
\text{fugacity} = P\exp\left(\frac{G_{dep}}{RT}\right)
\phi = \frac{\text{fugacity}}{P}
C_{v, dep} = T\int_\infty^V \left(\frac{\partial^2 P}{\partial
T^2}\right) dV = - T a \left(\sqrt{\frac{1}{\delta^{2} - 4
\epsilon}} \log{\left (V - \frac{\delta^{2}}{2} \sqrt{\frac{1}{
\delta^{2} - 4 \epsilon}} + \frac{\delta}{2} + 2 \epsilon \sqrt{
\frac{1}{\delta^{2} - 4 \epsilon}} \right )} - \sqrt{\frac{1}{
\delta^{2} - 4 \epsilon}} \log{\left (V + \frac{\delta^{2}}{2}
\sqrt{\frac{1}{\delta^{2} - 4 \epsilon}} + \frac{\delta}{2}
- 2 \epsilon \sqrt{\frac{1}{\delta^{2} - 4 \epsilon}} \right )}
\right) \frac{d^{2} \alpha{\left (T \right )} }{d T^{2}}
C_{p, dep} = (C_p-C_v)_{\text{from EOS}} + C_{v, dep} - R
References
----------
.. [1] Thorade, Matthis, and Ali Saadat. "Partial Derivatives of
Thermodynamic State Properties for Dynamic Simulation."
Environmental Earth Sciences 70, no. 8 (April 10, 2013): 3497-3503.
doi:10.1007/s12665-013-2394-z.
.. [2] Poling, Bruce E. The Properties of Gases and Liquids. 5th
edition. New York: McGraw-Hill Professional, 2000.
.. [3] Walas, Stanley M. Phase Equilibria in Chemical Engineering.
Butterworth-Heinemann, 1985.
]
<ast.Tuple object at 0x7da207f034c0> assign[=] call[name[self].derivatives_and_departures, parameter[name[T], name[P], name[V], name[b], name[delta], name[epsilon], name[a_alpha], name[da_alpha_dT], name[d2a_alpha_dT2]]]
variable[beta] assign[=] binary_operation[name[dV_dT] / name[V]]
variable[kappa] assign[=] binary_operation[<ast.UnaryOp object at 0x7da207f03640> / name[V]]
variable[Cp_m_Cv] assign[=] binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da207f00d90> * name[dP_dT]] * name[dP_dT]] / name[dP_dV]]
variable[Cp_dep] assign[=] binary_operation[binary_operation[name[Cp_m_Cv] + name[Cv_dep]] - name[R]]
variable[V_dep] assign[=] binary_operation[name[V] - binary_operation[binary_operation[name[R] * name[T]] / name[P]]]
variable[U_dep] assign[=] binary_operation[name[H_dep] - binary_operation[name[P] * name[V_dep]]]
variable[G_dep] assign[=] binary_operation[name[H_dep] - binary_operation[name[T] * name[S_dep]]]
variable[A_dep] assign[=] binary_operation[name[U_dep] - binary_operation[name[T] * name[S_dep]]]
variable[fugacity] assign[=] binary_operation[name[P] * call[name[exp], parameter[binary_operation[name[G_dep] / binary_operation[name[R] * name[T]]]]]]
variable[phi] assign[=] binary_operation[name[fugacity] / name[P]]
variable[PIP] assign[=] binary_operation[name[V] * binary_operation[binary_operation[name[d2P_dTdV] / name[dP_dT]] - binary_operation[name[d2P_dV2] / name[dP_dV]]]]
variable[phase] assign[=] <ast.IfExp object at 0x7da207f03850>
if compare[name[phase] equal[==] constant[l]] begin[:]
name[self].Z_l assign[=] binary_operation[binary_operation[name[self].P * name[V]] / binary_operation[name[R] * name[self].T]]
<ast.Tuple object at 0x7da207f03b20> assign[=] tuple[[<ast.Name object at 0x7da207f01240>, <ast.Name object at 0x7da207f00d00>]]
<ast.Tuple object at 0x7da207f03790> assign[=] tuple[[<ast.Name object at 0x7da207f035e0>, <ast.Name object at 0x7da207f01ff0>]]
<ast.Tuple object at 0x7da207f01cf0> assign[=] tuple[[<ast.Name object at 0x7da207f00220>, <ast.Name object at 0x7da207f006d0>, <ast.Name object at 0x7da207f02a70>]]
<ast.Tuple object at 0x7da207f00b50> assign[=] tuple[[<ast.Name object at 0x7da207f01180>, <ast.Name object at 0x7da207f006a0>, <ast.Name object at 0x7da207f02410>]]
<ast.Tuple object at 0x7da207f00bb0> assign[=] tuple[[<ast.Name object at 0x7da207f00610>, <ast.Name object at 0x7da207f00460>]]
<ast.Tuple object at 0x7da207f01600> assign[=] tuple[[<ast.Name object at 0x7da207f03d90>, <ast.Name object at 0x7da207f02ad0>]]
<ast.Tuple object at 0x7da207f00850> assign[=] tuple[[<ast.Name object at 0x7da207f01ba0>, <ast.Name object at 0x7da207f00be0>]]
<ast.Tuple object at 0x7da207f01300> assign[=] tuple[[<ast.Name object at 0x7da207f02d10>, <ast.Name object at 0x7da207f00130>, <ast.Name object at 0x7da207f01120>]]
<ast.Tuple object at 0x7da207f028c0> assign[=] tuple[[<ast.Name object at 0x7da207f01e10>, <ast.Name object at 0x7da207f01c30>, <ast.Name object at 0x7da207f01540>]]
<ast.Tuple object at 0x7da207f02980> assign[=] tuple[[<ast.Name object at 0x7da207f00190>, <ast.Name object at 0x7da207f03070>, <ast.Name object at 0x7da207f015a0>]]
<ast.Tuple object at 0x7da207f00f70> assign[=] tuple[[<ast.Name object at 0x7da207f00f40>, <ast.Name object at 0x7da207f00700>]]
<ast.Tuple object at 0x7da207f01d20> assign[=] tuple[[<ast.Name object at 0x7da2041dafb0>, <ast.Name object at 0x7da2041d9690>]]
return[name[phase]] | keyword[def] identifier[set_properties_from_solution] ( identifier[self] , identifier[T] , identifier[P] , identifier[V] , identifier[b] , identifier[delta] , identifier[epsilon] , identifier[a_alpha] ,
identifier[da_alpha_dT] , identifier[d2a_alpha_dT2] , identifier[quick] = keyword[True] ):
literal[string]
([ identifier[dP_dT] , identifier[dP_dV] , identifier[dV_dT] , identifier[dV_dP] , identifier[dT_dV] , identifier[dT_dP] ],
[ identifier[d2P_dT2] , identifier[d2P_dV2] , identifier[d2V_dT2] , identifier[d2V_dP2] , identifier[d2T_dV2] , identifier[d2T_dP2] ],
[ identifier[d2V_dPdT] , identifier[d2P_dTdV] , identifier[d2T_dPdV] ],
[ identifier[H_dep] , identifier[S_dep] , identifier[Cv_dep] ])= identifier[self] . identifier[derivatives_and_departures] ( identifier[T] , identifier[P] , identifier[V] , identifier[b] , identifier[delta] , identifier[epsilon] , identifier[a_alpha] , identifier[da_alpha_dT] , identifier[d2a_alpha_dT2] , identifier[quick] = identifier[quick] )
identifier[beta] = identifier[dV_dT] / identifier[V]
identifier[kappa] =- identifier[dV_dP] / identifier[V]
identifier[Cp_m_Cv] =- identifier[T] * identifier[dP_dT] * identifier[dP_dT] / identifier[dP_dV]
identifier[Cp_dep] = identifier[Cp_m_Cv] + identifier[Cv_dep] - identifier[R]
identifier[V_dep] =( identifier[V] - identifier[R] * identifier[T] / identifier[P] )
identifier[U_dep] = identifier[H_dep] - identifier[P] * identifier[V_dep]
identifier[G_dep] = identifier[H_dep] - identifier[T] * identifier[S_dep]
identifier[A_dep] = identifier[U_dep] - identifier[T] * identifier[S_dep]
identifier[fugacity] = identifier[P] * identifier[exp] ( identifier[G_dep] /( identifier[R] * identifier[T] ))
identifier[phi] = identifier[fugacity] / identifier[P]
identifier[PIP] = identifier[V] *( identifier[d2P_dTdV] / identifier[dP_dT] - identifier[d2P_dV2] / identifier[dP_dV] )
identifier[phase] = literal[string] keyword[if] identifier[PIP] > literal[int] keyword[else] literal[string]
keyword[if] identifier[phase] == literal[string] :
identifier[self] . identifier[Z_l] = identifier[self] . identifier[P] * identifier[V] /( identifier[R] * identifier[self] . identifier[T] )
identifier[self] . identifier[beta_l] , identifier[self] . identifier[kappa_l] = identifier[beta] , identifier[kappa]
identifier[self] . identifier[PIP_l] , identifier[self] . identifier[Cp_minus_Cv_l] = identifier[PIP] , identifier[Cp_m_Cv]
identifier[self] . identifier[dP_dT_l] , identifier[self] . identifier[dP_dV_l] , identifier[self] . identifier[dV_dT_l] = identifier[dP_dT] , identifier[dP_dV] , identifier[dV_dT]
identifier[self] . identifier[dV_dP_l] , identifier[self] . identifier[dT_dV_l] , identifier[self] . identifier[dT_dP_l] = identifier[dV_dP] , identifier[dT_dV] , identifier[dT_dP]
identifier[self] . identifier[d2P_dT2_l] , identifier[self] . identifier[d2P_dV2_l] = identifier[d2P_dT2] , identifier[d2P_dV2]
identifier[self] . identifier[d2V_dT2_l] , identifier[self] . identifier[d2V_dP2_l] = identifier[d2V_dT2] , identifier[d2V_dP2]
identifier[self] . identifier[d2T_dV2_l] , identifier[self] . identifier[d2T_dP2_l] = identifier[d2T_dV2] , identifier[d2T_dP2]
identifier[self] . identifier[d2V_dPdT_l] , identifier[self] . identifier[d2P_dTdV_l] , identifier[self] . identifier[d2T_dPdV_l] = identifier[d2V_dPdT] , identifier[d2P_dTdV] , identifier[d2T_dPdV]
identifier[self] . identifier[H_dep_l] , identifier[self] . identifier[S_dep_l] , identifier[self] . identifier[V_dep_l] = identifier[H_dep] , identifier[S_dep] , identifier[V_dep] ,
identifier[self] . identifier[U_dep_l] , identifier[self] . identifier[G_dep_l] , identifier[self] . identifier[A_dep_l] = identifier[U_dep] , identifier[G_dep] , identifier[A_dep] ,
identifier[self] . identifier[fugacity_l] , identifier[self] . identifier[phi_l] = identifier[fugacity] , identifier[phi]
identifier[self] . identifier[Cp_dep_l] , identifier[self] . identifier[Cv_dep_l] = identifier[Cp_dep] , identifier[Cv_dep]
keyword[else] :
identifier[self] . identifier[Z_g] = identifier[self] . identifier[P] * identifier[V] /( identifier[R] * identifier[self] . identifier[T] )
identifier[self] . identifier[beta_g] , identifier[self] . identifier[kappa_g] = identifier[beta] , identifier[kappa]
identifier[self] . identifier[PIP_g] , identifier[self] . identifier[Cp_minus_Cv_g] = identifier[PIP] , identifier[Cp_m_Cv]
identifier[self] . identifier[dP_dT_g] , identifier[self] . identifier[dP_dV_g] , identifier[self] . identifier[dV_dT_g] = identifier[dP_dT] , identifier[dP_dV] , identifier[dV_dT]
identifier[self] . identifier[dV_dP_g] , identifier[self] . identifier[dT_dV_g] , identifier[self] . identifier[dT_dP_g] = identifier[dV_dP] , identifier[dT_dV] , identifier[dT_dP]
identifier[self] . identifier[d2P_dT2_g] , identifier[self] . identifier[d2P_dV2_g] = identifier[d2P_dT2] , identifier[d2P_dV2]
identifier[self] . identifier[d2V_dT2_g] , identifier[self] . identifier[d2V_dP2_g] = identifier[d2V_dT2] , identifier[d2V_dP2]
identifier[self] . identifier[d2T_dV2_g] , identifier[self] . identifier[d2T_dP2_g] = identifier[d2T_dV2] , identifier[d2T_dP2]
identifier[self] . identifier[d2V_dPdT_g] , identifier[self] . identifier[d2P_dTdV_g] , identifier[self] . identifier[d2T_dPdV_g] = identifier[d2V_dPdT] , identifier[d2P_dTdV] , identifier[d2T_dPdV]
identifier[self] . identifier[H_dep_g] , identifier[self] . identifier[S_dep_g] , identifier[self] . identifier[V_dep_g] = identifier[H_dep] , identifier[S_dep] , identifier[V_dep] ,
identifier[self] . identifier[U_dep_g] , identifier[self] . identifier[G_dep_g] , identifier[self] . identifier[A_dep_g] = identifier[U_dep] , identifier[G_dep] , identifier[A_dep] ,
identifier[self] . identifier[fugacity_g] , identifier[self] . identifier[phi_g] = identifier[fugacity] , identifier[phi]
identifier[self] . identifier[Cp_dep_g] , identifier[self] . identifier[Cv_dep_g] = identifier[Cp_dep] , identifier[Cv_dep]
keyword[return] identifier[phase] | def set_properties_from_solution(self, T, P, V, b, delta, epsilon, a_alpha, da_alpha_dT, d2a_alpha_dT2, quick=True):
"""Sets all interesting properties which can be calculated from an
EOS alone. Determines which phase the fluid is on its own; for details,
see `phase_identification_parameter`.
The list of properties set is as follows, with all properties suffixed
with '_l' or '_g'.
dP_dT, dP_dV, dV_dT, dV_dP, dT_dV, dT_dP, d2P_dT2, d2P_dV2, d2V_dT2,
d2V_dP2, d2T_dV2, d2T_dP2, d2V_dPdT, d2P_dTdV, d2T_dPdV, H_dep, S_dep,
beta, kappa, Cp_minus_Cv, V_dep, U_dep, G_dep, A_dep, fugacity, phi,
and PIP.
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [Pa]
V : float
Molar volume, [m^3/mol]
b : float
Coefficient calculated by EOS-specific method, [m^3/mol]
delta : float
Coefficient calculated by EOS-specific method, [m^3/mol]
epsilon : float
Coefficient calculated by EOS-specific method, [m^6/mol^2]
a_alpha : float
Coefficient calculated by EOS-specific method, [J^2/mol^2/Pa]
da_alpha_dT : float
Temperature derivative of coefficient calculated by EOS-specific
method, [J^2/mol^2/Pa/K]
d2a_alpha_dT2 : float
Second temperature derivative of coefficient calculated by
EOS-specific method, [J^2/mol^2/Pa/K**2]
quick : bool, optional
Whether to use a SymPy cse-derived expression (3x faster) or
individual formulas
Returns
-------
phase : str
Either 'l' or 'g'
Notes
-----
The individual formulas for the derivatives and excess properties are
as follows. For definitions of `beta`, see `isobaric_expansion`;
for `kappa`, see isothermal_compressibility; for `Cp_minus_Cv`, see
`Cp_minus_Cv`; for `phase_identification_parameter`, see
`phase_identification_parameter`.
First derivatives; in part using the Triple Product Rule [2]_, [3]_:
.. math::
\\left(\\frac{\\partial P}{\\partial T}\\right)_V = \\frac{R}{V - b}
- \\frac{a \\frac{d \\alpha{\\left (T \\right )}}{d T}}{V^{2} + V \\delta
+ \\epsilon}
\\left(\\frac{\\partial P}{\\partial V}\\right)_T = - \\frac{R T}{\\left(
V - b\\right)^{2}} - \\frac{a \\left(- 2 V - \\delta\\right) \\alpha{
\\left (T \\right )}}{\\left(V^{2} + V \\delta + \\epsilon\\right)^{2}}
\\left(\\frac{\\partial V}{\\partial T}\\right)_P =-\\frac{
\\left(\\frac{\\partial P}{\\partial T}\\right)_V}{
\\left(\\frac{\\partial P}{\\partial V}\\right)_T}
\\left(\\frac{\\partial V}{\\partial P}\\right)_T =-\\frac{
\\left(\\frac{\\partial V}{\\partial T}\\right)_P}{
\\left(\\frac{\\partial P}{\\partial T}\\right)_V}
\\left(\\frac{\\partial T}{\\partial V}\\right)_P = \\frac{1}
{\\left(\\frac{\\partial V}{\\partial T}\\right)_P}
\\left(\\frac{\\partial T}{\\partial P}\\right)_V = \\frac{1}
{\\left(\\frac{\\partial P}{\\partial T}\\right)_V}
Second derivatives with respect to one variable; those of `T` and `V`
use identities shown in [1]_ and verified numerically:
.. math::
\\left(\\frac{\\partial^2 P}{\\partial T^2}\\right)_V = - \\frac{a
\\frac{d^{2} \\alpha{\\left (T \\right )}}{d T^{2}}}{V^{2} + V \\delta
+ \\epsilon}
\\left(\\frac{\\partial^2 P}{\\partial V^2}\\right)_T = 2 \\left(\\frac{
R T}{\\left(V - b\\right)^{3}} - \\frac{a \\left(2 V + \\delta\\right)^{
2} \\alpha{\\left (T \\right )}}{\\left(V^{2} + V \\delta + \\epsilon
\\right)^{3}} + \\frac{a \\alpha{\\left (T \\right )}}{\\left(V^{2} + V
\\delta + \\epsilon\\right)^{2}}\\right)
\\left(\\frac{\\partial^2 T}{\\partial P^2}\\right)_V = -\\left(\\frac{
\\partial^2 P}{\\partial T^2}\\right)_V \\left(\\frac{\\partial P}{
\\partial T}\\right)^{-3}_V
\\left(\\frac{\\partial^2 V}{\\partial P^2}\\right)_T = -\\left(\\frac{
\\partial^2 P}{\\partial V^2}\\right)_T \\left(\\frac{\\partial P}{
\\partial V}\\right)^{-3}_T
\\left(\\frac{\\partial^2 T}{\\partial V^2}\\right)_P = -\\left[
\\left(\\frac{\\partial^2 P}{\\partial V^2}\\right)_T
\\left(\\frac{\\partial P}{\\partial T}\\right)_V
- \\left(\\frac{\\partial P}{\\partial V}\\right)_T
\\left(\\frac{\\partial^2 P}{\\partial T \\partial V}\\right) \\right]
\\left(\\frac{\\partial P}{\\partial T}\\right)^{-2}_V
+ \\left[\\left(\\frac{\\partial^2 P}{\\partial T\\partial V}\\right)
\\left(\\frac{\\partial P}{\\partial T}\\right)_V
- \\left(\\frac{\\partial P}{\\partial V}\\right)_T
\\left(\\frac{\\partial^2 P}{\\partial T^2}\\right)_V\\right]
\\left(\\frac{\\partial P}{\\partial T}\\right)_V^{-3}
\\left(\\frac{\\partial P}{\\partial V}\\right)_T
\\left(\\frac{\\partial^2 V}{\\partial T^2}\\right)_P = -\\left[
\\left(\\frac{\\partial^2 P}{\\partial T^2}\\right)_V
\\left(\\frac{\\partial P}{\\partial V}\\right)_T
- \\left(\\frac{\\partial P}{\\partial T}\\right)_V
\\left(\\frac{\\partial^2 P}{\\partial T \\partial V}\\right) \\right]
\\left(\\frac{\\partial P}{\\partial V}\\right)^{-2}_T
+ \\left[\\left(\\frac{\\partial^2 P}{\\partial T\\partial V}\\right)
\\left(\\frac{\\partial P}{\\partial V}\\right)_T
- \\left(\\frac{\\partial P}{\\partial T}\\right)_V
\\left(\\frac{\\partial^2 P}{\\partial V^2}\\right)_T\\right]
\\left(\\frac{\\partial P}{\\partial V}\\right)_T^{-3}
\\left(\\frac{\\partial P}{\\partial T}\\right)_V
Second derivatives with respect to the other two variables; those of
`T` and `V` use identities shown in [1]_ and verified numerically:
.. math::
\\left(\\frac{\\partial^2 P}{\\partial T \\partial V}\\right) = - \\frac{
R}{\\left(V - b\\right)^{2}} + \\frac{a \\left(2 V + \\delta\\right)
\\frac{d \\alpha{\\left (T \\right )}}{d T}}{\\left(V^{2} + V \\delta
+ \\epsilon\\right)^{2}}
\\left(\\frac{\\partial^2 T}{\\partial P\\partial V}\\right) =
- \\left[\\left(\\frac{\\partial^2 P}{\\partial T \\partial V}\\right)
\\left(\\frac{\\partial P}{\\partial T}\\right)_V
- \\left(\\frac{\\partial P}{\\partial V}\\right)_T
\\left(\\frac{\\partial^2 P}{\\partial T^2}\\right)_V
\\right]\\left(\\frac{\\partial P}{\\partial T}\\right)_V^{-3}
\\left(\\frac{\\partial^2 V}{\\partial T\\partial P}\\right) =
- \\left[\\left(\\frac{\\partial^2 P}{\\partial T \\partial V}\\right)
\\left(\\frac{\\partial P}{\\partial V}\\right)_T
- \\left(\\frac{\\partial P}{\\partial T}\\right)_V
\\left(\\frac{\\partial^2 P}{\\partial V^2}\\right)_T
\\right]\\left(\\frac{\\partial P}{\\partial V}\\right)_T^{-3}
Excess properties
.. math::
H_{dep} = \\int_{\\infty}^V \\left[T\\frac{\\partial P}{\\partial T}_V
- P\\right]dV + PV - RT= P V - R T + \\frac{2}{\\sqrt{
\\delta^{2} - 4 \\epsilon}} \\left(T a \\frac{d \\alpha{\\left (T \\right
)}}{d T} - a \\alpha{\\left (T \\right )}\\right) \\operatorname{atanh}
{\\left (\\frac{2 V + \\delta}{\\sqrt{\\delta^{2} - 4 \\epsilon}}
\\right)}
S_{dep} = \\int_{\\infty}^V\\left[\\frac{\\partial P}{\\partial T}
- \\frac{R}{V}\\right] dV + R\\log\\frac{PV}{RT} = - R \\log{\\left (V
\\right )} + R \\log{\\left (\\frac{P V}{R T} \\right )} + R \\log{\\left
(V - b \\right )} + \\frac{2 a \\frac{d\\alpha{\\left (T \\right )}}{d T}
}{\\sqrt{\\delta^{2} - 4 \\epsilon}} \\operatorname{atanh}{\\left (\\frac
{2 V + \\delta}{\\sqrt{\\delta^{2} - 4 \\epsilon}} \\right )}
V_{dep} = V - \\frac{RT}{P}
U_{dep} = H_{dep} - P V_{dep}
G_{dep} = H_{dep} - T S_{dep}
A_{dep} = U_{dep} - T S_{dep}
\\text{fugacity} = P\\exp\\left(\\frac{G_{dep}}{RT}\\right)
\\phi = \\frac{\\text{fugacity}}{P}
C_{v, dep} = T\\int_\\infty^V \\left(\\frac{\\partial^2 P}{\\partial
T^2}\\right) dV = - T a \\left(\\sqrt{\\frac{1}{\\delta^{2} - 4
\\epsilon}} \\log{\\left (V - \\frac{\\delta^{2}}{2} \\sqrt{\\frac{1}{
\\delta^{2} - 4 \\epsilon}} + \\frac{\\delta}{2} + 2 \\epsilon \\sqrt{
\\frac{1}{\\delta^{2} - 4 \\epsilon}} \\right )} - \\sqrt{\\frac{1}{
\\delta^{2} - 4 \\epsilon}} \\log{\\left (V + \\frac{\\delta^{2}}{2}
\\sqrt{\\frac{1}{\\delta^{2} - 4 \\epsilon}} + \\frac{\\delta}{2}
- 2 \\epsilon \\sqrt{\\frac{1}{\\delta^{2} - 4 \\epsilon}} \\right )}
\\right) \\frac{d^{2} \\alpha{\\left (T \\right )} }{d T^{2}}
C_{p, dep} = (C_p-C_v)_{\\text{from EOS}} + C_{v, dep} - R
References
----------
.. [1] Thorade, Matthis, and Ali Saadat. "Partial Derivatives of
Thermodynamic State Properties for Dynamic Simulation."
Environmental Earth Sciences 70, no. 8 (April 10, 2013): 3497-3503.
doi:10.1007/s12665-013-2394-z.
.. [2] Poling, Bruce E. The Properties of Gases and Liquids. 5th
edition. New York: McGraw-Hill Professional, 2000.
.. [3] Walas, Stanley M. Phase Equilibria in Chemical Engineering.
Butterworth-Heinemann, 1985.
"""
([dP_dT, dP_dV, dV_dT, dV_dP, dT_dV, dT_dP], [d2P_dT2, d2P_dV2, d2V_dT2, d2V_dP2, d2T_dV2, d2T_dP2], [d2V_dPdT, d2P_dTdV, d2T_dPdV], [H_dep, S_dep, Cv_dep]) = self.derivatives_and_departures(T, P, V, b, delta, epsilon, a_alpha, da_alpha_dT, d2a_alpha_dT2, quick=quick)
beta = dV_dT / V # isobaric_expansion(V, dV_dT)
kappa = -dV_dP / V # isothermal_compressibility(V, dV_dP)
Cp_m_Cv = -T * dP_dT * dP_dT / dP_dV # Cp_minus_Cv(T, dP_dT, dP_dV)
Cp_dep = Cp_m_Cv + Cv_dep - R
V_dep = V - R * T / P
U_dep = H_dep - P * V_dep
G_dep = H_dep - T * S_dep
A_dep = U_dep - T * S_dep
fugacity = P * exp(G_dep / (R * T))
phi = fugacity / P
PIP = V * (d2P_dTdV / dP_dT - d2P_dV2 / dP_dV) # phase_identification_parameter(V, dP_dT, dP_dV, d2P_dV2, d2P_dTdV)
phase = 'l' if PIP > 1 else 'g' # phase_identification_parameter_phase(PIP)
if phase == 'l':
self.Z_l = self.P * V / (R * self.T)
(self.beta_l, self.kappa_l) = (beta, kappa)
(self.PIP_l, self.Cp_minus_Cv_l) = (PIP, Cp_m_Cv)
(self.dP_dT_l, self.dP_dV_l, self.dV_dT_l) = (dP_dT, dP_dV, dV_dT)
(self.dV_dP_l, self.dT_dV_l, self.dT_dP_l) = (dV_dP, dT_dV, dT_dP)
(self.d2P_dT2_l, self.d2P_dV2_l) = (d2P_dT2, d2P_dV2)
(self.d2V_dT2_l, self.d2V_dP2_l) = (d2V_dT2, d2V_dP2)
(self.d2T_dV2_l, self.d2T_dP2_l) = (d2T_dV2, d2T_dP2)
(self.d2V_dPdT_l, self.d2P_dTdV_l, self.d2T_dPdV_l) = (d2V_dPdT, d2P_dTdV, d2T_dPdV)
(self.H_dep_l, self.S_dep_l, self.V_dep_l) = (H_dep, S_dep, V_dep)
(self.U_dep_l, self.G_dep_l, self.A_dep_l) = (U_dep, G_dep, A_dep)
(self.fugacity_l, self.phi_l) = (fugacity, phi)
(self.Cp_dep_l, self.Cv_dep_l) = (Cp_dep, Cv_dep) # depends on [control=['if'], data=[]]
else:
self.Z_g = self.P * V / (R * self.T)
(self.beta_g, self.kappa_g) = (beta, kappa)
(self.PIP_g, self.Cp_minus_Cv_g) = (PIP, Cp_m_Cv)
(self.dP_dT_g, self.dP_dV_g, self.dV_dT_g) = (dP_dT, dP_dV, dV_dT)
(self.dV_dP_g, self.dT_dV_g, self.dT_dP_g) = (dV_dP, dT_dV, dT_dP)
(self.d2P_dT2_g, self.d2P_dV2_g) = (d2P_dT2, d2P_dV2)
(self.d2V_dT2_g, self.d2V_dP2_g) = (d2V_dT2, d2V_dP2)
(self.d2T_dV2_g, self.d2T_dP2_g) = (d2T_dV2, d2T_dP2)
(self.d2V_dPdT_g, self.d2P_dTdV_g, self.d2T_dPdV_g) = (d2V_dPdT, d2P_dTdV, d2T_dPdV)
(self.H_dep_g, self.S_dep_g, self.V_dep_g) = (H_dep, S_dep, V_dep)
(self.U_dep_g, self.G_dep_g, self.A_dep_g) = (U_dep, G_dep, A_dep)
(self.fugacity_g, self.phi_g) = (fugacity, phi)
(self.Cp_dep_g, self.Cv_dep_g) = (Cp_dep, Cv_dep)
return phase |
def com_adobe_fonts_check_family_bold_italic_unique_for_nameid1(ttFonts):
    """Check that OS/2.fsSelection bold & italic settings are unique
    for each NameID1"""
    from collections import Counter
    from fontbakery.utils import get_name_entry_strings
    from fontbakery.constants import NameID, FsSelection
    failed = False
    # Collect one (family-name, style-flags) pair per unique family name
    # found in each font; duplicates across fonts indicate a collision.
    observed_pairs = []
    for ttFont in ttFonts:
        # A font may carry the same family name several times (different
        # platforms/languages); de-duplicate with set() so each unique
        # name is counted once per font.
        family_names = set(get_name_entry_strings(ttFont, NameID.FONT_FAMILY_NAME))
        is_bold = bool(ttFont['OS/2'].fsSelection & FsSelection.BOLD)
        is_italic = bool(ttFont['OS/2'].fsSelection & FsSelection.ITALIC)
        style_key = 'Bold=%r, Italic=%r' % (is_bold, is_italic)
        observed_pairs.extend((family_name, style_key) for family_name in family_names)
    for (family_name, style_key), count in Counter(observed_pairs).items():
        if count > 1:
            failed = True
            yield FAIL, ("Family '{}' has {} fonts (should be no more than 1) with "
                         "the same OS/2.fsSelection bold & italic settings: {}"
                         ).format(family_name, count, style_key)
    if not failed:
        yield PASS, ("The OS/2.fsSelection bold & italic settings were unique "
                     "within each compatible family group.")
constant[Check that OS/2.fsSelection bold & italic settings are unique
for each NameID1]
from relative_module[collections] import module[Counter]
from relative_module[fontbakery.utils] import module[get_name_entry_strings]
from relative_module[fontbakery.constants] import module[NameID], module[FsSelection]
variable[failed] assign[=] constant[False]
variable[family_name_and_bold_italic] assign[=] call[name[list], parameter[]]
for taget[name[ttFont]] in starred[name[ttFonts]] begin[:]
variable[names_list] assign[=] call[name[get_name_entry_strings], parameter[name[ttFont], name[NameID].FONT_FAMILY_NAME]]
variable[names_set] assign[=] call[name[set], parameter[name[names_list]]]
variable[bold] assign[=] compare[binary_operation[call[name[ttFont]][constant[OS/2]].fsSelection <ast.BitAnd object at 0x7da2590d6b60> name[FsSelection].BOLD] not_equal[!=] constant[0]]
variable[italic] assign[=] compare[binary_operation[call[name[ttFont]][constant[OS/2]].fsSelection <ast.BitAnd object at 0x7da2590d6b60> name[FsSelection].ITALIC] not_equal[!=] constant[0]]
variable[bold_italic] assign[=] binary_operation[constant[Bold=%r, Italic=%r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b12512a0>, <ast.Name object at 0x7da1b1250af0>]]]
for taget[name[name]] in starred[name[names_set]] begin[:]
call[name[family_name_and_bold_italic].append, parameter[tuple[[<ast.Name object at 0x7da1b1250760>, <ast.Name object at 0x7da1b1252d70>]]]]
variable[counter] assign[=] call[name[Counter], parameter[name[family_name_and_bold_italic]]]
for taget[tuple[[<ast.Tuple object at 0x7da1b1251ae0>, <ast.Name object at 0x7da1b1250790>]]] in starred[call[name[counter].items, parameter[]]] begin[:]
if compare[name[count] greater[>] constant[1]] begin[:]
variable[failed] assign[=] constant[True]
<ast.Yield object at 0x7da1b1253fd0>
if <ast.UnaryOp object at 0x7da1b1241750> begin[:]
<ast.Yield object at 0x7da1b1240ca0> | keyword[def] identifier[com_adobe_fonts_check_family_bold_italic_unique_for_nameid1] ( identifier[ttFonts] ):
literal[string]
keyword[from] identifier[collections] keyword[import] identifier[Counter]
keyword[from] identifier[fontbakery] . identifier[utils] keyword[import] identifier[get_name_entry_strings]
keyword[from] identifier[fontbakery] . identifier[constants] keyword[import] identifier[NameID] , identifier[FsSelection]
identifier[failed] = keyword[False]
identifier[family_name_and_bold_italic] = identifier[list] ()
keyword[for] identifier[ttFont] keyword[in] identifier[ttFonts] :
identifier[names_list] = identifier[get_name_entry_strings] ( identifier[ttFont] , identifier[NameID] . identifier[FONT_FAMILY_NAME] )
identifier[names_set] = identifier[set] ( identifier[names_list] )
identifier[bold] =( identifier[ttFont] [ literal[string] ]. identifier[fsSelection] & identifier[FsSelection] . identifier[BOLD] )!= literal[int]
identifier[italic] =( identifier[ttFont] [ literal[string] ]. identifier[fsSelection] & identifier[FsSelection] . identifier[ITALIC] )!= literal[int]
identifier[bold_italic] = literal[string] %( identifier[bold] , identifier[italic] )
keyword[for] identifier[name] keyword[in] identifier[names_set] :
identifier[family_name_and_bold_italic] . identifier[append] (( identifier[name] , identifier[bold_italic] ,))
identifier[counter] = identifier[Counter] ( identifier[family_name_and_bold_italic] )
keyword[for] ( identifier[family_name] , identifier[bold_italic] ), identifier[count] keyword[in] identifier[counter] . identifier[items] ():
keyword[if] identifier[count] > literal[int] :
identifier[failed] = keyword[True]
keyword[yield] identifier[FAIL] ,( literal[string]
literal[string]
). identifier[format] ( identifier[family_name] , identifier[count] , identifier[bold_italic] )
keyword[if] keyword[not] identifier[failed] :
keyword[yield] identifier[PASS] ,( literal[string]
literal[string] ) | def com_adobe_fonts_check_family_bold_italic_unique_for_nameid1(ttFonts):
"""Check that OS/2.fsSelection bold & italic settings are unique
for each NameID1"""
from collections import Counter
from fontbakery.utils import get_name_entry_strings
from fontbakery.constants import NameID, FsSelection
failed = False
family_name_and_bold_italic = list()
for ttFont in ttFonts:
names_list = get_name_entry_strings(ttFont, NameID.FONT_FAMILY_NAME)
# names_list will likely contain multiple entries, e.g. multiple copies
# of the same name in the same language for different platforms, but
# also different names in different languages, we use set() below
# to remove the duplicates and only store the unique family name(s)
# used for a given font
names_set = set(names_list)
bold = ttFont['OS/2'].fsSelection & FsSelection.BOLD != 0
italic = ttFont['OS/2'].fsSelection & FsSelection.ITALIC != 0
bold_italic = 'Bold=%r, Italic=%r' % (bold, italic)
for name in names_set:
family_name_and_bold_italic.append((name, bold_italic)) # depends on [control=['for'], data=['name']] # depends on [control=['for'], data=['ttFont']]
counter = Counter(family_name_and_bold_italic)
for ((family_name, bold_italic), count) in counter.items():
if count > 1:
failed = True
yield (FAIL, "Family '{}' has {} fonts (should be no more than 1) with the same OS/2.fsSelection bold & italic settings: {}".format(family_name, count, bold_italic)) # depends on [control=['if'], data=['count']] # depends on [control=['for'], data=[]]
if not failed:
yield (PASS, 'The OS/2.fsSelection bold & italic settings were unique within each compatible family group.') # depends on [control=['if'], data=[]] |
def rewrite_tg(env, tg_name, code):
    """Re-write a transform generating function pipe specification by extracting the tranform generating part,
    and replacing it with the generated transform. so:
        tgen(a,b,c).foo.bar
    becomes:
        tg = tgen(a,b,c)
        tg.foo.bar

    Returns a (rewritten_source, transform_gen_source_or_None, location) tuple.
    """
    visitor = ReplaceTG(env, tg_name)
    assert visitor.tg_name
    tree = visitor.visit(ast.parse(code))
    # When the visitor matched a call node it records its location;
    # otherwise fall back to this file's location marker.
    loc = ' #' + visitor.loc if visitor.loc else file_loc()
    # Dump the extracted transform-generator sub-tree back to source,
    # if the visitor found one.
    tg = meta.dump_python_source(visitor.trans_gen).strip() if visitor.trans_gen else None
    return meta.dump_python_source(tree).strip(), tg, loc
constant[Re-write a transform generating function pipe specification by extracting the tranform generating part,
and replacing it with the generated transform. so:
tgen(a,b,c).foo.bar
becomes:
tg = tgen(a,b,c)
tg.foo.bar
]
variable[visitor] assign[=] call[name[ReplaceTG], parameter[name[env], name[tg_name]]]
assert[name[visitor].tg_name]
variable[tree] assign[=] call[name[visitor].visit, parameter[call[name[ast].parse, parameter[name[code]]]]]
if name[visitor].loc begin[:]
variable[loc] assign[=] binary_operation[constant[ #] + name[visitor].loc]
if name[visitor].trans_gen begin[:]
variable[tg] assign[=] call[call[name[meta].dump_python_source, parameter[name[visitor].trans_gen]].strip, parameter[]]
return[tuple[[<ast.Call object at 0x7da20c9937c0>, <ast.Name object at 0x7da20c990580>, <ast.Name object at 0x7da20c990f70>]]] | keyword[def] identifier[rewrite_tg] ( identifier[env] , identifier[tg_name] , identifier[code] ):
literal[string]
identifier[visitor] = identifier[ReplaceTG] ( identifier[env] , identifier[tg_name] )
keyword[assert] identifier[visitor] . identifier[tg_name]
identifier[tree] = identifier[visitor] . identifier[visit] ( identifier[ast] . identifier[parse] ( identifier[code] ))
keyword[if] identifier[visitor] . identifier[loc] :
identifier[loc] = literal[string] + identifier[visitor] . identifier[loc]
keyword[else] :
identifier[loc] = identifier[file_loc] ()
keyword[if] identifier[visitor] . identifier[trans_gen] :
identifier[tg] = identifier[meta] . identifier[dump_python_source] ( identifier[visitor] . identifier[trans_gen] ). identifier[strip] ()
keyword[else] :
identifier[tg] = keyword[None]
keyword[return] identifier[meta] . identifier[dump_python_source] ( identifier[tree] ). identifier[strip] (), identifier[tg] , identifier[loc] | def rewrite_tg(env, tg_name, code):
"""Re-write a transform generating function pipe specification by extracting the tranform generating part,
and replacing it with the generated transform. so:
tgen(a,b,c).foo.bar
becomes:
tg = tgen(a,b,c)
tg.foo.bar
"""
visitor = ReplaceTG(env, tg_name)
assert visitor.tg_name
tree = visitor.visit(ast.parse(code))
if visitor.loc:
loc = ' #' + visitor.loc # depends on [control=['if'], data=[]]
else:
loc = file_loc() # The AST visitor didn't match a call node
if visitor.trans_gen:
tg = meta.dump_python_source(visitor.trans_gen).strip() # depends on [control=['if'], data=[]]
else:
tg = None
return (meta.dump_python_source(tree).strip(), tg, loc) |
def debugPreview(self, title="Debug"):
    """ Loads and displays the image at ``Pattern.path``

    Args:
        title: window title to use for the preview (previously accepted
            but ignored; now forwarded to ``Image.show``).
    """
    haystack = Image.open(self.path)
    # Pass the title through so the preview window is identifiable
    # when several debug previews are open at once.
    haystack.show(title=title)
constant[ Loads and displays the image at ``Pattern.path`` ]
variable[haystack] assign[=] call[name[Image].open, parameter[name[self].path]]
call[name[haystack].show, parameter[]] | keyword[def] identifier[debugPreview] ( identifier[self] , identifier[title] = literal[string] ):
literal[string]
identifier[haystack] = identifier[Image] . identifier[open] ( identifier[self] . identifier[path] )
identifier[haystack] . identifier[show] () | def debugPreview(self, title='Debug'):
""" Loads and displays the image at ``Pattern.path`` """
haystack = Image.open(self.path)
haystack.show() |
def _kappaShapelets(self, shapelets, beta):
"""
calculates the convergence kappa given lensing potential shapelet coefficients (laplacian/2)
:param shapelets: set of shapelets [l=,r=,a_lr=]
:type shapelets: array of size (n,3)
:returns: set of kappa shapelets.
:raises: AttributeError, KeyError
"""
output=np.zeros((len(shapelets)+1,len(shapelets)+1),'complex')
for nl in range(0,len(shapelets)):
for nr in range(0,len(shapelets)):
a_lr=shapelets[nl][nr]
if nl>0:
output[nl-1][nr+1]+=a_lr*np.sqrt(nl*(nr+1))/2
if nr>0:
output[nl-1][nr-1]+=a_lr*np.sqrt(nl*nr)/2
output[nl+1][nr+1]+=a_lr*np.sqrt((nl+1)*(nr+1))/2
if nr>0:
output[nl+1][nr-1]+=a_lr*np.sqrt((nl+1)*nr)/2
return output/beta**2 | def function[_kappaShapelets, parameter[self, shapelets, beta]]:
constant[
calculates the convergence kappa given lensing potential shapelet coefficients (laplacian/2)
:param shapelets: set of shapelets [l=,r=,a_lr=]
:type shapelets: array of size (n,3)
:returns: set of kappa shapelets.
:raises: AttributeError, KeyError
]
variable[output] assign[=] call[name[np].zeros, parameter[tuple[[<ast.BinOp object at 0x7da20e9549d0>, <ast.BinOp object at 0x7da20c76dc00>]], constant[complex]]]
for taget[name[nl]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[shapelets]]]]]] begin[:]
for taget[name[nr]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[shapelets]]]]]] begin[:]
variable[a_lr] assign[=] call[call[name[shapelets]][name[nl]]][name[nr]]
if compare[name[nl] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da20c76ebf0>
if compare[name[nr] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da2054a4190>
<ast.AugAssign object at 0x7da2054a7340>
if compare[name[nr] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da2054a5600>
return[binary_operation[name[output] / binary_operation[name[beta] ** constant[2]]]] | keyword[def] identifier[_kappaShapelets] ( identifier[self] , identifier[shapelets] , identifier[beta] ):
literal[string]
identifier[output] = identifier[np] . identifier[zeros] (( identifier[len] ( identifier[shapelets] )+ literal[int] , identifier[len] ( identifier[shapelets] )+ literal[int] ), literal[string] )
keyword[for] identifier[nl] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[shapelets] )):
keyword[for] identifier[nr] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[shapelets] )):
identifier[a_lr] = identifier[shapelets] [ identifier[nl] ][ identifier[nr] ]
keyword[if] identifier[nl] > literal[int] :
identifier[output] [ identifier[nl] - literal[int] ][ identifier[nr] + literal[int] ]+= identifier[a_lr] * identifier[np] . identifier[sqrt] ( identifier[nl] *( identifier[nr] + literal[int] ))/ literal[int]
keyword[if] identifier[nr] > literal[int] :
identifier[output] [ identifier[nl] - literal[int] ][ identifier[nr] - literal[int] ]+= identifier[a_lr] * identifier[np] . identifier[sqrt] ( identifier[nl] * identifier[nr] )/ literal[int]
identifier[output] [ identifier[nl] + literal[int] ][ identifier[nr] + literal[int] ]+= identifier[a_lr] * identifier[np] . identifier[sqrt] (( identifier[nl] + literal[int] )*( identifier[nr] + literal[int] ))/ literal[int]
keyword[if] identifier[nr] > literal[int] :
identifier[output] [ identifier[nl] + literal[int] ][ identifier[nr] - literal[int] ]+= identifier[a_lr] * identifier[np] . identifier[sqrt] (( identifier[nl] + literal[int] )* identifier[nr] )/ literal[int]
keyword[return] identifier[output] / identifier[beta] ** literal[int] | def _kappaShapelets(self, shapelets, beta):
"""
calculates the convergence kappa given lensing potential shapelet coefficients (laplacian/2)
:param shapelets: set of shapelets [l=,r=,a_lr=]
:type shapelets: array of size (n,3)
:returns: set of kappa shapelets.
:raises: AttributeError, KeyError
"""
output = np.zeros((len(shapelets) + 1, len(shapelets) + 1), 'complex')
for nl in range(0, len(shapelets)):
for nr in range(0, len(shapelets)):
a_lr = shapelets[nl][nr]
if nl > 0:
output[nl - 1][nr + 1] += a_lr * np.sqrt(nl * (nr + 1)) / 2
if nr > 0:
output[nl - 1][nr - 1] += a_lr * np.sqrt(nl * nr) / 2 # depends on [control=['if'], data=['nr']] # depends on [control=['if'], data=['nl']]
output[nl + 1][nr + 1] += a_lr * np.sqrt((nl + 1) * (nr + 1)) / 2
if nr > 0:
output[nl + 1][nr - 1] += a_lr * np.sqrt((nl + 1) * nr) / 2 # depends on [control=['if'], data=['nr']] # depends on [control=['for'], data=['nr']] # depends on [control=['for'], data=['nl']]
return output / beta ** 2 |
def get(self, name=None, plugin=None):
    """
    Returns commands, which can be filtered by name or plugin.

    :param name: name of the command
    :type name: str
    :param plugin: plugin object, which registers the commands
    :type plugin: instance of GwBasePattern
    :return: None, single command or dict of commands
    """
    if plugin is None:
        # No plugin filter: either the whole registry or a single lookup.
        if name is None:
            return self._commands
        return self._commands.get(name)
    if name is None:
        # Plugin filter only: collect every command owned by this plugin.
        return {key: command for key, command in self._commands.items()
                if command.plugin == plugin}
    # Both filters: the named command must exist AND belong to the plugin.
    command = self._commands.get(name)
    if command is not None and command.plugin == plugin:
        return command
    return None
constant[
Returns commands, which can be filtered by name or plugin.
:param name: name of the command
:type name: str
:param plugin: plugin object, which registers the commands
:type plugin: instance of GwBasePattern
:return: None, single command or dict of commands
]
if compare[name[plugin] is_not constant[None]] begin[:]
if compare[name[name] is constant[None]] begin[:]
variable[command_list] assign[=] dictionary[[], []]
for taget[name[key]] in starred[call[name[self]._commands.keys, parameter[]]] begin[:]
if compare[call[name[self]._commands][name[key]].plugin equal[==] name[plugin]] begin[:]
call[name[command_list]][name[key]] assign[=] call[name[self]._commands][name[key]]
return[name[command_list]] | keyword[def] identifier[get] ( identifier[self] , identifier[name] = keyword[None] , identifier[plugin] = keyword[None] ):
literal[string]
keyword[if] identifier[plugin] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[command_list] ={}
keyword[for] identifier[key] keyword[in] identifier[self] . identifier[_commands] . identifier[keys] ():
keyword[if] identifier[self] . identifier[_commands] [ identifier[key] ]. identifier[plugin] == identifier[plugin] :
identifier[command_list] [ identifier[key] ]= identifier[self] . identifier[_commands] [ identifier[key] ]
keyword[return] identifier[command_list]
keyword[else] :
keyword[if] identifier[name] keyword[in] identifier[self] . identifier[_commands] . identifier[keys] ():
keyword[if] identifier[self] . identifier[_commands] [ identifier[name] ]. identifier[plugin] == identifier[plugin] :
keyword[return] identifier[self] . identifier[_commands] [ identifier[name] ]
keyword[else] :
keyword[return] keyword[None]
keyword[else] :
keyword[return] keyword[None]
keyword[else] :
keyword[if] identifier[name] keyword[is] keyword[None] :
keyword[return] identifier[self] . identifier[_commands]
keyword[else] :
keyword[if] identifier[name] keyword[in] identifier[self] . identifier[_commands] . identifier[keys] ():
keyword[return] identifier[self] . identifier[_commands] [ identifier[name] ]
keyword[else] :
keyword[return] keyword[None] | def get(self, name=None, plugin=None):
"""
Returns commands, which can be filtered by name or plugin.
:param name: name of the command
:type name: str
:param plugin: plugin object, which registers the commands
:type plugin: instance of GwBasePattern
:return: None, single command or dict of commands
"""
if plugin is not None:
if name is None:
command_list = {}
for key in self._commands.keys():
if self._commands[key].plugin == plugin:
command_list[key] = self._commands[key] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
return command_list # depends on [control=['if'], data=[]]
elif name in self._commands.keys():
if self._commands[name].plugin == plugin:
return self._commands[name] # depends on [control=['if'], data=[]]
else:
return None # depends on [control=['if'], data=['name']]
else:
return None # depends on [control=['if'], data=['plugin']]
elif name is None:
return self._commands # depends on [control=['if'], data=[]]
elif name in self._commands.keys():
return self._commands[name] # depends on [control=['if'], data=['name']]
else:
return None |
def generate_table(self, rows):
    """
    Generates from a list of rows a PrettyTable object.

    Each entry of ``self.rows`` appears to be a pair ``(cells, is_header)``
    where ``cells`` is a list of column values and ``is_header`` marks the
    row that supplies the table's field names.

    NOTE(review): the ``rows`` parameter is never used -- the loop iterates
    ``self.rows`` instead. Confirm with callers whether the argument was
    meant to be honored.

    :param rows: list of rows (currently ignored, see note)
    :return: a PrettyTable built from ``self.rows``
    """
    table = PrettyTable(**self.kwargs)
    for row in self.rows:
        if len(row[0]) < self.max_row_width:
            # Pad short rows with "-" placeholder cells.
            # NOTE(review): range(1, appends) adds appends-1 cells, leaving
            # the row one short of max_row_width -- possible off-by-one,
            # or intentional; verify against header width before changing.
            appends = self.max_row_width - len(row[0])
            for i in range(1, appends):
                row[0].append("-")
        if row[1] is True:
            # Header row: ensure field names are unique before assigning.
            self.make_fields_unique(row[0])
            table.field_names = row[0]
        else:
            table.add_row(row[0])
    return table
constant[
Generates from a list of rows a PrettyTable object.
]
variable[table] assign[=] call[name[PrettyTable], parameter[]]
for taget[name[row]] in starred[name[self].rows] begin[:]
if compare[call[name[len], parameter[call[name[row]][constant[0]]]] less[<] name[self].max_row_width] begin[:]
variable[appends] assign[=] binary_operation[name[self].max_row_width - call[name[len], parameter[call[name[row]][constant[0]]]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], name[appends]]]] begin[:]
call[call[name[row]][constant[0]].append, parameter[constant[-]]]
if compare[call[name[row]][constant[1]] is constant[True]] begin[:]
call[name[self].make_fields_unique, parameter[call[name[row]][constant[0]]]]
name[table].field_names assign[=] call[name[row]][constant[0]]
return[name[table]] | keyword[def] identifier[generate_table] ( identifier[self] , identifier[rows] ):
literal[string]
identifier[table] = identifier[PrettyTable] (** identifier[self] . identifier[kwargs] )
keyword[for] identifier[row] keyword[in] identifier[self] . identifier[rows] :
keyword[if] identifier[len] ( identifier[row] [ literal[int] ])< identifier[self] . identifier[max_row_width] :
identifier[appends] = identifier[self] . identifier[max_row_width] - identifier[len] ( identifier[row] [ literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[appends] ):
identifier[row] [ literal[int] ]. identifier[append] ( literal[string] )
keyword[if] identifier[row] [ literal[int] ] keyword[is] keyword[True] :
identifier[self] . identifier[make_fields_unique] ( identifier[row] [ literal[int] ])
identifier[table] . identifier[field_names] = identifier[row] [ literal[int] ]
keyword[else] :
identifier[table] . identifier[add_row] ( identifier[row] [ literal[int] ])
keyword[return] identifier[table] | def generate_table(self, rows):
"""
Generates from a list of rows a PrettyTable object.
"""
table = PrettyTable(**self.kwargs)
for row in self.rows:
if len(row[0]) < self.max_row_width:
appends = self.max_row_width - len(row[0])
for i in range(1, appends):
row[0].append('-') # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
if row[1] is True:
self.make_fields_unique(row[0])
table.field_names = row[0] # depends on [control=['if'], data=[]]
else:
table.add_row(row[0]) # depends on [control=['for'], data=['row']]
return table |
def get_lib_name(self):
    """ Parse Cargo.toml to get the name of the shared library. """
    # Import here so that setup_requires has already been installed
    # by the time this runs.
    import toml
    manifest = toml.load(self.path)
    # Prefer the [lib] name, then fall back to the [package] name.
    lib_name = manifest.get("lib", {}).get("name")
    if lib_name is None:
        lib_name = manifest.get("package", {}).get("name")
    if lib_name is None:
        raise Exception(
            "Can not parse library name from Cargo.toml. "
            "Cargo.toml missing value for 'name' key "
            "in both the [package] section and the [lib] section"
        )
    # Cargo mangles these characters to underscores in the artifact name.
    return re.sub(r"[./\\-]", "_", lib_name)
constant[ Parse Cargo.toml to get the name of the shared library. ]
import module[toml]
variable[cfg] assign[=] call[name[toml].load, parameter[name[self].path]]
variable[name] assign[=] call[call[name[cfg].get, parameter[constant[lib], dictionary[[], []]]].get, parameter[constant[name]]]
if compare[name[name] is constant[None]] begin[:]
variable[name] assign[=] call[call[name[cfg].get, parameter[constant[package], dictionary[[], []]]].get, parameter[constant[name]]]
if compare[name[name] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b1290490>
variable[name] assign[=] call[name[re].sub, parameter[constant[[./\\-]], constant[_], name[name]]]
return[name[name]] | keyword[def] identifier[get_lib_name] ( identifier[self] ):
literal[string]
keyword[import] identifier[toml]
identifier[cfg] = identifier[toml] . identifier[load] ( identifier[self] . identifier[path] )
identifier[name] = identifier[cfg] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] )
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[name] = identifier[cfg] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] )
keyword[if] identifier[name] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] (
literal[string]
literal[string]
literal[string]
)
identifier[name] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[name] )
keyword[return] identifier[name] | def get_lib_name(self):
""" Parse Cargo.toml to get the name of the shared library. """
# We import in here to make sure the the setup_requires are already installed
import toml
cfg = toml.load(self.path)
name = cfg.get('lib', {}).get('name')
if name is None:
name = cfg.get('package', {}).get('name') # depends on [control=['if'], data=['name']]
if name is None:
raise Exception("Can not parse library name from Cargo.toml. Cargo.toml missing value for 'name' key in both the [package] section and the [lib] section") # depends on [control=['if'], data=[]]
name = re.sub('[./\\\\-]', '_', name)
return name |
def _summary(self, name=None):
    """
    Return a summarized representation.

    Parameters
    ----------
    name : str
        name to use in the summary representation

    Returns
    -------
    String with a summarized representation of the index
    """
    fmt = self._formatter_func
    if len(self) == 0:
        index_summary = ''
    else:
        # Show the first and last entries, e.g. ", 2000-01-01 to 2000-12-31".
        index_summary = ', %s to %s' % (fmt(self[0]), fmt(self[-1]))
    if name is None:
        name = type(self).__name__
    result = '%s: %s entries%s' % (printing.pprint_thing(name),
                                   len(self), index_summary)
    if self.freq:
        result += '\nFreq: %s' % self.freqstr
    # display as values, not quoted
    return result.replace("'", "")
constant[
Return a summarized representation.
Parameters
----------
name : str
name to use in the summary representation
Returns
-------
String with a summarized representation of the index
]
variable[formatter] assign[=] name[self]._formatter_func
if compare[call[name[len], parameter[name[self]]] greater[>] constant[0]] begin[:]
variable[index_summary] assign[=] binary_operation[constant[, %s to %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18bccb490>, <ast.Call object at 0x7da18bcc83d0>]]]
if compare[name[name] is constant[None]] begin[:]
variable[name] assign[=] call[name[type], parameter[name[self]]].__name__
variable[result] assign[=] binary_operation[constant[%s: %s entries%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18bccbc10>, <ast.Call object at 0x7da18bcc87f0>, <ast.Name object at 0x7da18bcc95a0>]]]
if name[self].freq begin[:]
<ast.AugAssign object at 0x7da18bccb370>
variable[result] assign[=] call[name[result].replace, parameter[constant['], constant[]]]
return[name[result]] | keyword[def] identifier[_summary] ( identifier[self] , identifier[name] = keyword[None] ):
literal[string]
identifier[formatter] = identifier[self] . identifier[_formatter_func]
keyword[if] identifier[len] ( identifier[self] )> literal[int] :
identifier[index_summary] = literal[string] %( identifier[formatter] ( identifier[self] [ literal[int] ]),
identifier[formatter] ( identifier[self] [- literal[int] ]))
keyword[else] :
identifier[index_summary] = literal[string]
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[name] = identifier[type] ( identifier[self] ). identifier[__name__]
identifier[result] = literal[string] %( identifier[printing] . identifier[pprint_thing] ( identifier[name] ),
identifier[len] ( identifier[self] ), identifier[index_summary] )
keyword[if] identifier[self] . identifier[freq] :
identifier[result] += literal[string] % identifier[self] . identifier[freqstr]
identifier[result] = identifier[result] . identifier[replace] ( literal[string] , literal[string] )
keyword[return] identifier[result] | def _summary(self, name=None):
"""
Return a summarized representation.
Parameters
----------
name : str
name to use in the summary representation
Returns
-------
String with a summarized representation of the index
"""
formatter = self._formatter_func
if len(self) > 0:
index_summary = ', %s to %s' % (formatter(self[0]), formatter(self[-1])) # depends on [control=['if'], data=[]]
else:
index_summary = ''
if name is None:
name = type(self).__name__ # depends on [control=['if'], data=['name']]
result = '%s: %s entries%s' % (printing.pprint_thing(name), len(self), index_summary)
if self.freq:
result += '\nFreq: %s' % self.freqstr # depends on [control=['if'], data=[]]
# display as values, not quoted
result = result.replace("'", '')
return result |
def _quadratic_sum_cost(self, state: _STATE) -> float:
    """Cost function that sums squares of lengths of sequences.

    Args:
        state: Search state, not mutated.

    Returns:
        Cost which is minus the normalized quadratic sum of each linear
        sequence section in the state. This promotes single, long linear
        sequence solutions and converges to number -1. The solution with a
        lowest cost consists of every node being a single sequence and is
        always less than 0.
    """
    seqs, _ = state
    total_len = float(len(self._c))
    # Negated sum of squared length fractions; start=0.0 keeps the
    # result a float even when there are no sequences.
    return -sum(((len(seq) / total_len) ** 2 for seq in seqs), 0.0)
constant[Cost function that sums squares of lengths of sequences.
Args:
state: Search state, not mutated.
Returns:
Cost which is minus the normalized quadratic sum of each linear
sequence section in the state. This promotes single, long linear
sequence solutions and converges to number -1. The solution with a
lowest cost consists of every node being a single sequence and is
always less than 0.
]
variable[cost] assign[=] constant[0.0]
variable[total_len] assign[=] call[name[float], parameter[call[name[len], parameter[name[self]._c]]]]
<ast.Tuple object at 0x7da1b1cee290> assign[=] name[state]
for taget[name[seq]] in starred[name[seqs]] begin[:]
<ast.AugAssign object at 0x7da1b1cee500>
return[<ast.UnaryOp object at 0x7da1b1c61f30>] | keyword[def] identifier[_quadratic_sum_cost] ( identifier[self] , identifier[state] : identifier[_STATE] )-> identifier[float] :
literal[string]
identifier[cost] = literal[int]
identifier[total_len] = identifier[float] ( identifier[len] ( identifier[self] . identifier[_c] ))
identifier[seqs] , identifier[_] = identifier[state]
keyword[for] identifier[seq] keyword[in] identifier[seqs] :
identifier[cost] +=( identifier[len] ( identifier[seq] )/ identifier[total_len] )** literal[int]
keyword[return] - identifier[cost] | def _quadratic_sum_cost(self, state: _STATE) -> float:
"""Cost function that sums squares of lengths of sequences.
Args:
state: Search state, not mutated.
Returns:
Cost which is minus the normalized quadratic sum of each linear
sequence section in the state. This promotes single, long linear
sequence solutions and converges to number -1. The solution with a
lowest cost consists of every node being a single sequence and is
always less than 0.
"""
cost = 0.0
total_len = float(len(self._c))
(seqs, _) = state
for seq in seqs:
cost += (len(seq) / total_len) ** 2 # depends on [control=['for'], data=['seq']]
return -cost |
def igrf12syn(isv, date, itype, alt, lat, elong):
    """
    This is a synthesis routine for the 12th generation IGRF as agreed
    in December 2014 by IAGA Working Group V-MOD. It is valid 1900.0 to
    2020.0 inclusive. Values for dates from 1945.0 to 2010.0 inclusive are
    definitive, otherwise they are non-definitive.
    INPUT
        isv = 0 if main-field values are required
        isv = 1 if secular variation values are required
        date = year A.D. Must be greater than or equal to 1900.0 and
            less than or equal to 2025.0. Warning message is given
            for dates greater than 2020.0. Must be double precision.
        itype = 1 if geodetic (spheroid)
        itype = 2 if geocentric (sphere)
        alt = height in km above sea level if itype = 1
            = distance from centre of Earth in km if itype = 2 (>3485 km)
        lat = latitude (-90~90)
        elong = east-longitude (0-360)
        alt, colat and elong must be double precision.
    OUTPUT
        x = north component (nT) if isv = 0, nT/year if isv = 1
        y = east component (nT) if isv = 0, nT/year if isv = 1
        z = vertical component (nT) if isv = 0, nT/year if isv = 1
        f = total intensity (nT) if isv = 0, rubbish if isv = 1
    To get the other geomagnetic elements (D, I, H and secular
    variations dD, dH, dI and dF) use routines ptoc and ptocsv.
    Adapted from 8th generation version to include new maximum degree for
    main-field models for 2000.0 and onwards and use WGS84 spheroid instead
    of International Astronomical Union 1966 spheroid as recommended by IAGA
    in July 2003. Reference radius remains as 6371.2 km - it is NOT the mean
    radius (= 6371.0 km) but 6371.2 km is what is used in determining the
    coefficients. Adaptation by Susan Macmillan, August 2003 (for
    9th generation), December 2004, December 2009 and December 2014.
    Coefficients at 1995.0 incorrectly rounded (rounded up instead of
    to even) included as these are the coefficients published in Excel
    spreadsheet July 2005.
    """
    # NOTE(review): this routine relies on module-level names not visible
    # here: FACT (used as a degrees-to-radians divisor, presumably 180/pi
    # -- confirm), gh (flat sequence of Gauss coefficients for all model
    # epochs, indexed via ll/lm below) and geodetic2geocentric -- verify
    # against the module header.
    # p/q hold the Schmidt quasi-normalised associated Legendre functions
    # and their theta-derivatives; cl/sl hold cos(m*lon)/sin(m*lon).
    p, q, cl, sl = [0.] * 105, [0.] * 105, [0.] * 13, [0.] * 13
    # set initial values
    x, y, z = 0., 0., 0.
    if date < 1900.0 or date > 2025.0:
        # Out-of-range date: warn and return the sentinel value f = 1.0
        # with zeroed field components.
        f = 1.0
        print('This subroutine will not work with a date of ' + str(date))
        print('Date must be in the range 1900.0 <= date <= 2025.0')
        print('On return f = 1.0, x = y = z = 0')
        return x, y, z, f
    elif date >= 2015.0:
        # Dates from 2015.0 onward: extrapolate from the final (2015.0)
        # main-field set using the secular-variation coefficients.
        if date > 2020.0:
            # not adapt for the model but can calculate
            print('This version of the IGRF is intended for use up to 2020.0.')
            print('values for ' + str(date) + ' will be computed but may be of reduced accuracy')
        # t/tc are the interpolation weights applied to the two coefficient
        # sets; for secular variation (isv == 1) they select the rate terms.
        t = date - 2015.0
        tc = 1.0
        if isv == 1:
            t = 1.0
            tc = 0.0
        # pointer for last coefficient in pen-ultimate set of MF coefficients...
        ll = 3060
        nmx = 13
        nc = nmx * (nmx + 2)
        kmx = (nmx + 1) * (nmx + 2) / 2
    else:
        # Dates 1900.0-2015.0: linear interpolation between the bracketing
        # 5-year model epochs; ll indexes the earlier epoch inside gh.
        t = 0.2 * (date - 1900.0)
        ll = int(t)
        t = t - ll
        # SH models before 1995.0 are only to degree 10
        if date < 1995.0:
            nmx = 10
            nc = nmx * (nmx + 2)
            ll = nc * ll
            kmx = (nmx + 1) * (nmx + 2) / 2
        else:
            nmx = 13
            nc = nmx * (nmx + 2)
            ll = round(0.2 * (date - 1995.0))
            # 19 is the number of SH models that extend to degree 10
            ll = 120 * 19 + nc * ll
            kmx = (nmx + 1) * (nmx + 2) / 2
        tc = 1.0 - t
        if isv == 1:
            # Secular variation from the finite difference of adjacent epochs
            # (5-year spacing => weights -0.2 and +0.2 per year).
            tc = -0.2
            t = 0.2
    # Convert the geographic inputs to colatitude/longitude in radians.
    colat = 90-lat
    r = alt
    one = colat / FACT
    ct = np.cos(one)
    st = np.sin(one)
    one = elong / FACT
    cl[0] = np.cos(one)
    sl[0] = np.sin(one)
    cd = 1.0
    sd = 0.0
    l = 1
    m = 1
    n = 0
    if itype != 2:
        # Geodetic input: convert latitude/altitude to geocentric colatitude
        # and radius; cd/sd later rotate x/z back to the geodetic frame.
        gclat, gclon, r = geodetic2geocentric(np.arctan2(st, ct), alt)
        ct, st = np.cos(gclat), np.sin(gclat)
        cd, sd = np.cos(gclon), np.sin(gclon)
    ratio = 6371.2 / r
    rr = ratio * ratio
    # computation of Schmidt quasi-normal coefficients p and x(=q)
    p[0] = 1.0
    p[2] = st
    q[0] = 0.0
    q[2] = ct
    fn, gn = n, n-1
    # Main recurrence over harmonic terms k; (n, m) track degree and order,
    # and rr accumulates the (a/r)^(n+2) radial factor one degree at a time.
    for k in range(2, int(kmx)+1):
        if n < m:
            # Move on to the next degree n, restarting at order m = 0.
            m = 0
            n = n + 1
            rr = rr * ratio
            fn = n
            gn = n - 1
        fm = m
        if m != n:
            # Standard three-term recurrence in degree for fixed order m.
            gmm = m * m
            one = np.sqrt(fn * fn - gmm)
            two = np.sqrt(gn * gn - gmm) / one
            three = (fn + gn) / one
            i = k - n
            j = i - n + 1
            p[k - 1] = three * ct * p[i - 1] - two * p[j - 1]
            q[k - 1] = three * (ct * q[i - 1] - st * p[i - 1]) - two * q[j - 1]
        else:
            # Diagonal (sectoral, m == n) recurrence; also advance the
            # cos/sin multiple-angle terms for the new order.
            if k != 3:
                one = np.sqrt(1.0 - 0.5 / fm)
                j = k - n - 1
                p[k-1] = one * st * p[j-1]
                q[k-1] = one * (st * q[j-1] + ct * p[j-1])
                cl[m-1] = cl[m - 2] * cl[0] - sl[m - 2] * sl[0]
                sl[m-1] = sl[m - 2] * cl[0] + cl[m - 2] * sl[0]
        # synthesis of x, y and z in geocentric coordinates
        # lm points at the g (and, for m > 0, h) coefficient of this term;
        # tc/t blend the two epochs selected above.
        lm = ll + l
        # print('g', n, m, k, gh[int(lm-1)], gh[int(lm + nc-1)])
        one = (tc * gh[int(lm-1)] + t * gh[int(lm + nc-1)]) * rr
        if m == 0:
            # Zonal term: only a g coefficient, no east component.
            x = x + one * q[k - 1]
            z = z - (fn + 1.0) * one * p[k - 1]
            l = l + 1
        else:
            # print('h', n, m, k, gh[int(lm)], gh[int(lm + nc)])
            two = (tc * gh[int(lm)] + t * gh[int(lm + nc)]) * rr
            three = one * cl[m-1] + two * sl[m-1]
            x = x + three * q[k-1]
            z = z - (fn + 1.0) * three * p[k-1]
            if st == 0.0:
                # At the geographic poles sin(colat) = 0: use the polar-limit
                # form to avoid dividing by zero.
                y = y + (one * sl[m - 1] - two * cl[m - 1]) * q[k - 1] * ct
            else:
                y = y + (one * sl[m-1] - two * cl[m-1]) * fm * p[k-1] / st
            l = l + 2
        m = m+1
    # conversion to coordinate system specified by itype
    one = x
    x = x * cd + z * sd
    z = z * cd - one * sd
    f = np.sqrt(x * x + y * y + z * z)
    #
    return x, y, z, f
constant[
This is a synthesis routine for the 12th generation IGRF as agreed
in December 2014 by IAGA Working Group V-MOD. It is valid 1900.0 to
2020.0 inclusive. Values for dates from 1945.0 to 2010.0 inclusive are
definitive, otherwise they are non-definitive.
INPUT
isv = 0 if main-field values are required
isv = 1 if secular variation values are required
date = year A.D. Must be greater than or equal to 1900.0 and
less than or equal to 2025.0. Warning message is given
for dates greater than 2020.0. Must be double precision.
itype = 1 if geodetic (spheroid)
itype = 2 if geocentric (sphere)
alt = height in km above sea level if itype = 1
= distance from centre of Earth in km if itype = 2 (>3485 km)
lat = latitude (-90~90)
elong = east-longitude (0-360)
alt, colat and elong must be double precision.
OUTPUT
x = north component (nT) if isv = 0, nT/year if isv = 1
y = east component (nT) if isv = 0, nT/year if isv = 1
z = vertical component (nT) if isv = 0, nT/year if isv = 1
f = total intensity (nT) if isv = 0, rubbish if isv = 1
To get the other geomagnetic elements (D, I, H and secular
variations dD, dH, dI and dF) use routines ptoc and ptocsv.
Adapted from 8th generation version to include new maximum degree for
main-field models for 2000.0 and onwards and use WGS84 spheroid instead
of International Astronomical Union 1966 spheroid as recommended by IAGA
in July 2003. Reference radius remains as 6371.2 km - it is NOT the mean
radius (= 6371.0 km) but 6371.2 km is what is used in determining the
coefficients. Adaptation by Susan Macmillan, August 2003 (for
9th generation), December 2004, December 2009 \ December 2014.
Coefficients at 1995.0 incorrectly rounded (rounded up instead of
to even) included as these are the coefficients published in Excel
spreadsheet July 2005.
]
<ast.Tuple object at 0x7da1b25d3a00> assign[=] tuple[[<ast.BinOp object at 0x7da1b25d3c10>, <ast.BinOp object at 0x7da1b25d3ca0>, <ast.BinOp object at 0x7da1b25d3fa0>, <ast.BinOp object at 0x7da1b25d0160>]]
<ast.Tuple object at 0x7da1b25d00d0> assign[=] tuple[[<ast.Constant object at 0x7da1b25d1270>, <ast.Constant object at 0x7da1b25d18a0>, <ast.Constant object at 0x7da1b25d1900>]]
if <ast.BoolOp object at 0x7da1b25d19c0> begin[:]
variable[f] assign[=] constant[1.0]
call[name[print], parameter[binary_operation[constant[This subroutine will not work with a date of ] + call[name[str], parameter[name[date]]]]]]
call[name[print], parameter[constant[Date must be in the range 1900.0 <= date <= 2025.0]]]
call[name[print], parameter[constant[On return f = 1.0, x = y = z = 0]]]
return[tuple[[<ast.Name object at 0x7da1b25d1510>, <ast.Name object at 0x7da1b25d1450>, <ast.Name object at 0x7da1b25d14e0>, <ast.Name object at 0x7da1b25d1480>]]]
variable[colat] assign[=] binary_operation[constant[90] - name[lat]]
variable[r] assign[=] name[alt]
variable[one] assign[=] binary_operation[name[colat] / name[FACT]]
variable[ct] assign[=] call[name[np].cos, parameter[name[one]]]
variable[st] assign[=] call[name[np].sin, parameter[name[one]]]
variable[one] assign[=] binary_operation[name[elong] / name[FACT]]
call[name[cl]][constant[0]] assign[=] call[name[np].cos, parameter[name[one]]]
call[name[sl]][constant[0]] assign[=] call[name[np].sin, parameter[name[one]]]
variable[cd] assign[=] constant[1.0]
variable[sd] assign[=] constant[0.0]
variable[l] assign[=] constant[1]
variable[m] assign[=] constant[1]
variable[n] assign[=] constant[0]
if compare[name[itype] not_equal[!=] constant[2]] begin[:]
<ast.Tuple object at 0x7da1b24e7eb0> assign[=] call[name[geodetic2geocentric], parameter[call[name[np].arctan2, parameter[name[st], name[ct]]], name[alt]]]
<ast.Tuple object at 0x7da1b24e75b0> assign[=] tuple[[<ast.Call object at 0x7da1b24e6b90>, <ast.Call object at 0x7da1b24e5f60>]]
<ast.Tuple object at 0x7da1b24e5ea0> assign[=] tuple[[<ast.Call object at 0x7da1b24e5db0>, <ast.Call object at 0x7da1b24e7670>]]
variable[ratio] assign[=] binary_operation[constant[6371.2] / name[r]]
variable[rr] assign[=] binary_operation[name[ratio] * name[ratio]]
call[name[p]][constant[0]] assign[=] constant[1.0]
call[name[p]][constant[2]] assign[=] name[st]
call[name[q]][constant[0]] assign[=] constant[0.0]
call[name[q]][constant[2]] assign[=] name[ct]
<ast.Tuple object at 0x7da1b24e79a0> assign[=] tuple[[<ast.Name object at 0x7da1b24e71c0>, <ast.BinOp object at 0x7da1b24e7580>]]
for taget[name[k]] in starred[call[name[range], parameter[constant[2], binary_operation[call[name[int], parameter[name[kmx]]] + constant[1]]]]] begin[:]
if compare[name[n] less[<] name[m]] begin[:]
variable[m] assign[=] constant[0]
variable[n] assign[=] binary_operation[name[n] + constant[1]]
variable[rr] assign[=] binary_operation[name[rr] * name[ratio]]
variable[fn] assign[=] name[n]
variable[gn] assign[=] binary_operation[name[n] - constant[1]]
variable[fm] assign[=] name[m]
if compare[name[m] not_equal[!=] name[n]] begin[:]
variable[gmm] assign[=] binary_operation[name[m] * name[m]]
variable[one] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[name[fn] * name[fn]] - name[gmm]]]]
variable[two] assign[=] binary_operation[call[name[np].sqrt, parameter[binary_operation[binary_operation[name[gn] * name[gn]] - name[gmm]]]] / name[one]]
variable[three] assign[=] binary_operation[binary_operation[name[fn] + name[gn]] / name[one]]
variable[i] assign[=] binary_operation[name[k] - name[n]]
variable[j] assign[=] binary_operation[binary_operation[name[i] - name[n]] + constant[1]]
call[name[p]][binary_operation[name[k] - constant[1]]] assign[=] binary_operation[binary_operation[binary_operation[name[three] * name[ct]] * call[name[p]][binary_operation[name[i] - constant[1]]]] - binary_operation[name[two] * call[name[p]][binary_operation[name[j] - constant[1]]]]]
call[name[q]][binary_operation[name[k] - constant[1]]] assign[=] binary_operation[binary_operation[name[three] * binary_operation[binary_operation[name[ct] * call[name[q]][binary_operation[name[i] - constant[1]]]] - binary_operation[name[st] * call[name[p]][binary_operation[name[i] - constant[1]]]]]] - binary_operation[name[two] * call[name[q]][binary_operation[name[j] - constant[1]]]]]
variable[lm] assign[=] binary_operation[name[ll] + name[l]]
variable[one] assign[=] binary_operation[binary_operation[binary_operation[name[tc] * call[name[gh]][call[name[int], parameter[binary_operation[name[lm] - constant[1]]]]]] + binary_operation[name[t] * call[name[gh]][call[name[int], parameter[binary_operation[binary_operation[name[lm] + name[nc]] - constant[1]]]]]]] * name[rr]]
if compare[name[m] equal[==] constant[0]] begin[:]
variable[x] assign[=] binary_operation[name[x] + binary_operation[name[one] * call[name[q]][binary_operation[name[k] - constant[1]]]]]
variable[z] assign[=] binary_operation[name[z] - binary_operation[binary_operation[binary_operation[name[fn] + constant[1.0]] * name[one]] * call[name[p]][binary_operation[name[k] - constant[1]]]]]
variable[l] assign[=] binary_operation[name[l] + constant[1]]
variable[m] assign[=] binary_operation[name[m] + constant[1]]
variable[one] assign[=] name[x]
variable[x] assign[=] binary_operation[binary_operation[name[x] * name[cd]] + binary_operation[name[z] * name[sd]]]
variable[z] assign[=] binary_operation[binary_operation[name[z] * name[cd]] - binary_operation[name[one] * name[sd]]]
variable[f] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[binary_operation[name[x] * name[x]] + binary_operation[name[y] * name[y]]] + binary_operation[name[z] * name[z]]]]]
return[tuple[[<ast.Name object at 0x7da18f8113c0>, <ast.Name object at 0x7da18f812410>, <ast.Name object at 0x7da18f810af0>, <ast.Name object at 0x7da18f811d20>]]] | keyword[def] identifier[igrf12syn] ( identifier[isv] , identifier[date] , identifier[itype] , identifier[alt] , identifier[lat] , identifier[elong] ):
literal[string]
identifier[p] , identifier[q] , identifier[cl] , identifier[sl] =[ literal[int] ]* literal[int] ,[ literal[int] ]* literal[int] ,[ literal[int] ]* literal[int] ,[ literal[int] ]* literal[int]
identifier[x] , identifier[y] , identifier[z] = literal[int] , literal[int] , literal[int]
keyword[if] identifier[date] < literal[int] keyword[or] identifier[date] > literal[int] :
identifier[f] = literal[int]
identifier[print] ( literal[string] + identifier[str] ( identifier[date] ))
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
keyword[return] identifier[x] , identifier[y] , identifier[z] , identifier[f]
keyword[elif] identifier[date] >= literal[int] :
keyword[if] identifier[date] > literal[int] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] + identifier[str] ( identifier[date] )+ literal[string] )
identifier[t] = identifier[date] - literal[int]
identifier[tc] = literal[int]
keyword[if] identifier[isv] == literal[int] :
identifier[t] = literal[int]
identifier[tc] = literal[int]
identifier[ll] = literal[int]
identifier[nmx] = literal[int]
identifier[nc] = identifier[nmx] *( identifier[nmx] + literal[int] )
identifier[kmx] =( identifier[nmx] + literal[int] )*( identifier[nmx] + literal[int] )/ literal[int]
keyword[else] :
identifier[t] = literal[int] *( identifier[date] - literal[int] )
identifier[ll] = identifier[int] ( identifier[t] )
identifier[t] = identifier[t] - identifier[ll]
keyword[if] identifier[date] < literal[int] :
identifier[nmx] = literal[int]
identifier[nc] = identifier[nmx] *( identifier[nmx] + literal[int] )
identifier[ll] = identifier[nc] * identifier[ll]
identifier[kmx] =( identifier[nmx] + literal[int] )*( identifier[nmx] + literal[int] )/ literal[int]
keyword[else] :
identifier[nmx] = literal[int]
identifier[nc] = identifier[nmx] *( identifier[nmx] + literal[int] )
identifier[ll] = identifier[round] ( literal[int] *( identifier[date] - literal[int] ))
identifier[ll] = literal[int] * literal[int] + identifier[nc] * identifier[ll]
identifier[kmx] =( identifier[nmx] + literal[int] )*( identifier[nmx] + literal[int] )/ literal[int]
identifier[tc] = literal[int] - identifier[t]
keyword[if] identifier[isv] == literal[int] :
identifier[tc] =- literal[int]
identifier[t] = literal[int]
identifier[colat] = literal[int] - identifier[lat]
identifier[r] = identifier[alt]
identifier[one] = identifier[colat] / identifier[FACT]
identifier[ct] = identifier[np] . identifier[cos] ( identifier[one] )
identifier[st] = identifier[np] . identifier[sin] ( identifier[one] )
identifier[one] = identifier[elong] / identifier[FACT]
identifier[cl] [ literal[int] ]= identifier[np] . identifier[cos] ( identifier[one] )
identifier[sl] [ literal[int] ]= identifier[np] . identifier[sin] ( identifier[one] )
identifier[cd] = literal[int]
identifier[sd] = literal[int]
identifier[l] = literal[int]
identifier[m] = literal[int]
identifier[n] = literal[int]
keyword[if] identifier[itype] != literal[int] :
identifier[gclat] , identifier[gclon] , identifier[r] = identifier[geodetic2geocentric] ( identifier[np] . identifier[arctan2] ( identifier[st] , identifier[ct] ), identifier[alt] )
identifier[ct] , identifier[st] = identifier[np] . identifier[cos] ( identifier[gclat] ), identifier[np] . identifier[sin] ( identifier[gclat] )
identifier[cd] , identifier[sd] = identifier[np] . identifier[cos] ( identifier[gclon] ), identifier[np] . identifier[sin] ( identifier[gclon] )
identifier[ratio] = literal[int] / identifier[r]
identifier[rr] = identifier[ratio] * identifier[ratio]
identifier[p] [ literal[int] ]= literal[int]
identifier[p] [ literal[int] ]= identifier[st]
identifier[q] [ literal[int] ]= literal[int]
identifier[q] [ literal[int] ]= identifier[ct]
identifier[fn] , identifier[gn] = identifier[n] , identifier[n] - literal[int]
keyword[for] identifier[k] keyword[in] identifier[range] ( literal[int] , identifier[int] ( identifier[kmx] )+ literal[int] ):
keyword[if] identifier[n] < identifier[m] :
identifier[m] = literal[int]
identifier[n] = identifier[n] + literal[int]
identifier[rr] = identifier[rr] * identifier[ratio]
identifier[fn] = identifier[n]
identifier[gn] = identifier[n] - literal[int]
identifier[fm] = identifier[m]
keyword[if] identifier[m] != identifier[n] :
identifier[gmm] = identifier[m] * identifier[m]
identifier[one] = identifier[np] . identifier[sqrt] ( identifier[fn] * identifier[fn] - identifier[gmm] )
identifier[two] = identifier[np] . identifier[sqrt] ( identifier[gn] * identifier[gn] - identifier[gmm] )/ identifier[one]
identifier[three] =( identifier[fn] + identifier[gn] )/ identifier[one]
identifier[i] = identifier[k] - identifier[n]
identifier[j] = identifier[i] - identifier[n] + literal[int]
identifier[p] [ identifier[k] - literal[int] ]= identifier[three] * identifier[ct] * identifier[p] [ identifier[i] - literal[int] ]- identifier[two] * identifier[p] [ identifier[j] - literal[int] ]
identifier[q] [ identifier[k] - literal[int] ]= identifier[three] *( identifier[ct] * identifier[q] [ identifier[i] - literal[int] ]- identifier[st] * identifier[p] [ identifier[i] - literal[int] ])- identifier[two] * identifier[q] [ identifier[j] - literal[int] ]
keyword[else] :
keyword[if] identifier[k] != literal[int] :
identifier[one] = identifier[np] . identifier[sqrt] ( literal[int] - literal[int] / identifier[fm] )
identifier[j] = identifier[k] - identifier[n] - literal[int]
identifier[p] [ identifier[k] - literal[int] ]= identifier[one] * identifier[st] * identifier[p] [ identifier[j] - literal[int] ]
identifier[q] [ identifier[k] - literal[int] ]= identifier[one] *( identifier[st] * identifier[q] [ identifier[j] - literal[int] ]+ identifier[ct] * identifier[p] [ identifier[j] - literal[int] ])
identifier[cl] [ identifier[m] - literal[int] ]= identifier[cl] [ identifier[m] - literal[int] ]* identifier[cl] [ literal[int] ]- identifier[sl] [ identifier[m] - literal[int] ]* identifier[sl] [ literal[int] ]
identifier[sl] [ identifier[m] - literal[int] ]= identifier[sl] [ identifier[m] - literal[int] ]* identifier[cl] [ literal[int] ]+ identifier[cl] [ identifier[m] - literal[int] ]* identifier[sl] [ literal[int] ]
identifier[lm] = identifier[ll] + identifier[l]
identifier[one] =( identifier[tc] * identifier[gh] [ identifier[int] ( identifier[lm] - literal[int] )]+ identifier[t] * identifier[gh] [ identifier[int] ( identifier[lm] + identifier[nc] - literal[int] )])* identifier[rr]
keyword[if] identifier[m] == literal[int] :
identifier[x] = identifier[x] + identifier[one] * identifier[q] [ identifier[k] - literal[int] ]
identifier[z] = identifier[z] -( identifier[fn] + literal[int] )* identifier[one] * identifier[p] [ identifier[k] - literal[int] ]
identifier[l] = identifier[l] + literal[int]
keyword[else] :
identifier[two] =( identifier[tc] * identifier[gh] [ identifier[int] ( identifier[lm] )]+ identifier[t] * identifier[gh] [ identifier[int] ( identifier[lm] + identifier[nc] )])* identifier[rr]
identifier[three] = identifier[one] * identifier[cl] [ identifier[m] - literal[int] ]+ identifier[two] * identifier[sl] [ identifier[m] - literal[int] ]
identifier[x] = identifier[x] + identifier[three] * identifier[q] [ identifier[k] - literal[int] ]
identifier[z] = identifier[z] -( identifier[fn] + literal[int] )* identifier[three] * identifier[p] [ identifier[k] - literal[int] ]
keyword[if] identifier[st] == literal[int] :
identifier[y] = identifier[y] +( identifier[one] * identifier[sl] [ identifier[m] - literal[int] ]- identifier[two] * identifier[cl] [ identifier[m] - literal[int] ])* identifier[q] [ identifier[k] - literal[int] ]* identifier[ct]
keyword[else] :
identifier[y] = identifier[y] +( identifier[one] * identifier[sl] [ identifier[m] - literal[int] ]- identifier[two] * identifier[cl] [ identifier[m] - literal[int] ])* identifier[fm] * identifier[p] [ identifier[k] - literal[int] ]/ identifier[st]
identifier[l] = identifier[l] + literal[int]
identifier[m] = identifier[m] + literal[int]
identifier[one] = identifier[x]
identifier[x] = identifier[x] * identifier[cd] + identifier[z] * identifier[sd]
identifier[z] = identifier[z] * identifier[cd] - identifier[one] * identifier[sd]
identifier[f] = identifier[np] . identifier[sqrt] ( identifier[x] * identifier[x] + identifier[y] * identifier[y] + identifier[z] * identifier[z] )
keyword[return] identifier[x] , identifier[y] , identifier[z] , identifier[f] | def igrf12syn(isv, date, itype, alt, lat, elong):
"""
This is a synthesis routine for the 12th generation IGRF as agreed
in December 2014 by IAGA Working Group V-MOD. It is valid 1900.0 to
2020.0 inclusive. Values for dates from 1945.0 to 2010.0 inclusive are
definitive, otherwise they are non-definitive.
INPUT
isv = 0 if main-field values are required
isv = 1 if secular variation values are required
date = year A.D. Must be greater than or equal to 1900.0 and
less than or equal to 2025.0. Warning message is given
for dates greater than 2020.0. Must be double precision.
itype = 1 if geodetic (spheroid)
itype = 2 if geocentric (sphere)
alt = height in km above sea level if itype = 1
= distance from centre of Earth in km if itype = 2 (>3485 km)
lat = latitude (-90~90)
elong = east-longitude (0-360)
alt, colat and elong must be double precision.
OUTPUT
x = north component (nT) if isv = 0, nT/year if isv = 1
y = east component (nT) if isv = 0, nT/year if isv = 1
z = vertical component (nT) if isv = 0, nT/year if isv = 1
f = total intensity (nT) if isv = 0, rubbish if isv = 1
To get the other geomagnetic elements (D, I, H and secular
variations dD, dH, dI and dF) use routines ptoc and ptocsv.
Adapted from 8th generation version to include new maximum degree for
main-field models for 2000.0 and onwards and use WGS84 spheroid instead
of International Astronomical Union 1966 spheroid as recommended by IAGA
in July 2003. Reference radius remains as 6371.2 km - it is NOT the mean
radius (= 6371.0 km) but 6371.2 km is what is used in determining the
coefficients. Adaptation by Susan Macmillan, August 2003 (for
9th generation), December 2004, December 2009 \\ December 2014.
Coefficients at 1995.0 incorrectly rounded (rounded up instead of
to even) included as these are the coefficients published in Excel
spreadsheet July 2005.
"""
(p, q, cl, sl) = ([0.0] * 105, [0.0] * 105, [0.0] * 13, [0.0] * 13)
# set initial values
(x, y, z) = (0.0, 0.0, 0.0)
if date < 1900.0 or date > 2025.0:
f = 1.0
print('This subroutine will not work with a date of ' + str(date))
print('Date must be in the range 1900.0 <= date <= 2025.0')
print('On return f = 1.0, x = y = z = 0')
return (x, y, z, f) # depends on [control=['if'], data=[]]
elif date >= 2015.0:
if date > 2020.0:
# not adapt for the model but can calculate
print('This version of the IGRF is intended for use up to 2020.0.')
print('values for ' + str(date) + ' will be computed but may be of reduced accuracy') # depends on [control=['if'], data=['date']]
t = date - 2015.0
tc = 1.0
if isv == 1:
t = 1.0
tc = 0.0 # depends on [control=['if'], data=[]]
# pointer for last coefficient in pen-ultimate set of MF coefficients...
ll = 3060
nmx = 13
nc = nmx * (nmx + 2)
kmx = (nmx + 1) * (nmx + 2) / 2 # depends on [control=['if'], data=['date']]
else:
t = 0.2 * (date - 1900.0)
ll = int(t)
t = t - ll
# SH models before 1995.0 are only to degree 10
if date < 1995.0:
nmx = 10
nc = nmx * (nmx + 2)
ll = nc * ll
kmx = (nmx + 1) * (nmx + 2) / 2 # depends on [control=['if'], data=[]]
else:
nmx = 13
nc = nmx * (nmx + 2)
ll = round(0.2 * (date - 1995.0))
# 19 is the number of SH models that extend to degree 10
ll = 120 * 19 + nc * ll
kmx = (nmx + 1) * (nmx + 2) / 2
tc = 1.0 - t
if isv == 1:
tc = -0.2
t = 0.2 # depends on [control=['if'], data=[]]
colat = 90 - lat
r = alt
one = colat / FACT
ct = np.cos(one)
st = np.sin(one)
one = elong / FACT
cl[0] = np.cos(one)
sl[0] = np.sin(one)
cd = 1.0
sd = 0.0
l = 1
m = 1
n = 0
if itype != 2:
(gclat, gclon, r) = geodetic2geocentric(np.arctan2(st, ct), alt)
(ct, st) = (np.cos(gclat), np.sin(gclat))
(cd, sd) = (np.cos(gclon), np.sin(gclon)) # depends on [control=['if'], data=[]]
ratio = 6371.2 / r
rr = ratio * ratio
# computation of Schmidt quasi-normal coefficients p and x(=q)
p[0] = 1.0
p[2] = st
q[0] = 0.0
q[2] = ct
(fn, gn) = (n, n - 1)
for k in range(2, int(kmx) + 1):
if n < m:
m = 0
n = n + 1
rr = rr * ratio
fn = n
gn = n - 1 # depends on [control=['if'], data=['n', 'm']]
fm = m
if m != n:
gmm = m * m
one = np.sqrt(fn * fn - gmm)
two = np.sqrt(gn * gn - gmm) / one
three = (fn + gn) / one
i = k - n
j = i - n + 1
p[k - 1] = three * ct * p[i - 1] - two * p[j - 1]
q[k - 1] = three * (ct * q[i - 1] - st * p[i - 1]) - two * q[j - 1] # depends on [control=['if'], data=['m', 'n']]
elif k != 3:
one = np.sqrt(1.0 - 0.5 / fm)
j = k - n - 1
p[k - 1] = one * st * p[j - 1]
q[k - 1] = one * (st * q[j - 1] + ct * p[j - 1])
cl[m - 1] = cl[m - 2] * cl[0] - sl[m - 2] * sl[0]
sl[m - 1] = sl[m - 2] * cl[0] + cl[m - 2] * sl[0] # depends on [control=['if'], data=['k']]
# synthesis of x, y and z in geocentric coordinates
lm = ll + l
# print('g', n, m, k, gh[int(lm-1)], gh[int(lm + nc-1)])
one = (tc * gh[int(lm - 1)] + t * gh[int(lm + nc - 1)]) * rr
if m == 0:
x = x + one * q[k - 1]
z = z - (fn + 1.0) * one * p[k - 1]
l = l + 1 # depends on [control=['if'], data=[]]
else:
# print('h', n, m, k, gh[int(lm)], gh[int(lm + nc)])
two = (tc * gh[int(lm)] + t * gh[int(lm + nc)]) * rr
three = one * cl[m - 1] + two * sl[m - 1]
x = x + three * q[k - 1]
z = z - (fn + 1.0) * three * p[k - 1]
if st == 0.0:
y = y + (one * sl[m - 1] - two * cl[m - 1]) * q[k - 1] * ct # depends on [control=['if'], data=[]]
else:
y = y + (one * sl[m - 1] - two * cl[m - 1]) * fm * p[k - 1] / st
l = l + 2
m = m + 1 # depends on [control=['for'], data=['k']]
# conversion to coordinate system specified by itype
one = x
x = x * cd + z * sd
z = z * cd - one * sd
f = np.sqrt(x * x + y * y + z * z)
#
return (x, y, z, f) |
def release_lock(self, verbose=VERBOSE, raiseError=RAISE_ERROR):
    """
    Release this locker's lock if it is set, closing the locked file
    descriptor once the release succeeds.

    :Parameters:
        #. verbose (bool): Whether to print errors when they are encountered
        #. raiseError (bool): Whether to raise error exceptions when they
           are encountered

    :Returns:
        #. result (boolean): Whether the lock was successfully released.
        #. code (integer, Exception): Outcome indicator:

           * 0: No lock file exists, hence successfully released
           * 1: Lock file exists but is empty, hence successfully released
           * 2: Lock owned by this locker and successfully released
           * 3: Lock successfully released and the locked file descriptor
             successfully closed
           * 4: Lock owned by another locker; this locker has no permission
             to release it, hence unsuccessfully released
           * Exception: An unexpected error occurred while releasing; the
             error is caught and returned wrapped in this Exception and
             result is False.
    """
    ok = False
    code = 0
    if not os.path.isfile(self.__lockPath):
        # No lock file on disk: nothing to release.
        ok = True
    else:
        content = None
        try:
            with open(self.__lockPath, 'rb') as fd:
                content = fd.readlines()
        except Exception as err:
            code = Exception( "Unable to read release lock file '%s' (%s)"%(self.__lockPath,str(err)) )
            if verbose: print(str(code))
            if raiseError: raise code
        if content is not None:
            if not len(content):
                # An empty lock file counts as already released.
                ok, code = True, 1
            elif content[0].rstrip() != self.__lockPass.encode():
                # Owned by a different locker: refuse to release.
                ok, code = False, 4
            else:
                # Our own passphrase: truncate the lock file to release it.
                try:
                    with open(self.__lockPath, 'wb') as f:
                        f.write( ''.encode() )
                        f.flush()
                        os.fsync(f.fileno())
                except Exception as err:
                    ok = False
                    code = Exception( "Unable to write release lock file '%s' (%s)"%(self.__lockPath,str(err)) )
                    if verbose: print(str(code))
                    if raiseError: raise code
                else:
                    ok, code = True, 2
    # Close the locked file descriptor only once the lock itself is released.
    if ok and self.__fd is not None:
        try:
            if not self.__fd.closed:
                self.__fd.flush()
                os.fsync(self.__fd.fileno())
                self.__fd.close()
        except Exception as err:
            code = Exception( "Unable to close file descriptor of locked file '%s' (%s)"%(self.__filePath,str(err)) )
            if verbose: print(str(code))
            if raiseError: raise code
        else:
            code = 3
    return ok, code
constant[
Release the lock when set and close file descriptor if opened.
:Parameters:
#. verbose (bool): Whether to be verbose about errors when encountered
#. raiseError (bool): Whether to raise error exception when encountered
:Returns:
#. result (boolean): Whether the lock is succesfully released.
#. code (integer, Exception): Integer code indicating the reason how the
lock was successfully or unsuccessfully released. When releasing the
lock generates an error, this will be caught and returned in a message
Exception code.
* 0: Lock is not found, therefore successfully released
* 1: Lock is found empty, therefore successfully released
* 2: Lock is found owned by this locker and successfully released
* 3: Lock is found owned by this locker and successfully released and locked file descriptor was successfully closed
* 4: Lock is found owned by another locker, this locker has no permission to release it. Therefore unsuccessfully released
* Exception: Lock was not successfully released because of an unexpected error.
The error is caught and returned in this Exception. In this case
result is False.
]
if <ast.UnaryOp object at 0x7da1b25854e0> begin[:]
variable[released] assign[=] constant[True]
variable[code] assign[=] constant[0]
if <ast.BoolOp object at 0x7da18dc07f40> begin[:]
<ast.Try object at 0x7da18dc07520>
return[tuple[[<ast.Name object at 0x7da18bcc8760>, <ast.Name object at 0x7da18bccac20>]]] | keyword[def] identifier[release_lock] ( identifier[self] , identifier[verbose] = identifier[VERBOSE] , identifier[raiseError] = identifier[RAISE_ERROR] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[self] . identifier[__lockPath] ):
identifier[released] = keyword[True]
identifier[code] = literal[int]
keyword[else] :
keyword[try] :
keyword[with] identifier[open] ( identifier[self] . identifier[__lockPath] , literal[string] ) keyword[as] identifier[fd] :
identifier[lock] = identifier[fd] . identifier[readlines] ()
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[code] = identifier[Exception] ( literal[string] %( identifier[self] . identifier[__lockPath] , identifier[str] ( identifier[err] )))
identifier[released] = keyword[False]
keyword[if] identifier[verbose] : identifier[print] ( identifier[str] ( identifier[code] ))
keyword[if] identifier[raiseError] : keyword[raise] identifier[code]
keyword[else] :
keyword[if] keyword[not] identifier[len] ( identifier[lock] ):
identifier[code] = literal[int]
identifier[released] = keyword[True]
keyword[elif] identifier[lock] [ literal[int] ]. identifier[rstrip] ()== identifier[self] . identifier[__lockPass] . identifier[encode] ():
keyword[try] :
keyword[with] identifier[open] ( identifier[self] . identifier[__lockPath] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( literal[string] . identifier[encode] ())
identifier[f] . identifier[flush] ()
identifier[os] . identifier[fsync] ( identifier[f] . identifier[fileno] ())
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[released] = keyword[False]
identifier[code] = identifier[Exception] ( literal[string] %( identifier[self] . identifier[__lockPath] , identifier[str] ( identifier[err] )))
keyword[if] identifier[verbose] : identifier[print] ( identifier[str] ( identifier[code] ))
keyword[if] identifier[raiseError] : keyword[raise] identifier[code]
keyword[else] :
identifier[released] = keyword[True]
identifier[code] = literal[int]
keyword[else] :
identifier[code] = literal[int]
identifier[released] = keyword[False]
keyword[if] identifier[released] keyword[and] identifier[self] . identifier[__fd] keyword[is] keyword[not] keyword[None] :
keyword[try] :
keyword[if] keyword[not] identifier[self] . identifier[__fd] . identifier[closed] :
identifier[self] . identifier[__fd] . identifier[flush] ()
identifier[os] . identifier[fsync] ( identifier[self] . identifier[__fd] . identifier[fileno] ())
identifier[self] . identifier[__fd] . identifier[close] ()
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[code] = identifier[Exception] ( literal[string] %( identifier[self] . identifier[__filePath] , identifier[str] ( identifier[err] )))
keyword[if] identifier[verbose] : identifier[print] ( identifier[str] ( identifier[code] ))
keyword[if] identifier[raiseError] : keyword[raise] identifier[code]
keyword[else] :
identifier[code] = literal[int]
keyword[return] identifier[released] , identifier[code] | def release_lock(self, verbose=VERBOSE, raiseError=RAISE_ERROR):
"""
Release the lock when set and close file descriptor if opened.
:Parameters:
#. verbose (bool): Whether to be verbose about errors when encountered
#. raiseError (bool): Whether to raise error exception when encountered
:Returns:
#. result (boolean): Whether the lock is succesfully released.
#. code (integer, Exception): Integer code indicating the reason how the
lock was successfully or unsuccessfully released. When releasing the
lock generates an error, this will be caught and returned in a message
Exception code.
* 0: Lock is not found, therefore successfully released
* 1: Lock is found empty, therefore successfully released
* 2: Lock is found owned by this locker and successfully released
* 3: Lock is found owned by this locker and successfully released and locked file descriptor was successfully closed
* 4: Lock is found owned by another locker, this locker has no permission to release it. Therefore unsuccessfully released
* Exception: Lock was not successfully released because of an unexpected error.
The error is caught and returned in this Exception. In this case
result is False.
"""
if not os.path.isfile(self.__lockPath):
released = True
code = 0 # depends on [control=['if'], data=[]]
else:
try:
with open(self.__lockPath, 'rb') as fd:
lock = fd.readlines() # depends on [control=['with'], data=['fd']] # depends on [control=['try'], data=[]]
except Exception as err:
code = Exception("Unable to read release lock file '%s' (%s)" % (self.__lockPath, str(err)))
released = False
if verbose:
print(str(code)) # depends on [control=['if'], data=[]]
if raiseError:
raise code # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['err']]
else:
if not len(lock):
code = 1
released = True # depends on [control=['if'], data=[]]
elif lock[0].rstrip() == self.__lockPass.encode():
try:
with open(self.__lockPath, 'wb') as f:
#f.write( ''.encode('utf-8') )
f.write(''.encode())
f.flush()
os.fsync(f.fileno()) # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
except Exception as err:
released = False
code = Exception("Unable to write release lock file '%s' (%s)" % (self.__lockPath, str(err)))
if verbose:
print(str(code)) # depends on [control=['if'], data=[]]
if raiseError:
raise code # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['err']]
else:
released = True
code = 2 # depends on [control=['if'], data=[]]
else:
code = 4
released = False
# close file descriptor if lock is released and descriptor is not None
if released and self.__fd is not None:
try:
if not self.__fd.closed:
self.__fd.flush()
os.fsync(self.__fd.fileno())
self.__fd.close() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as err:
code = Exception("Unable to close file descriptor of locked file '%s' (%s)" % (self.__filePath, str(err)))
if verbose:
print(str(code)) # depends on [control=['if'], data=[]]
if raiseError:
raise code # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['err']]
else:
code = 3 # depends on [control=['if'], data=[]]
# return
return (released, code) |
def _get_indent(self, node):
"""Get node indentation level."""
lineno = node.lineno
if lineno > len(self._lines):
return -1
wsindent = self._wsregexp.match(self._lines[lineno - 1])
return len(wsindent.group(1)) | def function[_get_indent, parameter[self, node]]:
constant[Get node indentation level.]
variable[lineno] assign[=] name[node].lineno
if compare[name[lineno] greater[>] call[name[len], parameter[name[self]._lines]]] begin[:]
return[<ast.UnaryOp object at 0x7da20c6e50f0>]
variable[wsindent] assign[=] call[name[self]._wsregexp.match, parameter[call[name[self]._lines][binary_operation[name[lineno] - constant[1]]]]]
return[call[name[len], parameter[call[name[wsindent].group, parameter[constant[1]]]]]] | keyword[def] identifier[_get_indent] ( identifier[self] , identifier[node] ):
literal[string]
identifier[lineno] = identifier[node] . identifier[lineno]
keyword[if] identifier[lineno] > identifier[len] ( identifier[self] . identifier[_lines] ):
keyword[return] - literal[int]
identifier[wsindent] = identifier[self] . identifier[_wsregexp] . identifier[match] ( identifier[self] . identifier[_lines] [ identifier[lineno] - literal[int] ])
keyword[return] identifier[len] ( identifier[wsindent] . identifier[group] ( literal[int] )) | def _get_indent(self, node):
"""Get node indentation level."""
lineno = node.lineno
if lineno > len(self._lines):
return -1 # depends on [control=['if'], data=[]]
wsindent = self._wsregexp.match(self._lines[lineno - 1])
return len(wsindent.group(1)) |
def _set_cspf_group_computation_mode(self, v, load=False):
  """
  Setter method for cspf_group_computation_mode, mapped from YANG variable /mpls_state/policy/cspf_group_computation_mode (mpls-cspf-grp-comp-mode)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_cspf_group_computation_mode is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_cspf_group_computation_mode() directly.

  YANG Description: CSPF Group Computation Mode
  """
  # NOTE(review): auto-generated pyangbind setter — keep byte-for-byte in sync
  # with the brocade-mpls-operational YANG model; do not hand-edit the logic.
  if hasattr(v, "_utype"):
    # Unwrap values that carry their own union-type coercion helper.
    v = v._utype(v)
  try:
    # Coerce the raw value into the restricted enumeration declared by the
    # YANG model; YANGDynClass raises TypeError/ValueError for invalid values.
    t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'mpls-cspf-grp-comp-mode-default': {'value': 0}, u'mpls-cspf-grp-comp-mode-exclude-groups': {'value': 2}, u'mpls-cspf-grp-comp-mode-max': {'value': 4}, u'mpls-cspf-grp-comp-mode-add-penalty': {'value': 1}, u'mpls-cspf-grp-comp-mode-high-cost': {'value': 3}},), is_leaf=True, yang_name="cspf-group-computation-mode", rest_name="cspf-group-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='mpls-cspf-grp-comp-mode', is_config=False)
  except (TypeError, ValueError):
    # Re-raise with a structured error payload describing the expected type.
    raise ValueError({
      'error-string': """cspf_group_computation_mode must be of a type compatible with mpls-cspf-grp-comp-mode""",
      'defined-type': "brocade-mpls-operational:mpls-cspf-grp-comp-mode",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'mpls-cspf-grp-comp-mode-default': {'value': 0}, u'mpls-cspf-grp-comp-mode-exclude-groups': {'value': 2}, u'mpls-cspf-grp-comp-mode-max': {'value': 4}, u'mpls-cspf-grp-comp-mode-add-penalty': {'value': 1}, u'mpls-cspf-grp-comp-mode-high-cost': {'value': 3}},), is_leaf=True, yang_name="cspf-group-computation-mode", rest_name="cspf-group-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='mpls-cspf-grp-comp-mode', is_config=False)""",
    })

  self.__cspf_group_computation_mode = t
  if hasattr(self, '_set'):
    # Propagate the change through pyangbind's notification hook when present.
    self._set()
constant[
Setter method for cspf_group_computation_mode, mapped from YANG variable /mpls_state/policy/cspf_group_computation_mode (mpls-cspf-grp-comp-mode)
If this variable is read-only (config: false) in the
source YANG file, then _set_cspf_group_computation_mode is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_cspf_group_computation_mode() directly.
YANG Description: CSPF Group Computation Mode
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da207f9a410>
name[self].__cspf_group_computation_mode assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_cspf_group_computation_mode] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[unicode] , identifier[restriction_type] = literal[string] , identifier[restriction_arg] ={ literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }},), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[False] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__cspf_group_computation_mode] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_cspf_group_computation_mode(self, v, load=False):
"""
Setter method for cspf_group_computation_mode, mapped from YANG variable /mpls_state/policy/cspf_group_computation_mode (mpls-cspf-grp-comp-mode)
If this variable is read-only (config: false) in the
source YANG file, then _set_cspf_group_computation_mode is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_cspf_group_computation_mode() directly.
YANG Description: CSPF Group Computation Mode
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_type='dict_key', restriction_arg={u'mpls-cspf-grp-comp-mode-default': {'value': 0}, u'mpls-cspf-grp-comp-mode-exclude-groups': {'value': 2}, u'mpls-cspf-grp-comp-mode-max': {'value': 4}, u'mpls-cspf-grp-comp-mode-add-penalty': {'value': 1}, u'mpls-cspf-grp-comp-mode-high-cost': {'value': 3}}), is_leaf=True, yang_name='cspf-group-computation-mode', rest_name='cspf-group-computation-mode', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='mpls-cspf-grp-comp-mode', is_config=False) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'cspf_group_computation_mode must be of a type compatible with mpls-cspf-grp-comp-mode', 'defined-type': 'brocade-mpls-operational:mpls-cspf-grp-comp-mode', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u\'mpls-cspf-grp-comp-mode-default\': {\'value\': 0}, u\'mpls-cspf-grp-comp-mode-exclude-groups\': {\'value\': 2}, u\'mpls-cspf-grp-comp-mode-max\': {\'value\': 4}, u\'mpls-cspf-grp-comp-mode-add-penalty\': {\'value\': 1}, u\'mpls-cspf-grp-comp-mode-high-cost\': {\'value\': 3}},), is_leaf=True, yang_name="cspf-group-computation-mode", rest_name="cspf-group-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace=\'urn:brocade.com:mgmt:brocade-mpls-operational\', defining_module=\'brocade-mpls-operational\', yang_type=\'mpls-cspf-grp-comp-mode\', is_config=False)'}) # depends on [control=['except'], data=[]]
self.__cspf_group_computation_mode = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def stop(name, call=None):
    '''
    stop a machine by name

    :param name: name given to the machine
    :param call: call value in this case is 'action'
    :return: true if successful

    CLI Example:

    .. code-block:: bash

        salt-cloud -a stop vm_name
    '''
    conn = get_conn()
    # Resolve the cloud node record for this VM name, then issue the stop.
    server = get_node(conn, name)
    conn.stop_server(
        datacenter_id=get_datacenter_id(),
        server_id=server['id'],
    )
    return True
constant[
stop a machine by name
:param name: name given to the machine
:param call: call value in this case is 'action'
:return: true if successful
CLI Example:
.. code-block:: bash
salt-cloud -a stop vm_name
]
variable[datacenter_id] assign[=] call[name[get_datacenter_id], parameter[]]
variable[conn] assign[=] call[name[get_conn], parameter[]]
variable[node] assign[=] call[name[get_node], parameter[name[conn], name[name]]]
call[name[conn].stop_server, parameter[]]
return[constant[True]] | keyword[def] identifier[stop] ( identifier[name] , identifier[call] = keyword[None] ):
literal[string]
identifier[datacenter_id] = identifier[get_datacenter_id] ()
identifier[conn] = identifier[get_conn] ()
identifier[node] = identifier[get_node] ( identifier[conn] , identifier[name] )
identifier[conn] . identifier[stop_server] ( identifier[datacenter_id] = identifier[datacenter_id] , identifier[server_id] = identifier[node] [ literal[string] ])
keyword[return] keyword[True] | def stop(name, call=None):
"""
stop a machine by name
:param name: name given to the machine
:param call: call value in this case is 'action'
:return: true if successful
CLI Example:
.. code-block:: bash
salt-cloud -a stop vm_name
"""
datacenter_id = get_datacenter_id()
conn = get_conn()
node = get_node(conn, name)
conn.stop_server(datacenter_id=datacenter_id, server_id=node['id'])
return True |
def register(style, func=None):
    """Register a pinyin-style implementation.

    Usable either as a decorator::

        @register('echo')
        def echo(pinyin, **kwargs):
            return pinyin

    or as a direct call::

        register('echo', echo)
    """
    if func is not None:
        # Direct-call form: record the implementation and return nothing.
        _registry[style] = func
        return

    def decorator(fn):
        # Decorator form: the undecorated function is what gets registered.
        _registry[style] = fn

        @wraps(fn)
        def wrapper(pinyin, **kwargs):
            return fn(pinyin, **kwargs)

        return wrapper

    return decorator
constant[注册一个拼音风格实现
::
@register('echo')
def echo(pinyin, **kwargs):
return pinyin
# or
register('echo', echo)
]
if compare[name[func] is_not constant[None]] begin[:]
call[name[_registry]][name[style]] assign[=] name[func]
return[None]
def function[decorator, parameter[func]]:
call[name[_registry]][name[style]] assign[=] name[func]
def function[wrapper, parameter[pinyin]]:
return[call[name[func], parameter[name[pinyin]]]]
return[name[wrapper]]
return[name[decorator]] | keyword[def] identifier[register] ( identifier[style] , identifier[func] = keyword[None] ):
literal[string]
keyword[if] identifier[func] keyword[is] keyword[not] keyword[None] :
identifier[_registry] [ identifier[style] ]= identifier[func]
keyword[return]
keyword[def] identifier[decorator] ( identifier[func] ):
identifier[_registry] [ identifier[style] ]= identifier[func]
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] ( identifier[pinyin] ,** identifier[kwargs] ):
keyword[return] identifier[func] ( identifier[pinyin] ,** identifier[kwargs] )
keyword[return] identifier[wrapper]
keyword[return] identifier[decorator] | def register(style, func=None):
"""注册一个拼音风格实现
::
@register('echo')
def echo(pinyin, **kwargs):
return pinyin
# or
register('echo', echo)
"""
if func is not None:
_registry[style] = func
return # depends on [control=['if'], data=['func']]
def decorator(func):
_registry[style] = func
@wraps(func)
def wrapper(pinyin, **kwargs):
return func(pinyin, **kwargs)
return wrapper
return decorator |
def parse_elements(elements):
    """Parse RMC position data elements.

    Args:
        elements (list): Data values for position
    Returns:
        Position: Position object representing data
    Raises:
        ValueError: If the element count is wrong or the magnetic
            variation direction flag is inconsistent.
    """
    if len(elements) not in (11, 12):
        raise ValueError('Invalid RMC position data')
    # elements[0] is an hhmmss string; split into 2-char fields.
    time = datetime.time(*[int(elements[0][i:i + 2])
                           for i in range(0, 6, 2)])
    active = elements[1] == 'A'
    # Latitude and longitude are checked for validity during Fix
    # instantiation
    latitude = parse_latitude(elements[2], elements[3])
    longitude = parse_longitude(elements[4], elements[5])
    speed = float(elements[6])
    track = float(elements[7])
    # elements[8] is a ddmmyy string; two-digit years are taken as 20xx.
    date = datetime.date(2000 + int(elements[8][4:6]),
                         int(elements[8][2:4]), int(elements[8][:2]))
    variation = float(elements[9]) if elements[9] != '' else None
    if elements[10] == 'W':
        # Bug fix: previously `-variation` was applied unconditionally and
        # raised TypeError when the variation field was empty (None).
        if variation is not None:
            variation = -variation
    elif variation and elements[10] != 'E':
        raise ValueError('Incorrect variation value %r'
                         % elements[10])
    mode = elements[11] if len(elements) == 12 else None
    return Position(time, active, latitude, longitude, speed, track, date,
                    variation, mode)
constant[Parse position data elements.
Args:
elements (list): Data values for position
Returns:
Position: Position object representing data
]
if <ast.UnaryOp object at 0x7da18dc9b850> begin[:]
<ast.Raise object at 0x7da18dc9a9e0>
variable[time] assign[=] call[name[datetime].time, parameter[<ast.Starred object at 0x7da18dc99b40>]]
variable[active] assign[=] <ast.IfExp object at 0x7da18dc98760>
variable[latitude] assign[=] call[name[parse_latitude], parameter[call[name[elements]][constant[2]], call[name[elements]][constant[3]]]]
variable[longitude] assign[=] call[name[parse_longitude], parameter[call[name[elements]][constant[4]], call[name[elements]][constant[5]]]]
variable[speed] assign[=] call[name[float], parameter[call[name[elements]][constant[6]]]]
variable[track] assign[=] call[name[float], parameter[call[name[elements]][constant[7]]]]
variable[date] assign[=] call[name[datetime].date, parameter[binary_operation[constant[2000] + call[name[int], parameter[call[call[name[elements]][constant[8]]][<ast.Slice object at 0x7da18dc9ae60>]]]], call[name[int], parameter[call[call[name[elements]][constant[8]]][<ast.Slice object at 0x7da18dc9bc40>]]], call[name[int], parameter[call[call[name[elements]][constant[8]]][<ast.Slice object at 0x7da18dc99e10>]]]]]
variable[variation] assign[=] <ast.IfExp object at 0x7da18dc98670>
if compare[call[name[elements]][constant[10]] equal[==] constant[W]] begin[:]
variable[variation] assign[=] <ast.UnaryOp object at 0x7da18dc9a890>
variable[mode] assign[=] <ast.IfExp object at 0x7da1b09243a0>
return[call[name[Position], parameter[name[time], name[active], name[latitude], name[longitude], name[speed], name[track], name[date], name[variation], name[mode]]]] | keyword[def] identifier[parse_elements] ( identifier[elements] ):
literal[string]
keyword[if] keyword[not] identifier[len] ( identifier[elements] ) keyword[in] ( literal[int] , literal[int] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[time] = identifier[datetime] . identifier[time] (*[ identifier[int] ( identifier[elements] [ literal[int] ][ identifier[i] : identifier[i] + literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] , literal[int] )])
identifier[active] = keyword[True] keyword[if] identifier[elements] [ literal[int] ]== literal[string] keyword[else] keyword[False]
identifier[latitude] = identifier[parse_latitude] ( identifier[elements] [ literal[int] ], identifier[elements] [ literal[int] ])
identifier[longitude] = identifier[parse_longitude] ( identifier[elements] [ literal[int] ], identifier[elements] [ literal[int] ])
identifier[speed] = identifier[float] ( identifier[elements] [ literal[int] ])
identifier[track] = identifier[float] ( identifier[elements] [ literal[int] ])
identifier[date] = identifier[datetime] . identifier[date] ( literal[int] + identifier[int] ( identifier[elements] [ literal[int] ][ literal[int] : literal[int] ]),
identifier[int] ( identifier[elements] [ literal[int] ][ literal[int] : literal[int] ]), identifier[int] ( identifier[elements] [ literal[int] ][: literal[int] ]))
identifier[variation] = identifier[float] ( identifier[elements] [ literal[int] ]) keyword[if] keyword[not] identifier[elements] [ literal[int] ]== literal[string] keyword[else] keyword[None]
keyword[if] identifier[elements] [ literal[int] ]== literal[string] :
identifier[variation] =- identifier[variation]
keyword[elif] identifier[variation] keyword[and] keyword[not] identifier[elements] [ literal[int] ]== literal[string] :
keyword[raise] identifier[ValueError] ( literal[string]
% identifier[elements] [ literal[int] ])
identifier[mode] = identifier[elements] [ literal[int] ] keyword[if] identifier[len] ( identifier[elements] )== literal[int] keyword[else] keyword[None]
keyword[return] identifier[Position] ( identifier[time] , identifier[active] , identifier[latitude] , identifier[longitude] , identifier[speed] , identifier[track] , identifier[date] ,
identifier[variation] , identifier[mode] ) | def parse_elements(elements):
"""Parse position data elements.
Args:
elements (list): Data values for position
Returns:
Position: Position object representing data
"""
if not len(elements) in (11, 12):
raise ValueError('Invalid RMC position data') # depends on [control=['if'], data=[]]
time = datetime.time(*[int(elements[0][i:i + 2]) for i in range(0, 6, 2)])
active = True if elements[1] == 'A' else False
# Latitude and longitude are checked for validity during Fix
# instantiation
latitude = parse_latitude(elements[2], elements[3])
longitude = parse_longitude(elements[4], elements[5])
speed = float(elements[6])
track = float(elements[7])
date = datetime.date(2000 + int(elements[8][4:6]), int(elements[8][2:4]), int(elements[8][:2]))
variation = float(elements[9]) if not elements[9] == '' else None
if elements[10] == 'W':
variation = -variation # depends on [control=['if'], data=[]]
elif variation and (not elements[10] == 'E'):
raise ValueError('Incorrect variation value %r' % elements[10]) # depends on [control=['if'], data=[]]
mode = elements[11] if len(elements) == 12 else None
return Position(time, active, latitude, longitude, speed, track, date, variation, mode) |
def _set_show_mpls_dynamic_bypass(self, v, load=False):
  """
  Setter method for show_mpls_dynamic_bypass, mapped from YANG variable /brocade_mpls_rpc/show_mpls_dynamic_bypass (rpc)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_show_mpls_dynamic_bypass is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_show_mpls_dynamic_bypass() directly.
  """
  # NOTE(review): auto-generated pyangbind setter — keep byte-for-byte in sync
  # with the brocade-mpls YANG model; do not hand-edit the logic.
  if hasattr(v, "_utype"):
    # Unwrap values that carry their own union-type coercion helper.
    v = v._utype(v)
  try:
    # Wrap the value in the generated RPC container type; YANGDynClass raises
    # TypeError/ValueError when the value is not rpc-compatible.
    t = YANGDynClass(v,base=show_mpls_dynamic_bypass.show_mpls_dynamic_bypass, is_leaf=True, yang_name="show-mpls-dynamic-bypass", rest_name="show-mpls-dynamic-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsDynamicBypass'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)
  except (TypeError, ValueError):
    # Re-raise with a structured error payload describing the expected type.
    raise ValueError({
      'error-string': """show_mpls_dynamic_bypass must be of a type compatible with rpc""",
      'defined-type': "rpc",
      'generated-type': """YANGDynClass(base=show_mpls_dynamic_bypass.show_mpls_dynamic_bypass, is_leaf=True, yang_name="show-mpls-dynamic-bypass", rest_name="show-mpls-dynamic-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsDynamicBypass'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)""",
    })

  self.__show_mpls_dynamic_bypass = t
  if hasattr(self, '_set'):
    # Propagate the change through pyangbind's notification hook when present.
    self._set()
constant[
Setter method for show_mpls_dynamic_bypass, mapped from YANG variable /brocade_mpls_rpc/show_mpls_dynamic_bypass (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_show_mpls_dynamic_bypass is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_show_mpls_dynamic_bypass() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18f00fcd0>
name[self].__show_mpls_dynamic_bypass assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_show_mpls_dynamic_bypass] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[show_mpls_dynamic_bypass] . identifier[show_mpls_dynamic_bypass] , identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[False] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__show_mpls_dynamic_bypass] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_show_mpls_dynamic_bypass(self, v, load=False):
"""
Setter method for show_mpls_dynamic_bypass, mapped from YANG variable /brocade_mpls_rpc/show_mpls_dynamic_bypass (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_show_mpls_dynamic_bypass is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_show_mpls_dynamic_bypass() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=show_mpls_dynamic_bypass.show_mpls_dynamic_bypass, is_leaf=True, yang_name='show-mpls-dynamic-bypass', rest_name='show-mpls-dynamic-bypass', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsDynamicBypass'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'show_mpls_dynamic_bypass must be of a type compatible with rpc', 'defined-type': 'rpc', 'generated-type': 'YANGDynClass(base=show_mpls_dynamic_bypass.show_mpls_dynamic_bypass, is_leaf=True, yang_name="show-mpls-dynamic-bypass", rest_name="show-mpls-dynamic-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u\'tailf-common\': {u\'hidden\': u\'full\', u\'actionpoint\': u\'showMplsDynamicBypass\'}}, namespace=\'urn:brocade.com:mgmt:brocade-mpls\', defining_module=\'brocade-mpls\', yang_type=\'rpc\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__show_mpls_dynamic_bypass = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def tokenize(self, text):
    """Split *text* into WordPiece sub-tokens.

    Performs greedy longest-match-first tokenization against the
    vocabulary. Continuation pieces are prefixed with "##".

    For example:
      input = "unaffable"
      output = ["un", "##aff", "##able"]

    Args:
      text: A single token or whitespace separated tokens. This should have
        already been passed through `BasicTokenizer`.

    Returns:
      A list of wordpiece tokens.
    """
    pieces = []
    for word in whitespace_tokenize(text):
        chars = list(word)
        # Overlong words map straight to the unknown token.
        if len(chars) > self.max_input_chars_per_word:
            pieces.append(self.unk_token)
            continue

        word_pieces = []
        unknown = False
        cursor = 0
        while cursor < len(chars):
            # Greedily find the longest vocab entry starting at `cursor`.
            stop = len(chars)
            match = None
            while cursor < stop:
                candidate = "".join(chars[cursor:stop])
                if cursor > 0:
                    candidate = "##" + candidate
                if candidate in self.vocab:
                    match = candidate
                    break
                stop -= 1
            if match is None:
                # No prefix of the remainder is in the vocab: give up on
                # this word entirely and emit the unknown token instead.
                unknown = True
                break
            word_pieces.append(match)
            cursor = stop

        if unknown:
            pieces.append(self.unk_token)
        else:
            pieces.extend(word_pieces)
    return pieces
constant[Tokenizes a piece of text into its word pieces.
This uses a greedy longest-match-first algorithm to perform tokenization
using the given vocabulary.
For example:
input = "unaffable"
output = ["un", "##aff", "##able"]
Args:
text: A single token or whitespace separated tokens. This should have
already been passed through `BasicTokenizer`.
Returns:
A list of wordpiece tokens.
]
variable[output_tokens] assign[=] list[[]]
for taget[name[token]] in starred[call[name[whitespace_tokenize], parameter[name[text]]]] begin[:]
variable[chars] assign[=] call[name[list], parameter[name[token]]]
if compare[call[name[len], parameter[name[chars]]] greater[>] name[self].max_input_chars_per_word] begin[:]
call[name[output_tokens].append, parameter[name[self].unk_token]]
continue
variable[is_bad] assign[=] constant[False]
variable[start] assign[=] constant[0]
variable[sub_tokens] assign[=] list[[]]
while compare[name[start] less[<] call[name[len], parameter[name[chars]]]] begin[:]
variable[end] assign[=] call[name[len], parameter[name[chars]]]
variable[cur_substr] assign[=] constant[None]
while compare[name[start] less[<] name[end]] begin[:]
variable[substr] assign[=] call[constant[].join, parameter[call[name[chars]][<ast.Slice object at 0x7da20c7cae00>]]]
if compare[name[start] greater[>] constant[0]] begin[:]
variable[substr] assign[=] binary_operation[constant[##] + name[substr]]
if compare[name[substr] in name[self].vocab] begin[:]
variable[cur_substr] assign[=] name[substr]
break
<ast.AugAssign object at 0x7da1b2347640>
if compare[name[cur_substr] is constant[None]] begin[:]
variable[is_bad] assign[=] constant[True]
break
call[name[sub_tokens].append, parameter[name[cur_substr]]]
variable[start] assign[=] name[end]
if name[is_bad] begin[:]
call[name[output_tokens].append, parameter[name[self].unk_token]]
return[name[output_tokens]] | keyword[def] identifier[tokenize] ( identifier[self] , identifier[text] ):
literal[string]
identifier[output_tokens] =[]
keyword[for] identifier[token] keyword[in] identifier[whitespace_tokenize] ( identifier[text] ):
identifier[chars] = identifier[list] ( identifier[token] )
keyword[if] identifier[len] ( identifier[chars] )> identifier[self] . identifier[max_input_chars_per_word] :
identifier[output_tokens] . identifier[append] ( identifier[self] . identifier[unk_token] )
keyword[continue]
identifier[is_bad] = keyword[False]
identifier[start] = literal[int]
identifier[sub_tokens] =[]
keyword[while] identifier[start] < identifier[len] ( identifier[chars] ):
identifier[end] = identifier[len] ( identifier[chars] )
identifier[cur_substr] = keyword[None]
keyword[while] identifier[start] < identifier[end] :
identifier[substr] = literal[string] . identifier[join] ( identifier[chars] [ identifier[start] : identifier[end] ])
keyword[if] identifier[start] > literal[int] :
identifier[substr] = literal[string] + identifier[substr]
keyword[if] identifier[substr] keyword[in] identifier[self] . identifier[vocab] :
identifier[cur_substr] = identifier[substr]
keyword[break]
identifier[end] -= literal[int]
keyword[if] identifier[cur_substr] keyword[is] keyword[None] :
identifier[is_bad] = keyword[True]
keyword[break]
identifier[sub_tokens] . identifier[append] ( identifier[cur_substr] )
identifier[start] = identifier[end]
keyword[if] identifier[is_bad] :
identifier[output_tokens] . identifier[append] ( identifier[self] . identifier[unk_token] )
keyword[else] :
identifier[output_tokens] . identifier[extend] ( identifier[sub_tokens] )
keyword[return] identifier[output_tokens] | def tokenize(self, text):
"""Tokenizes a piece of text into its word pieces.
This uses a greedy longest-match-first algorithm to perform tokenization
using the given vocabulary.
For example:
input = "unaffable"
output = ["un", "##aff", "##able"]
Args:
text: A single token or whitespace separated tokens. This should have
already been passed through `BasicTokenizer`.
Returns:
A list of wordpiece tokens.
"""
output_tokens = []
for token in whitespace_tokenize(text):
chars = list(token)
if len(chars) > self.max_input_chars_per_word:
output_tokens.append(self.unk_token)
continue # depends on [control=['if'], data=[]]
is_bad = False
start = 0
sub_tokens = []
while start < len(chars):
end = len(chars)
cur_substr = None
while start < end:
substr = ''.join(chars[start:end])
if start > 0:
substr = '##' + substr # depends on [control=['if'], data=[]]
if substr in self.vocab:
cur_substr = substr
break # depends on [control=['if'], data=['substr']]
end -= 1 # depends on [control=['while'], data=['start', 'end']]
if cur_substr is None:
is_bad = True
break # depends on [control=['if'], data=[]]
sub_tokens.append(cur_substr)
start = end # depends on [control=['while'], data=['start']]
if is_bad:
output_tokens.append(self.unk_token) # depends on [control=['if'], data=[]]
else:
output_tokens.extend(sub_tokens) # depends on [control=['for'], data=['token']]
return output_tokens |
def corr_bin_genes(self, number_of_features=None, input_gene=None):
"""A (hacky) method for binning groups of genes correlated along the
SAM manifold.
Parameters
----------
number_of_features - int, optional, default None
The number of genes to bin. Capped at 5000 due to memory
considerations.
input_gene - str, optional, default None
If not None, use this gene as the first seed when growing the
correlation bins.
"""
weights = self.adata.var['spatial_dispersions'].values
all_gene_names = np.array(list(self.adata.var_names))
D_avg = self.adata.layers['X_knn_avg']
idx2 = np.argsort(-weights)[:weights[weights > 0].size]
if(number_of_features is None or number_of_features > idx2.size):
number_of_features = idx2.size
if number_of_features > 1000:
number_of_features = 1000
if(input_gene is not None):
input_gene = np.where(all_gene_names == input_gene)[0]
if(input_gene.size == 0):
print(
"Gene note found in the filtered dataset. Note "
"that genes are case sensitive.")
return
seeds = [np.array([input_gene])]
pw_corr = np.corrcoef(
D_avg[:, idx2[:number_of_features]].T.toarray())
for i in range(1, number_of_features):
flag = False
maxd = np.mean(pw_corr[i, :][pw_corr[i, :] > 0])
maxi = 0
for j in range(len(seeds)):
if(pw_corr[np.where(idx2 == seeds[j][0])[0], i]
> maxd):
maxd = pw_corr[np.where(idx2 == seeds[j][0])[0], i]
maxi = j
flag = True
if(not flag):
seeds.append(np.array([idx2[i]]))
else:
seeds[maxi] = np.append(seeds[maxi], idx2[i])
geneID_groups = []
for i in range(len(seeds)):
geneID_groups.append(all_gene_names[seeds[i]])
return geneID_groups[0]
else:
seeds = [np.array([idx2[0]])]
pw_corr = np.corrcoef(
D_avg[:, idx2[:number_of_features]].T.toarray())
for i in range(1, number_of_features):
flag = False
maxd = np.mean(pw_corr[i, :][pw_corr[i, :] > 0])
maxi = 0
for j in range(len(seeds)):
if(pw_corr[np.where(idx2 == seeds[j][0])[0], i]
> maxd):
maxd = pw_corr[np.where(idx2 == seeds[j][0])[0], i]
maxi = j
flag = True
if(not flag):
seeds.append(np.array([idx2[i]]))
else:
seeds[maxi] = np.append(seeds[maxi], idx2[i])
geneID_groups = []
for i in range(len(seeds)):
geneID_groups.append(
all_gene_names[seeds[i]])
self.adata.uns['gene_groups'] = geneID_groups
return geneID_groups | def function[corr_bin_genes, parameter[self, number_of_features, input_gene]]:
constant[A (hacky) method for binning groups of genes correlated along the
SAM manifold.
Parameters
----------
number_of_features - int, optional, default None
The number of genes to bin. Capped at 5000 due to memory
considerations.
input_gene - str, optional, default None
If not None, use this gene as the first seed when growing the
correlation bins.
]
variable[weights] assign[=] call[name[self].adata.var][constant[spatial_dispersions]].values
variable[all_gene_names] assign[=] call[name[np].array, parameter[call[name[list], parameter[name[self].adata.var_names]]]]
variable[D_avg] assign[=] call[name[self].adata.layers][constant[X_knn_avg]]
variable[idx2] assign[=] call[call[name[np].argsort, parameter[<ast.UnaryOp object at 0x7da1b24a3b50>]]][<ast.Slice object at 0x7da1b24a3a90>]
if <ast.BoolOp object at 0x7da1b24a3910> begin[:]
variable[number_of_features] assign[=] name[idx2].size
if compare[name[number_of_features] greater[>] constant[1000]] begin[:]
variable[number_of_features] assign[=] constant[1000]
if compare[name[input_gene] is_not constant[None]] begin[:]
variable[input_gene] assign[=] call[call[name[np].where, parameter[compare[name[all_gene_names] equal[==] name[input_gene]]]]][constant[0]]
if compare[name[input_gene].size equal[==] constant[0]] begin[:]
call[name[print], parameter[constant[Gene note found in the filtered dataset. Note that genes are case sensitive.]]]
return[None]
variable[seeds] assign[=] list[[<ast.Call object at 0x7da1b24a3250>]]
variable[pw_corr] assign[=] call[name[np].corrcoef, parameter[call[call[name[D_avg]][tuple[[<ast.Slice object at 0x7da1b24a1b70>, <ast.Subscript object at 0x7da1b24a1ab0>]]].T.toarray, parameter[]]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], name[number_of_features]]]] begin[:]
variable[flag] assign[=] constant[False]
variable[maxd] assign[=] call[name[np].mean, parameter[call[call[name[pw_corr]][tuple[[<ast.Name object at 0x7da1b24a2080>, <ast.Slice object at 0x7da1b24a2140>]]]][compare[call[name[pw_corr]][tuple[[<ast.Name object at 0x7da1b24a2a40>, <ast.Slice object at 0x7da1b24a1d80>]]] greater[>] constant[0]]]]]
variable[maxi] assign[=] constant[0]
for taget[name[j]] in starred[call[name[range], parameter[call[name[len], parameter[name[seeds]]]]]] begin[:]
if compare[call[name[pw_corr]][tuple[[<ast.Subscript object at 0x7da1b24a2b60>, <ast.Name object at 0x7da1b24a18d0>]]] greater[>] name[maxd]] begin[:]
variable[maxd] assign[=] call[name[pw_corr]][tuple[[<ast.Subscript object at 0x7da1b24a3f70>, <ast.Name object at 0x7da1b24a10f0>]]]
variable[maxi] assign[=] name[j]
variable[flag] assign[=] constant[True]
if <ast.UnaryOp object at 0x7da1b24a0fd0> begin[:]
call[name[seeds].append, parameter[call[name[np].array, parameter[list[[<ast.Subscript object at 0x7da1b24a1360>]]]]]]
variable[geneID_groups] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[seeds]]]]]] begin[:]
call[name[geneID_groups].append, parameter[call[name[all_gene_names]][call[name[seeds]][name[i]]]]]
return[call[name[geneID_groups]][constant[0]]] | keyword[def] identifier[corr_bin_genes] ( identifier[self] , identifier[number_of_features] = keyword[None] , identifier[input_gene] = keyword[None] ):
literal[string]
identifier[weights] = identifier[self] . identifier[adata] . identifier[var] [ literal[string] ]. identifier[values]
identifier[all_gene_names] = identifier[np] . identifier[array] ( identifier[list] ( identifier[self] . identifier[adata] . identifier[var_names] ))
identifier[D_avg] = identifier[self] . identifier[adata] . identifier[layers] [ literal[string] ]
identifier[idx2] = identifier[np] . identifier[argsort] (- identifier[weights] )[: identifier[weights] [ identifier[weights] > literal[int] ]. identifier[size] ]
keyword[if] ( identifier[number_of_features] keyword[is] keyword[None] keyword[or] identifier[number_of_features] > identifier[idx2] . identifier[size] ):
identifier[number_of_features] = identifier[idx2] . identifier[size]
keyword[if] identifier[number_of_features] > literal[int] :
identifier[number_of_features] = literal[int]
keyword[if] ( identifier[input_gene] keyword[is] keyword[not] keyword[None] ):
identifier[input_gene] = identifier[np] . identifier[where] ( identifier[all_gene_names] == identifier[input_gene] )[ literal[int] ]
keyword[if] ( identifier[input_gene] . identifier[size] == literal[int] ):
identifier[print] (
literal[string]
literal[string] )
keyword[return]
identifier[seeds] =[ identifier[np] . identifier[array] ([ identifier[input_gene] ])]
identifier[pw_corr] = identifier[np] . identifier[corrcoef] (
identifier[D_avg] [:, identifier[idx2] [: identifier[number_of_features] ]]. identifier[T] . identifier[toarray] ())
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[number_of_features] ):
identifier[flag] = keyword[False]
identifier[maxd] = identifier[np] . identifier[mean] ( identifier[pw_corr] [ identifier[i] ,:][ identifier[pw_corr] [ identifier[i] ,:]> literal[int] ])
identifier[maxi] = literal[int]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[seeds] )):
keyword[if] ( identifier[pw_corr] [ identifier[np] . identifier[where] ( identifier[idx2] == identifier[seeds] [ identifier[j] ][ literal[int] ])[ literal[int] ], identifier[i] ]
> identifier[maxd] ):
identifier[maxd] = identifier[pw_corr] [ identifier[np] . identifier[where] ( identifier[idx2] == identifier[seeds] [ identifier[j] ][ literal[int] ])[ literal[int] ], identifier[i] ]
identifier[maxi] = identifier[j]
identifier[flag] = keyword[True]
keyword[if] ( keyword[not] identifier[flag] ):
identifier[seeds] . identifier[append] ( identifier[np] . identifier[array] ([ identifier[idx2] [ identifier[i] ]]))
keyword[else] :
identifier[seeds] [ identifier[maxi] ]= identifier[np] . identifier[append] ( identifier[seeds] [ identifier[maxi] ], identifier[idx2] [ identifier[i] ])
identifier[geneID_groups] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[seeds] )):
identifier[geneID_groups] . identifier[append] ( identifier[all_gene_names] [ identifier[seeds] [ identifier[i] ]])
keyword[return] identifier[geneID_groups] [ literal[int] ]
keyword[else] :
identifier[seeds] =[ identifier[np] . identifier[array] ([ identifier[idx2] [ literal[int] ]])]
identifier[pw_corr] = identifier[np] . identifier[corrcoef] (
identifier[D_avg] [:, identifier[idx2] [: identifier[number_of_features] ]]. identifier[T] . identifier[toarray] ())
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[number_of_features] ):
identifier[flag] = keyword[False]
identifier[maxd] = identifier[np] . identifier[mean] ( identifier[pw_corr] [ identifier[i] ,:][ identifier[pw_corr] [ identifier[i] ,:]> literal[int] ])
identifier[maxi] = literal[int]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[seeds] )):
keyword[if] ( identifier[pw_corr] [ identifier[np] . identifier[where] ( identifier[idx2] == identifier[seeds] [ identifier[j] ][ literal[int] ])[ literal[int] ], identifier[i] ]
> identifier[maxd] ):
identifier[maxd] = identifier[pw_corr] [ identifier[np] . identifier[where] ( identifier[idx2] == identifier[seeds] [ identifier[j] ][ literal[int] ])[ literal[int] ], identifier[i] ]
identifier[maxi] = identifier[j]
identifier[flag] = keyword[True]
keyword[if] ( keyword[not] identifier[flag] ):
identifier[seeds] . identifier[append] ( identifier[np] . identifier[array] ([ identifier[idx2] [ identifier[i] ]]))
keyword[else] :
identifier[seeds] [ identifier[maxi] ]= identifier[np] . identifier[append] ( identifier[seeds] [ identifier[maxi] ], identifier[idx2] [ identifier[i] ])
identifier[geneID_groups] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[seeds] )):
identifier[geneID_groups] . identifier[append] (
identifier[all_gene_names] [ identifier[seeds] [ identifier[i] ]])
identifier[self] . identifier[adata] . identifier[uns] [ literal[string] ]= identifier[geneID_groups]
keyword[return] identifier[geneID_groups] | def corr_bin_genes(self, number_of_features=None, input_gene=None):
"""A (hacky) method for binning groups of genes correlated along the
SAM manifold.
Parameters
----------
number_of_features - int, optional, default None
The number of genes to bin. Capped at 5000 due to memory
considerations.
input_gene - str, optional, default None
If not None, use this gene as the first seed when growing the
correlation bins.
"""
weights = self.adata.var['spatial_dispersions'].values
all_gene_names = np.array(list(self.adata.var_names))
D_avg = self.adata.layers['X_knn_avg']
idx2 = np.argsort(-weights)[:weights[weights > 0].size]
if number_of_features is None or number_of_features > idx2.size:
number_of_features = idx2.size # depends on [control=['if'], data=[]]
if number_of_features > 1000:
number_of_features = 1000 # depends on [control=['if'], data=['number_of_features']]
if input_gene is not None:
input_gene = np.where(all_gene_names == input_gene)[0]
if input_gene.size == 0:
print('Gene note found in the filtered dataset. Note that genes are case sensitive.')
return # depends on [control=['if'], data=[]]
seeds = [np.array([input_gene])]
pw_corr = np.corrcoef(D_avg[:, idx2[:number_of_features]].T.toarray())
for i in range(1, number_of_features):
flag = False
maxd = np.mean(pw_corr[i, :][pw_corr[i, :] > 0])
maxi = 0
for j in range(len(seeds)):
if pw_corr[np.where(idx2 == seeds[j][0])[0], i] > maxd:
maxd = pw_corr[np.where(idx2 == seeds[j][0])[0], i]
maxi = j
flag = True # depends on [control=['if'], data=['maxd']] # depends on [control=['for'], data=['j']]
if not flag:
seeds.append(np.array([idx2[i]])) # depends on [control=['if'], data=[]]
else:
seeds[maxi] = np.append(seeds[maxi], idx2[i]) # depends on [control=['for'], data=['i']]
geneID_groups = []
for i in range(len(seeds)):
geneID_groups.append(all_gene_names[seeds[i]]) # depends on [control=['for'], data=['i']]
return geneID_groups[0] # depends on [control=['if'], data=['input_gene']]
else:
seeds = [np.array([idx2[0]])]
pw_corr = np.corrcoef(D_avg[:, idx2[:number_of_features]].T.toarray())
for i in range(1, number_of_features):
flag = False
maxd = np.mean(pw_corr[i, :][pw_corr[i, :] > 0])
maxi = 0
for j in range(len(seeds)):
if pw_corr[np.where(idx2 == seeds[j][0])[0], i] > maxd:
maxd = pw_corr[np.where(idx2 == seeds[j][0])[0], i]
maxi = j
flag = True # depends on [control=['if'], data=['maxd']] # depends on [control=['for'], data=['j']]
if not flag:
seeds.append(np.array([idx2[i]])) # depends on [control=['if'], data=[]]
else:
seeds[maxi] = np.append(seeds[maxi], idx2[i]) # depends on [control=['for'], data=['i']]
geneID_groups = []
for i in range(len(seeds)):
geneID_groups.append(all_gene_names[seeds[i]]) # depends on [control=['for'], data=['i']]
self.adata.uns['gene_groups'] = geneID_groups
return geneID_groups |
def _confirm_dialog(self, prompt):
''' Prompts for a 'yes' or 'no' to given prompt. '''
response = raw_input(prompt).strip().lower()
valid = {'y': True, 'ye': True, 'yes': True, 'n': False, 'no': False}
while True:
try:
return valid[response]
except:
response = raw_input("Please respond 'y' or 'n': ").strip().lower() | def function[_confirm_dialog, parameter[self, prompt]]:
constant[ Prompts for a 'yes' or 'no' to given prompt. ]
variable[response] assign[=] call[call[call[name[raw_input], parameter[name[prompt]]].strip, parameter[]].lower, parameter[]]
variable[valid] assign[=] dictionary[[<ast.Constant object at 0x7da1b07473d0>, <ast.Constant object at 0x7da1b0745330>, <ast.Constant object at 0x7da1b0745c60>, <ast.Constant object at 0x7da1b07463b0>, <ast.Constant object at 0x7da1b0747e80>], [<ast.Constant object at 0x7da1b0745b10>, <ast.Constant object at 0x7da1b0746170>, <ast.Constant object at 0x7da1b0745d20>, <ast.Constant object at 0x7da1b07467a0>, <ast.Constant object at 0x7da1b07466e0>]]
while constant[True] begin[:]
<ast.Try object at 0x7da1b0745870> | keyword[def] identifier[_confirm_dialog] ( identifier[self] , identifier[prompt] ):
literal[string]
identifier[response] = identifier[raw_input] ( identifier[prompt] ). identifier[strip] (). identifier[lower] ()
identifier[valid] ={ literal[string] : keyword[True] , literal[string] : keyword[True] , literal[string] : keyword[True] , literal[string] : keyword[False] , literal[string] : keyword[False] }
keyword[while] keyword[True] :
keyword[try] :
keyword[return] identifier[valid] [ identifier[response] ]
keyword[except] :
identifier[response] = identifier[raw_input] ( literal[string] ). identifier[strip] (). identifier[lower] () | def _confirm_dialog(self, prompt):
""" Prompts for a 'yes' or 'no' to given prompt. """
response = raw_input(prompt).strip().lower()
valid = {'y': True, 'ye': True, 'yes': True, 'n': False, 'no': False}
while True:
try:
return valid[response] # depends on [control=['try'], data=[]]
except:
response = raw_input("Please respond 'y' or 'n': ").strip().lower() # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] |
def Create(self, body, path, type, id, initial_headers, options=None):
"""Creates a Azure Cosmos resource and returns it.
:param dict body:
:param str path:
:param str type:
:param str id:
:param dict initial_headers:
:param dict options:
The request options for the request.
:return:
The created Azure Cosmos resource.
:rtype:
dict
"""
if options is None:
options = {}
initial_headers = initial_headers or self.default_headers
headers = base.GetHeaders(self,
initial_headers,
'post',
path,
id,
type,
options)
# Create will use WriteEndpoint since it uses POST operation
request = request_object._RequestObject(type, documents._OperationType.Create)
result, self.last_response_headers = self.__Post(path,
request,
body,
headers)
# update session for write request
self._UpdateSessionIfRequired(headers, result, self.last_response_headers)
return result | def function[Create, parameter[self, body, path, type, id, initial_headers, options]]:
constant[Creates a Azure Cosmos resource and returns it.
:param dict body:
:param str path:
:param str type:
:param str id:
:param dict initial_headers:
:param dict options:
The request options for the request.
:return:
The created Azure Cosmos resource.
:rtype:
dict
]
if compare[name[options] is constant[None]] begin[:]
variable[options] assign[=] dictionary[[], []]
variable[initial_headers] assign[=] <ast.BoolOp object at 0x7da1b172c1f0>
variable[headers] assign[=] call[name[base].GetHeaders, parameter[name[self], name[initial_headers], constant[post], name[path], name[id], name[type], name[options]]]
variable[request] assign[=] call[name[request_object]._RequestObject, parameter[name[type], name[documents]._OperationType.Create]]
<ast.Tuple object at 0x7da1b172cb50> assign[=] call[name[self].__Post, parameter[name[path], name[request], name[body], name[headers]]]
call[name[self]._UpdateSessionIfRequired, parameter[name[headers], name[result], name[self].last_response_headers]]
return[name[result]] | keyword[def] identifier[Create] ( identifier[self] , identifier[body] , identifier[path] , identifier[type] , identifier[id] , identifier[initial_headers] , identifier[options] = keyword[None] ):
literal[string]
keyword[if] identifier[options] keyword[is] keyword[None] :
identifier[options] ={}
identifier[initial_headers] = identifier[initial_headers] keyword[or] identifier[self] . identifier[default_headers]
identifier[headers] = identifier[base] . identifier[GetHeaders] ( identifier[self] ,
identifier[initial_headers] ,
literal[string] ,
identifier[path] ,
identifier[id] ,
identifier[type] ,
identifier[options] )
identifier[request] = identifier[request_object] . identifier[_RequestObject] ( identifier[type] , identifier[documents] . identifier[_OperationType] . identifier[Create] )
identifier[result] , identifier[self] . identifier[last_response_headers] = identifier[self] . identifier[__Post] ( identifier[path] ,
identifier[request] ,
identifier[body] ,
identifier[headers] )
identifier[self] . identifier[_UpdateSessionIfRequired] ( identifier[headers] , identifier[result] , identifier[self] . identifier[last_response_headers] )
keyword[return] identifier[result] | def Create(self, body, path, type, id, initial_headers, options=None):
"""Creates a Azure Cosmos resource and returns it.
:param dict body:
:param str path:
:param str type:
:param str id:
:param dict initial_headers:
:param dict options:
The request options for the request.
:return:
The created Azure Cosmos resource.
:rtype:
dict
"""
if options is None:
options = {} # depends on [control=['if'], data=['options']]
initial_headers = initial_headers or self.default_headers
headers = base.GetHeaders(self, initial_headers, 'post', path, id, type, options)
# Create will use WriteEndpoint since it uses POST operation
request = request_object._RequestObject(type, documents._OperationType.Create)
(result, self.last_response_headers) = self.__Post(path, request, body, headers)
# update session for write request
self._UpdateSessionIfRequired(headers, result, self.last_response_headers)
return result |
def render_summary(self, include_title=True, request=None):
"""Render the traceback for the interactive console."""
title = ''
frames = []
classes = ['traceback']
if not self.frames:
classes.append('noframe-traceback')
if include_title:
if self.is_syntax_error:
title = text_('Syntax Error')
else:
title = text_('Traceback <small>(most recent call last)'
'</small>')
for frame in self.frames:
frames.append(
text_('<li%s>%s') % (
frame.info and text_(' title="%s"' % escape(frame.info)) or
text_(''),
frame.render()
))
if self.is_syntax_error:
description_wrapper = text_('<pre class=syntaxerror>%s</pre>')
else:
description_wrapper = text_('<blockquote>%s</blockquote>')
vars = {
'classes': text_(' '.join(classes)),
'title': title and text_('<h3 class="traceback">%s</h3>'
% title) or text_(''),
'frames': text_('\n'.join(frames)),
'description': description_wrapper % escape(self.exception),
}
app = request.app
template = app.ps.jinja2.env.get_template('debugtoolbar/exception_summary.html')
return template.render(app=app, request=request, **vars) | def function[render_summary, parameter[self, include_title, request]]:
constant[Render the traceback for the interactive console.]
variable[title] assign[=] constant[]
variable[frames] assign[=] list[[]]
variable[classes] assign[=] list[[<ast.Constant object at 0x7da18ede4280>]]
if <ast.UnaryOp object at 0x7da18ede7d30> begin[:]
call[name[classes].append, parameter[constant[noframe-traceback]]]
if name[include_title] begin[:]
if name[self].is_syntax_error begin[:]
variable[title] assign[=] call[name[text_], parameter[constant[Syntax Error]]]
for taget[name[frame]] in starred[name[self].frames] begin[:]
call[name[frames].append, parameter[binary_operation[call[name[text_], parameter[constant[<li%s>%s]]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BoolOp object at 0x7da2043466b0>, <ast.Call object at 0x7da204347790>]]]]]
if name[self].is_syntax_error begin[:]
variable[description_wrapper] assign[=] call[name[text_], parameter[constant[<pre class=syntaxerror>%s</pre>]]]
variable[vars] assign[=] dictionary[[<ast.Constant object at 0x7da204344b50>, <ast.Constant object at 0x7da204346020>, <ast.Constant object at 0x7da2043477c0>, <ast.Constant object at 0x7da204344460>], [<ast.Call object at 0x7da204346800>, <ast.BoolOp object at 0x7da204346ec0>, <ast.Call object at 0x7da204345c00>, <ast.BinOp object at 0x7da204347700>]]
variable[app] assign[=] name[request].app
variable[template] assign[=] call[name[app].ps.jinja2.env.get_template, parameter[constant[debugtoolbar/exception_summary.html]]]
return[call[name[template].render, parameter[]]] | keyword[def] identifier[render_summary] ( identifier[self] , identifier[include_title] = keyword[True] , identifier[request] = keyword[None] ):
literal[string]
identifier[title] = literal[string]
identifier[frames] =[]
identifier[classes] =[ literal[string] ]
keyword[if] keyword[not] identifier[self] . identifier[frames] :
identifier[classes] . identifier[append] ( literal[string] )
keyword[if] identifier[include_title] :
keyword[if] identifier[self] . identifier[is_syntax_error] :
identifier[title] = identifier[text_] ( literal[string] )
keyword[else] :
identifier[title] = identifier[text_] ( literal[string]
literal[string] )
keyword[for] identifier[frame] keyword[in] identifier[self] . identifier[frames] :
identifier[frames] . identifier[append] (
identifier[text_] ( literal[string] )%(
identifier[frame] . identifier[info] keyword[and] identifier[text_] ( literal[string] % identifier[escape] ( identifier[frame] . identifier[info] )) keyword[or]
identifier[text_] ( literal[string] ),
identifier[frame] . identifier[render] ()
))
keyword[if] identifier[self] . identifier[is_syntax_error] :
identifier[description_wrapper] = identifier[text_] ( literal[string] )
keyword[else] :
identifier[description_wrapper] = identifier[text_] ( literal[string] )
identifier[vars] ={
literal[string] : identifier[text_] ( literal[string] . identifier[join] ( identifier[classes] )),
literal[string] : identifier[title] keyword[and] identifier[text_] ( literal[string]
% identifier[title] ) keyword[or] identifier[text_] ( literal[string] ),
literal[string] : identifier[text_] ( literal[string] . identifier[join] ( identifier[frames] )),
literal[string] : identifier[description_wrapper] % identifier[escape] ( identifier[self] . identifier[exception] ),
}
identifier[app] = identifier[request] . identifier[app]
identifier[template] = identifier[app] . identifier[ps] . identifier[jinja2] . identifier[env] . identifier[get_template] ( literal[string] )
keyword[return] identifier[template] . identifier[render] ( identifier[app] = identifier[app] , identifier[request] = identifier[request] ,** identifier[vars] ) | def render_summary(self, include_title=True, request=None):
"""Render the traceback for the interactive console."""
title = ''
frames = []
classes = ['traceback']
if not self.frames:
classes.append('noframe-traceback') # depends on [control=['if'], data=[]]
if include_title:
if self.is_syntax_error:
title = text_('Syntax Error') # depends on [control=['if'], data=[]]
else:
title = text_('Traceback <small>(most recent call last)</small>') # depends on [control=['if'], data=[]]
for frame in self.frames:
frames.append(text_('<li%s>%s') % (frame.info and text_(' title="%s"' % escape(frame.info)) or text_(''), frame.render())) # depends on [control=['for'], data=['frame']]
if self.is_syntax_error:
description_wrapper = text_('<pre class=syntaxerror>%s</pre>') # depends on [control=['if'], data=[]]
else:
description_wrapper = text_('<blockquote>%s</blockquote>')
vars = {'classes': text_(' '.join(classes)), 'title': title and text_('<h3 class="traceback">%s</h3>' % title) or text_(''), 'frames': text_('\n'.join(frames)), 'description': description_wrapper % escape(self.exception)}
app = request.app
template = app.ps.jinja2.env.get_template('debugtoolbar/exception_summary.html')
return template.render(app=app, request=request, **vars) |
def namedb_get_num_block_vtxs( cur, block_number ):
"""
How many virtual transactions were processed for this block?
"""
select_query = "SELECT vtxindex FROM history WHERE history_id = ?;"
args = (block_number,)
rows = namedb_query_execute( cur, select_query, args )
count = 0
for r in rows:
count += 1
return count | def function[namedb_get_num_block_vtxs, parameter[cur, block_number]]:
constant[
How many virtual transactions were processed for this block?
]
variable[select_query] assign[=] constant[SELECT vtxindex FROM history WHERE history_id = ?;]
variable[args] assign[=] tuple[[<ast.Name object at 0x7da20c6ab130>]]
variable[rows] assign[=] call[name[namedb_query_execute], parameter[name[cur], name[select_query], name[args]]]
variable[count] assign[=] constant[0]
for taget[name[r]] in starred[name[rows]] begin[:]
<ast.AugAssign object at 0x7da20c6a8190>
return[name[count]] | keyword[def] identifier[namedb_get_num_block_vtxs] ( identifier[cur] , identifier[block_number] ):
literal[string]
identifier[select_query] = literal[string]
identifier[args] =( identifier[block_number] ,)
identifier[rows] = identifier[namedb_query_execute] ( identifier[cur] , identifier[select_query] , identifier[args] )
identifier[count] = literal[int]
keyword[for] identifier[r] keyword[in] identifier[rows] :
identifier[count] += literal[int]
keyword[return] identifier[count] | def namedb_get_num_block_vtxs(cur, block_number):
"""
How many virtual transactions were processed for this block?
"""
select_query = 'SELECT vtxindex FROM history WHERE history_id = ?;'
args = (block_number,)
rows = namedb_query_execute(cur, select_query, args)
count = 0
for r in rows:
count += 1 # depends on [control=['for'], data=[]]
return count |
def tag_lookup(request):
"""JSON endpoint that returns a list of potential tags.
Used for upload template autocomplete.
"""
tag = request.GET['tag']
tagSlug = slugify(tag.strip())
tagCandidates = Tag.objects.values('word').filter(slug__startswith=tagSlug)
tags = json.dumps([candidate['word'] for candidate in tagCandidates])
return HttpResponse(tags, content_type='application/json') | def function[tag_lookup, parameter[request]]:
constant[JSON endpoint that returns a list of potential tags.
Used for upload template autocomplete.
]
variable[tag] assign[=] call[name[request].GET][constant[tag]]
variable[tagSlug] assign[=] call[name[slugify], parameter[call[name[tag].strip, parameter[]]]]
variable[tagCandidates] assign[=] call[call[name[Tag].objects.values, parameter[constant[word]]].filter, parameter[]]
variable[tags] assign[=] call[name[json].dumps, parameter[<ast.ListComp object at 0x7da1b26ae230>]]
return[call[name[HttpResponse], parameter[name[tags]]]] | keyword[def] identifier[tag_lookup] ( identifier[request] ):
literal[string]
identifier[tag] = identifier[request] . identifier[GET] [ literal[string] ]
identifier[tagSlug] = identifier[slugify] ( identifier[tag] . identifier[strip] ())
identifier[tagCandidates] = identifier[Tag] . identifier[objects] . identifier[values] ( literal[string] ). identifier[filter] ( identifier[slug__startswith] = identifier[tagSlug] )
identifier[tags] = identifier[json] . identifier[dumps] ([ identifier[candidate] [ literal[string] ] keyword[for] identifier[candidate] keyword[in] identifier[tagCandidates] ])
keyword[return] identifier[HttpResponse] ( identifier[tags] , identifier[content_type] = literal[string] ) | def tag_lookup(request):
"""JSON endpoint that returns a list of potential tags.
Used for upload template autocomplete.
"""
tag = request.GET['tag']
tagSlug = slugify(tag.strip())
tagCandidates = Tag.objects.values('word').filter(slug__startswith=tagSlug)
tags = json.dumps([candidate['word'] for candidate in tagCandidates])
return HttpResponse(tags, content_type='application/json') |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.