code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def mock_session_with_class(session, cls, url):
"""
Context Manager
Mock the responses with a particular session
to any private methods for the URLs
:param session: The requests session object
:type session: :class:`requests.Session`
:param cls: The class instance with private methods for URLs
:type cls: ``object``
:param url: The base URL to mock, e.g. http://mock.com, http://
supports a single URL or a list
:type url: ``str`` or ``list``
"""
_orig_adapters = session.adapters
mock_adapter = adapter.ClassAdapter(cls)
session.adapters = OrderedDict()
if isinstance(url, (list, tuple)):
for u in url:
session.mount(u, mock_adapter)
else:
session.mount(url, mock_adapter)
yield
session.adapters = _orig_adapters
|
def function[mock_session_with_class, parameter[session, cls, url]]:
constant[
Context Manager
Mock the responses with a particular session
to any private methods for the URLs
:param session: The requests session object
:type session: :class:`requests.Session`
:param cls: The class instance with private methods for URLs
:type cls: ``object``
:param url: The base URL to mock, e.g. http://mock.com, http://
supports a single URL or a list
:type url: ``str`` or ``list``
]
variable[_orig_adapters] assign[=] name[session].adapters
variable[mock_adapter] assign[=] call[name[adapter].ClassAdapter, parameter[name[cls]]]
name[session].adapters assign[=] call[name[OrderedDict], parameter[]]
if call[name[isinstance], parameter[name[url], tuple[[<ast.Name object at 0x7da207f00640>, <ast.Name object at 0x7da207f03d30>]]]] begin[:]
for taget[name[u]] in starred[name[url]] begin[:]
call[name[session].mount, parameter[name[u], name[mock_adapter]]]
<ast.Yield object at 0x7da1b26adbd0>
name[session].adapters assign[=] name[_orig_adapters]
|
keyword[def] identifier[mock_session_with_class] ( identifier[session] , identifier[cls] , identifier[url] ):
literal[string]
identifier[_orig_adapters] = identifier[session] . identifier[adapters]
identifier[mock_adapter] = identifier[adapter] . identifier[ClassAdapter] ( identifier[cls] )
identifier[session] . identifier[adapters] = identifier[OrderedDict] ()
keyword[if] identifier[isinstance] ( identifier[url] ,( identifier[list] , identifier[tuple] )):
keyword[for] identifier[u] keyword[in] identifier[url] :
identifier[session] . identifier[mount] ( identifier[u] , identifier[mock_adapter] )
keyword[else] :
identifier[session] . identifier[mount] ( identifier[url] , identifier[mock_adapter] )
keyword[yield]
identifier[session] . identifier[adapters] = identifier[_orig_adapters]
|
def mock_session_with_class(session, cls, url):
"""
Context Manager
Mock the responses with a particular session
to any private methods for the URLs
:param session: The requests session object
:type session: :class:`requests.Session`
:param cls: The class instance with private methods for URLs
:type cls: ``object``
:param url: The base URL to mock, e.g. http://mock.com, http://
supports a single URL or a list
:type url: ``str`` or ``list``
"""
_orig_adapters = session.adapters
mock_adapter = adapter.ClassAdapter(cls)
session.adapters = OrderedDict()
if isinstance(url, (list, tuple)):
for u in url:
session.mount(u, mock_adapter) # depends on [control=['for'], data=['u']] # depends on [control=['if'], data=[]]
else:
session.mount(url, mock_adapter)
yield
session.adapters = _orig_adapters
|
def load(self, commit=None):
"""Load a result from the storage directory."""
git_info = self.record_git_info(commit)
LOGGER.debug("Loading the result for commit '%s'.", git_info.hexsha)
filename = self.get_filename(git_info)
LOGGER.debug("Loading the result '%s'.", filename)
result = super(RepoResultManager, self).load(filename)
self.add_git(result.meta, git_info)
return result
|
def function[load, parameter[self, commit]]:
constant[Load a result from the storage directory.]
variable[git_info] assign[=] call[name[self].record_git_info, parameter[name[commit]]]
call[name[LOGGER].debug, parameter[constant[Loading the result for commit '%s'.], name[git_info].hexsha]]
variable[filename] assign[=] call[name[self].get_filename, parameter[name[git_info]]]
call[name[LOGGER].debug, parameter[constant[Loading the result '%s'.], name[filename]]]
variable[result] assign[=] call[call[name[super], parameter[name[RepoResultManager], name[self]]].load, parameter[name[filename]]]
call[name[self].add_git, parameter[name[result].meta, name[git_info]]]
return[name[result]]
|
keyword[def] identifier[load] ( identifier[self] , identifier[commit] = keyword[None] ):
literal[string]
identifier[git_info] = identifier[self] . identifier[record_git_info] ( identifier[commit] )
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[git_info] . identifier[hexsha] )
identifier[filename] = identifier[self] . identifier[get_filename] ( identifier[git_info] )
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[filename] )
identifier[result] = identifier[super] ( identifier[RepoResultManager] , identifier[self] ). identifier[load] ( identifier[filename] )
identifier[self] . identifier[add_git] ( identifier[result] . identifier[meta] , identifier[git_info] )
keyword[return] identifier[result]
|
def load(self, commit=None):
"""Load a result from the storage directory."""
git_info = self.record_git_info(commit)
LOGGER.debug("Loading the result for commit '%s'.", git_info.hexsha)
filename = self.get_filename(git_info)
LOGGER.debug("Loading the result '%s'.", filename)
result = super(RepoResultManager, self).load(filename)
self.add_git(result.meta, git_info)
return result
|
def tokenize_block(iterable, token_types):
"""
Returns a list of pairs (token_type, read_result).
Footnotes are parsed here, but span-level parsing has not
started yet.
"""
lines = FileWrapper(iterable)
parse_buffer = ParseBuffer()
line = lines.peek()
while line is not None:
for token_type in token_types:
if token_type.start(line):
result = token_type.read(lines)
if result is not None:
parse_buffer.append((token_type, result))
break
else: # unmatched newlines
next(lines)
parse_buffer.loose = True
line = lines.peek()
return parse_buffer
|
def function[tokenize_block, parameter[iterable, token_types]]:
constant[
Returns a list of pairs (token_type, read_result).
Footnotes are parsed here, but span-level parsing has not
started yet.
]
variable[lines] assign[=] call[name[FileWrapper], parameter[name[iterable]]]
variable[parse_buffer] assign[=] call[name[ParseBuffer], parameter[]]
variable[line] assign[=] call[name[lines].peek, parameter[]]
while compare[name[line] is_not constant[None]] begin[:]
for taget[name[token_type]] in starred[name[token_types]] begin[:]
if call[name[token_type].start, parameter[name[line]]] begin[:]
variable[result] assign[=] call[name[token_type].read, parameter[name[lines]]]
if compare[name[result] is_not constant[None]] begin[:]
call[name[parse_buffer].append, parameter[tuple[[<ast.Name object at 0x7da2043453c0>, <ast.Name object at 0x7da204345660>]]]]
break
variable[line] assign[=] call[name[lines].peek, parameter[]]
return[name[parse_buffer]]
|
keyword[def] identifier[tokenize_block] ( identifier[iterable] , identifier[token_types] ):
literal[string]
identifier[lines] = identifier[FileWrapper] ( identifier[iterable] )
identifier[parse_buffer] = identifier[ParseBuffer] ()
identifier[line] = identifier[lines] . identifier[peek] ()
keyword[while] identifier[line] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[token_type] keyword[in] identifier[token_types] :
keyword[if] identifier[token_type] . identifier[start] ( identifier[line] ):
identifier[result] = identifier[token_type] . identifier[read] ( identifier[lines] )
keyword[if] identifier[result] keyword[is] keyword[not] keyword[None] :
identifier[parse_buffer] . identifier[append] (( identifier[token_type] , identifier[result] ))
keyword[break]
keyword[else] :
identifier[next] ( identifier[lines] )
identifier[parse_buffer] . identifier[loose] = keyword[True]
identifier[line] = identifier[lines] . identifier[peek] ()
keyword[return] identifier[parse_buffer]
|
def tokenize_block(iterable, token_types):
"""
Returns a list of pairs (token_type, read_result).
Footnotes are parsed here, but span-level parsing has not
started yet.
"""
lines = FileWrapper(iterable)
parse_buffer = ParseBuffer()
line = lines.peek()
while line is not None:
for token_type in token_types:
if token_type.start(line):
result = token_type.read(lines)
if result is not None:
parse_buffer.append((token_type, result))
break # depends on [control=['if'], data=['result']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['token_type']]
else: # unmatched newlines
next(lines)
parse_buffer.loose = True
line = lines.peek() # depends on [control=['while'], data=['line']]
return parse_buffer
|
def move_dot(self):
"""Returns the DottedRule that results from moving the dot."""
return self.__class__(self.production, self.pos + 1, self.lookahead)
|
def function[move_dot, parameter[self]]:
constant[Returns the DottedRule that results from moving the dot.]
return[call[name[self].__class__, parameter[name[self].production, binary_operation[name[self].pos + constant[1]], name[self].lookahead]]]
|
keyword[def] identifier[move_dot] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[__class__] ( identifier[self] . identifier[production] , identifier[self] . identifier[pos] + literal[int] , identifier[self] . identifier[lookahead] )
|
def move_dot(self):
"""Returns the DottedRule that results from moving the dot."""
return self.__class__(self.production, self.pos + 1, self.lookahead)
|
def config_lines_w_child(parent_regex, child_regex, source='running'):
r'''
.. versionadded:: 2019.2.0
Return the configuration lines that match the regular expressions from the
``parent_regex`` argument, having child lines matching ``child_regex``.
The configuration is read from the network device interrogated.
.. note::
This function is only available only when the underlying library
`ciscoconfparse <http://www.pennington.net/py/ciscoconfparse/index.html>`_
is installed. See
:py:func:`ciscoconfparse module <salt.modules.ciscoconfparse_mod>` for
more details.
parent_regex
The regular expression to match the parent configuration lines against.
child_regex
The regular expression to match the child configuration lines against.
source: ``running``
The configuration type to retrieve from the network device. Default:
``running``. Available options: ``running``, ``startup``, ``candidate``.
CLI Example:
.. code-block:: bash
salt '*' napalm.config_lines_w_child '^interface' 'ip address'
salt '*' napalm.config_lines_w_child '^interface' 'shutdown' source=candidate
'''
config_txt = __salt__['net.config'](source=source)['out'][source]
return __salt__['ciscoconfparse.find_lines_w_child'](config=config_txt,
parent_regex=parent_regex,
child_regex=child_regex)
|
def function[config_lines_w_child, parameter[parent_regex, child_regex, source]]:
constant[
.. versionadded:: 2019.2.0
Return the configuration lines that match the regular expressions from the
``parent_regex`` argument, having child lines matching ``child_regex``.
The configuration is read from the network device interrogated.
.. note::
This function is only available only when the underlying library
`ciscoconfparse <http://www.pennington.net/py/ciscoconfparse/index.html>`_
is installed. See
:py:func:`ciscoconfparse module <salt.modules.ciscoconfparse_mod>` for
more details.
parent_regex
The regular expression to match the parent configuration lines against.
child_regex
The regular expression to match the child configuration lines against.
source: ``running``
The configuration type to retrieve from the network device. Default:
``running``. Available options: ``running``, ``startup``, ``candidate``.
CLI Example:
.. code-block:: bash
salt '*' napalm.config_lines_w_child '^interface' 'ip address'
salt '*' napalm.config_lines_w_child '^interface' 'shutdown' source=candidate
]
variable[config_txt] assign[=] call[call[call[call[name[__salt__]][constant[net.config]], parameter[]]][constant[out]]][name[source]]
return[call[call[name[__salt__]][constant[ciscoconfparse.find_lines_w_child]], parameter[]]]
|
keyword[def] identifier[config_lines_w_child] ( identifier[parent_regex] , identifier[child_regex] , identifier[source] = literal[string] ):
literal[string]
identifier[config_txt] = identifier[__salt__] [ literal[string] ]( identifier[source] = identifier[source] )[ literal[string] ][ identifier[source] ]
keyword[return] identifier[__salt__] [ literal[string] ]( identifier[config] = identifier[config_txt] ,
identifier[parent_regex] = identifier[parent_regex] ,
identifier[child_regex] = identifier[child_regex] )
|
def config_lines_w_child(parent_regex, child_regex, source='running'):
"""
.. versionadded:: 2019.2.0
Return the configuration lines that match the regular expressions from the
``parent_regex`` argument, having child lines matching ``child_regex``.
The configuration is read from the network device interrogated.
.. note::
This function is only available only when the underlying library
`ciscoconfparse <http://www.pennington.net/py/ciscoconfparse/index.html>`_
is installed. See
:py:func:`ciscoconfparse module <salt.modules.ciscoconfparse_mod>` for
more details.
parent_regex
The regular expression to match the parent configuration lines against.
child_regex
The regular expression to match the child configuration lines against.
source: ``running``
The configuration type to retrieve from the network device. Default:
``running``. Available options: ``running``, ``startup``, ``candidate``.
CLI Example:
.. code-block:: bash
salt '*' napalm.config_lines_w_child '^interface' 'ip address'
salt '*' napalm.config_lines_w_child '^interface' 'shutdown' source=candidate
"""
config_txt = __salt__['net.config'](source=source)['out'][source]
return __salt__['ciscoconfparse.find_lines_w_child'](config=config_txt, parent_regex=parent_regex, child_regex=child_regex)
|
def _eig_complex_symmetric(M: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
"""Diagonalize a complex symmetric matrix. The eigenvalues are
complex, and the eigenvectors form an orthogonal matrix.
Returns:
eigenvalues, eigenvectors
"""
if not np.allclose(M, M.transpose()):
raise np.linalg.LinAlgError('Not a symmetric matrix')
# The matrix of eigenvectors should be orthogonal.
# But the standard 'eig' method will fail to return an orthogonal
# eigenvector matrix when the eigenvalues are degenerate. However,
# both the real and
# imaginary part of M must be symmetric with the same orthogonal
# matrix of eigenvectors. But either the real or imaginary part could
# vanish. So we use a randomized algorithm where we diagonalize a
# random linear combination of real and imaginary parts to find the
# eigenvectors, taking advantage of the 'eigh' subroutine for
# diagonalizing symmetric matrices.
# This can fail if we're very unlucky with our random coefficient, so we
# give the algorithm a few chances to succeed.
# Empirically, never seems to fail on randomly sampled complex
# symmetric 4x4 matrices.
# If failure rate is less than 1 in a million, then 16 rounds
# will have overall failure rate less than 1 in a googol.
# However, cannot (yet) guarantee that there aren't special cases
# which have much higher failure rates.
# GEC 2018
max_attempts = 16
for _ in range(max_attempts):
c = np.random.uniform(0, 1)
matrix = c * M.real + (1-c) * M.imag
_, eigvecs = np.linalg.eigh(matrix)
eigvecs = np.array(eigvecs, dtype=complex)
eigvals = np.diag(eigvecs.transpose() @ M @ eigvecs)
# Finish if we got a correct answer.
reconstructed = eigvecs @ np.diag(eigvals) @ eigvecs.transpose()
if np.allclose(M, reconstructed):
return eigvals, eigvecs
# Should never happen. Hopefully.
raise np.linalg.LinAlgError(
'Cannot diagonalize complex symmetric matrix.')
|
def function[_eig_complex_symmetric, parameter[M]]:
constant[Diagonalize a complex symmetric matrix. The eigenvalues are
complex, and the eigenvectors form an orthogonal matrix.
Returns:
eigenvalues, eigenvectors
]
if <ast.UnaryOp object at 0x7da18f722f20> begin[:]
<ast.Raise object at 0x7da18f723d60>
variable[max_attempts] assign[=] constant[16]
for taget[name[_]] in starred[call[name[range], parameter[name[max_attempts]]]] begin[:]
variable[c] assign[=] call[name[np].random.uniform, parameter[constant[0], constant[1]]]
variable[matrix] assign[=] binary_operation[binary_operation[name[c] * name[M].real] + binary_operation[binary_operation[constant[1] - name[c]] * name[M].imag]]
<ast.Tuple object at 0x7da207f9a6b0> assign[=] call[name[np].linalg.eigh, parameter[name[matrix]]]
variable[eigvecs] assign[=] call[name[np].array, parameter[name[eigvecs]]]
variable[eigvals] assign[=] call[name[np].diag, parameter[binary_operation[binary_operation[call[name[eigvecs].transpose, parameter[]] <ast.MatMult object at 0x7da2590d6860> name[M]] <ast.MatMult object at 0x7da2590d6860> name[eigvecs]]]]
variable[reconstructed] assign[=] binary_operation[binary_operation[name[eigvecs] <ast.MatMult object at 0x7da2590d6860> call[name[np].diag, parameter[name[eigvals]]]] <ast.MatMult object at 0x7da2590d6860> call[name[eigvecs].transpose, parameter[]]]
if call[name[np].allclose, parameter[name[M], name[reconstructed]]] begin[:]
return[tuple[[<ast.Name object at 0x7da207f994b0>, <ast.Name object at 0x7da207f9b6d0>]]]
<ast.Raise object at 0x7da207f99720>
|
keyword[def] identifier[_eig_complex_symmetric] ( identifier[M] : identifier[np] . identifier[ndarray] )-> identifier[Tuple] [ identifier[np] . identifier[ndarray] , identifier[np] . identifier[ndarray] ]:
literal[string]
keyword[if] keyword[not] identifier[np] . identifier[allclose] ( identifier[M] , identifier[M] . identifier[transpose] ()):
keyword[raise] identifier[np] . identifier[linalg] . identifier[LinAlgError] ( literal[string] )
identifier[max_attempts] = literal[int]
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[max_attempts] ):
identifier[c] = identifier[np] . identifier[random] . identifier[uniform] ( literal[int] , literal[int] )
identifier[matrix] = identifier[c] * identifier[M] . identifier[real] +( literal[int] - identifier[c] )* identifier[M] . identifier[imag]
identifier[_] , identifier[eigvecs] = identifier[np] . identifier[linalg] . identifier[eigh] ( identifier[matrix] )
identifier[eigvecs] = identifier[np] . identifier[array] ( identifier[eigvecs] , identifier[dtype] = identifier[complex] )
identifier[eigvals] = identifier[np] . identifier[diag] ( identifier[eigvecs] . identifier[transpose] ()@ identifier[M] @ identifier[eigvecs] )
identifier[reconstructed] = identifier[eigvecs] @ identifier[np] . identifier[diag] ( identifier[eigvals] )@ identifier[eigvecs] . identifier[transpose] ()
keyword[if] identifier[np] . identifier[allclose] ( identifier[M] , identifier[reconstructed] ):
keyword[return] identifier[eigvals] , identifier[eigvecs]
keyword[raise] identifier[np] . identifier[linalg] . identifier[LinAlgError] (
literal[string] )
|
def _eig_complex_symmetric(M: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
"""Diagonalize a complex symmetric matrix. The eigenvalues are
complex, and the eigenvectors form an orthogonal matrix.
Returns:
eigenvalues, eigenvectors
"""
if not np.allclose(M, M.transpose()):
raise np.linalg.LinAlgError('Not a symmetric matrix') # depends on [control=['if'], data=[]]
# The matrix of eigenvectors should be orthogonal.
# But the standard 'eig' method will fail to return an orthogonal
# eigenvector matrix when the eigenvalues are degenerate. However,
# both the real and
# imaginary part of M must be symmetric with the same orthogonal
# matrix of eigenvectors. But either the real or imaginary part could
# vanish. So we use a randomized algorithm where we diagonalize a
# random linear combination of real and imaginary parts to find the
# eigenvectors, taking advantage of the 'eigh' subroutine for
# diagonalizing symmetric matrices.
# This can fail if we're very unlucky with our random coefficient, so we
# give the algorithm a few chances to succeed.
# Empirically, never seems to fail on randomly sampled complex
# symmetric 4x4 matrices.
# If failure rate is less than 1 in a million, then 16 rounds
# will have overall failure rate less than 1 in a googol.
# However, cannot (yet) guarantee that there aren't special cases
# which have much higher failure rates.
# GEC 2018
max_attempts = 16
for _ in range(max_attempts):
c = np.random.uniform(0, 1)
matrix = c * M.real + (1 - c) * M.imag
(_, eigvecs) = np.linalg.eigh(matrix)
eigvecs = np.array(eigvecs, dtype=complex)
eigvals = np.diag(eigvecs.transpose() @ M @ eigvecs)
# Finish if we got a correct answer.
reconstructed = eigvecs @ np.diag(eigvals) @ eigvecs.transpose()
if np.allclose(M, reconstructed):
return (eigvals, eigvecs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_']]
# Should never happen. Hopefully.
raise np.linalg.LinAlgError('Cannot diagonalize complex symmetric matrix.')
|
def _set_port_profile_port(self, v, load=False):
"""
Setter method for port_profile_port, mapped from YANG variable /interface/port_channel/port_profile_port (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_port_profile_port is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_port_profile_port() directly.
YANG Description: This specifies if a physical/logical port can be
enabled for port-profiling. The presence of this
leaf indicates that the port is enabled for
port-profiling. Else, it is not enabled.
Enabling a port for port-profiling results in to
application of network policies (as per PP-MAC mapping)
following MAC learning process.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="port-profile-port", rest_name="port-profile-port", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the interface to AMPP profile mode', u'sort-priority': u'135'}}, namespace='urn:brocade.com:mgmt:brocade-port-profile', defining_module='brocade-port-profile', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """port_profile_port must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="port-profile-port", rest_name="port-profile-port", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the interface to AMPP profile mode', u'sort-priority': u'135'}}, namespace='urn:brocade.com:mgmt:brocade-port-profile', defining_module='brocade-port-profile', yang_type='empty', is_config=True)""",
})
self.__port_profile_port = t
if hasattr(self, '_set'):
self._set()
|
def function[_set_port_profile_port, parameter[self, v, load]]:
constant[
Setter method for port_profile_port, mapped from YANG variable /interface/port_channel/port_profile_port (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_port_profile_port is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_port_profile_port() directly.
YANG Description: This specifies if a physical/logical port can be
enabled for port-profiling. The presence of this
leaf indicates that the port is enabled for
port-profiling. Else, it is not enabled.
Enabling a port for port-profiling results in to
application of network policies (as per PP-MAC mapping)
following MAC learning process.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18f58ece0>
name[self].__port_profile_port assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]]
|
keyword[def] identifier[_set_port_profile_port] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGBool] , identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__port_profile_port] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] ()
|
def _set_port_profile_port(self, v, load=False):
"""
Setter method for port_profile_port, mapped from YANG variable /interface/port_channel/port_profile_port (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_port_profile_port is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_port_profile_port() directly.
YANG Description: This specifies if a physical/logical port can be
enabled for port-profiling. The presence of this
leaf indicates that the port is enabled for
port-profiling. Else, it is not enabled.
Enabling a port for port-profiling results in to
application of network policies (as per PP-MAC mapping)
following MAC learning process.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=YANGBool, is_leaf=True, yang_name='port-profile-port', rest_name='port-profile-port', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the interface to AMPP profile mode', u'sort-priority': u'135'}}, namespace='urn:brocade.com:mgmt:brocade-port-profile', defining_module='brocade-port-profile', yang_type='empty', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'port_profile_port must be of a type compatible with empty', 'defined-type': 'empty', 'generated-type': 'YANGDynClass(base=YANGBool, is_leaf=True, yang_name="port-profile-port", rest_name="port-profile-port", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Set the interface to AMPP profile mode\', u\'sort-priority\': u\'135\'}}, namespace=\'urn:brocade.com:mgmt:brocade-port-profile\', defining_module=\'brocade-port-profile\', yang_type=\'empty\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__port_profile_port = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]]
|
def count_items(self, unique=True):
"""Count items in the cart.
Parameters
----------
unique : bool-convertible, optional
Returns
-------
int
If `unique` is truthy, then the result is the number of
items in the cart. Otherwise, it's the sum of all item
quantities.
"""
if unique:
return len(self.items)
return sum([item.quantity for item in self.items.values()])
|
def function[count_items, parameter[self, unique]]:
constant[Count items in the cart.
Parameters
----------
unique : bool-convertible, optional
Returns
-------
int
If `unique` is truthy, then the result is the number of
items in the cart. Otherwise, it's the sum of all item
quantities.
]
if name[unique] begin[:]
return[call[name[len], parameter[name[self].items]]]
return[call[name[sum], parameter[<ast.ListComp object at 0x7da18f09f310>]]]
|
keyword[def] identifier[count_items] ( identifier[self] , identifier[unique] = keyword[True] ):
literal[string]
keyword[if] identifier[unique] :
keyword[return] identifier[len] ( identifier[self] . identifier[items] )
keyword[return] identifier[sum] ([ identifier[item] . identifier[quantity] keyword[for] identifier[item] keyword[in] identifier[self] . identifier[items] . identifier[values] ()])
|
def count_items(self, unique=True):
"""Count items in the cart.
Parameters
----------
unique : bool-convertible, optional
Returns
-------
int
If `unique` is truthy, then the result is the number of
items in the cart. Otherwise, it's the sum of all item
quantities.
"""
if unique:
return len(self.items) # depends on [control=['if'], data=[]]
return sum([item.quantity for item in self.items.values()])
|
def add_deviation_element(keyword, element):
"""Add an element to the <keyword>'s list of deviations.
Can be used by plugins that add support for specific extension
statements."""
if keyword in _valid_deviations:
_valid_deviations[keyword].append(element)
else:
_valid_deviations[keyword] = [element]
|
def function[add_deviation_element, parameter[keyword, element]]:
constant[Add an element to the <keyword>'s list of deviations.
Can be used by plugins that add support for specific extension
statements.]
if compare[name[keyword] in name[_valid_deviations]] begin[:]
call[call[name[_valid_deviations]][name[keyword]].append, parameter[name[element]]]
|
keyword[def] identifier[add_deviation_element] ( identifier[keyword] , identifier[element] ):
literal[string]
keyword[if] identifier[keyword] keyword[in] identifier[_valid_deviations] :
identifier[_valid_deviations] [ identifier[keyword] ]. identifier[append] ( identifier[element] )
keyword[else] :
identifier[_valid_deviations] [ identifier[keyword] ]=[ identifier[element] ]
|
def add_deviation_element(keyword, element):
"""Add an element to the <keyword>'s list of deviations.
Can be used by plugins that add support for specific extension
statements."""
if keyword in _valid_deviations:
_valid_deviations[keyword].append(element) # depends on [control=['if'], data=['keyword', '_valid_deviations']]
else:
_valid_deviations[keyword] = [element]
|
def _get_host_details(self):
"""Get the system details."""
# Assuming only one system present as part of collection,
# as we are dealing with iLO's here.
status, headers, system = self._rest_get('/rest/v1/Systems/1')
if status < 300:
stype = self._get_type(system)
if stype not in ['ComputerSystem.0', 'ComputerSystem.1']:
msg = "%s is not a valid system type " % stype
raise exception.IloError(msg)
else:
msg = self._get_extended_error(system)
raise exception.IloError(msg)
return system
|
def function[_get_host_details, parameter[self]]:
constant[Get the system details.]
<ast.Tuple object at 0x7da1b19905e0> assign[=] call[name[self]._rest_get, parameter[constant[/rest/v1/Systems/1]]]
if compare[name[status] less[<] constant[300]] begin[:]
variable[stype] assign[=] call[name[self]._get_type, parameter[name[system]]]
if compare[name[stype] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da20c990790>, <ast.Constant object at 0x7da20c992380>]]] begin[:]
variable[msg] assign[=] binary_operation[constant[%s is not a valid system type ] <ast.Mod object at 0x7da2590d6920> name[stype]]
<ast.Raise object at 0x7da1b1990310>
return[name[system]]
|
keyword[def] identifier[_get_host_details] ( identifier[self] ):
literal[string]
identifier[status] , identifier[headers] , identifier[system] = identifier[self] . identifier[_rest_get] ( literal[string] )
keyword[if] identifier[status] < literal[int] :
identifier[stype] = identifier[self] . identifier[_get_type] ( identifier[system] )
keyword[if] identifier[stype] keyword[not] keyword[in] [ literal[string] , literal[string] ]:
identifier[msg] = literal[string] % identifier[stype]
keyword[raise] identifier[exception] . identifier[IloError] ( identifier[msg] )
keyword[else] :
identifier[msg] = identifier[self] . identifier[_get_extended_error] ( identifier[system] )
keyword[raise] identifier[exception] . identifier[IloError] ( identifier[msg] )
keyword[return] identifier[system]
|
def _get_host_details(self):
"""Get the system details."""
# Assuming only one system present as part of collection,
# as we are dealing with iLO's here.
(status, headers, system) = self._rest_get('/rest/v1/Systems/1')
if status < 300:
stype = self._get_type(system)
if stype not in ['ComputerSystem.0', 'ComputerSystem.1']:
msg = '%s is not a valid system type ' % stype
raise exception.IloError(msg) # depends on [control=['if'], data=['stype']] # depends on [control=['if'], data=[]]
else:
msg = self._get_extended_error(system)
raise exception.IloError(msg)
return system
|
def _setup():
"""
Sets up the global import environment variables by registering the
sub-folders for projex as import locations. When defining your
custom manager, you will want to overload this method to do any
sort of global initialization that you wish before continuing.
:warning This method is called by the _setup method, and should
not be called directly.
"""
projex_path = os.getenv('PROJEX_PATH')
if not projex_path:
return
base_path = os.path.dirname(__file__)
logger.debug('Loading PROJEX_PATH: %s' % projex_path)
# load the defaults from the install directory
# load the paths from the environment
paths = projex_path.split(os.path.pathsep)
paths += [
os.path.join(base_path, 'userplug'),
os.path.join(base_path, 'stdplug'),
os.path.join(base_path, 'lib'),
]
sys.path = paths + sys.path
|
def function[_setup, parameter[]]:
constant[
Sets up the global import environment variables by registering the
sub-folders for projex as import locations. When defining your
custom manager, you will want to overload this method to do any
sort of global initialization that you wish before continuing.
:warning This method is called by the _setup method, and should
not be called directly.
]
variable[projex_path] assign[=] call[name[os].getenv, parameter[constant[PROJEX_PATH]]]
if <ast.UnaryOp object at 0x7da18f811030> begin[:]
return[None]
variable[base_path] assign[=] call[name[os].path.dirname, parameter[name[__file__]]]
call[name[logger].debug, parameter[binary_operation[constant[Loading PROJEX_PATH: %s] <ast.Mod object at 0x7da2590d6920> name[projex_path]]]]
variable[paths] assign[=] call[name[projex_path].split, parameter[name[os].path.pathsep]]
<ast.AugAssign object at 0x7da18f8129e0>
name[sys].path assign[=] binary_operation[name[paths] + name[sys].path]
|
keyword[def] identifier[_setup] ():
literal[string]
identifier[projex_path] = identifier[os] . identifier[getenv] ( literal[string] )
keyword[if] keyword[not] identifier[projex_path] :
keyword[return]
identifier[base_path] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] )
identifier[logger] . identifier[debug] ( literal[string] % identifier[projex_path] )
identifier[paths] = identifier[projex_path] . identifier[split] ( identifier[os] . identifier[path] . identifier[pathsep] )
identifier[paths] +=[
identifier[os] . identifier[path] . identifier[join] ( identifier[base_path] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[base_path] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[base_path] , literal[string] ),
]
identifier[sys] . identifier[path] = identifier[paths] + identifier[sys] . identifier[path]
|
def _setup():
"""
Sets up the global import environment variables by registering the
sub-folders for projex as import locations. When defining your
custom manager, you will want to overload this method to do any
sort of global initialization that you wish before continuing.
:warning This method is called by the _setup method, and should
not be called directly.
"""
projex_path = os.getenv('PROJEX_PATH')
if not projex_path:
return # depends on [control=['if'], data=[]]
base_path = os.path.dirname(__file__)
logger.debug('Loading PROJEX_PATH: %s' % projex_path)
# load the defaults from the install directory
# load the paths from the environment
paths = projex_path.split(os.path.pathsep)
paths += [os.path.join(base_path, 'userplug'), os.path.join(base_path, 'stdplug'), os.path.join(base_path, 'lib')]
sys.path = paths + sys.path
|
def state_engine_replay_block(existing_state_engine, new_state_engine, block_height, expected_snapshots={}):
"""
Extract the existing chain state transactions from the existing state engine at a particular block height,
parse them using the new state engine, and process them using the new state engine.
Returns the consensus hash of the block on success.
"""
assert new_state_engine.lastblock + 1 == block_height, 'Block height mismatch: {} + 1 != {}'.format(new_state_engine.lastblock, block_height)
db_con = StateEngine.db_open(existing_state_engine.impl, existing_state_engine.working_dir)
chainstate_block = existing_state_engine.db_chainstate_get_block(db_con, block_height)
db_con.close()
log.debug("{} transactions accepted at block {} in chainstate {}; replaying in {}".format(len(chainstate_block), block_height, existing_state_engine.working_dir, new_state_engine.working_dir))
parsed_txs = dict([(txdata['txid'], transactions.tx_parse(txdata['tx_hex'], blockchain=existing_state_engine.impl.get_blockchain())) for txdata in chainstate_block])
txs = [
{
'txid': txdata['txid'],
'txindex': txdata['txindex'],
'nulldata': '{}{}{}'.format(existing_state_engine.impl.get_magic_bytes().encode('hex'), txdata['opcode'].encode('hex'), txdata['data_hex']),
'ins': parsed_txs[txdata['txid']]['ins'],
'outs': parsed_txs[txdata['txid']]['outs'],
'senders': txdata['senders'],
'fee': txdata['fee'],
'hex': txdata['tx_hex'],
'tx_merkle_path': txdata['tx_merkle_path'],
}
for txdata in chainstate_block]
new_state_engine.db_set_indexing(True, new_state_engine.impl, new_state_engine.working_dir)
ops = new_state_engine.parse_block(block_height, txs)
consensus_hash = new_state_engine.process_block(block_height, ops, expected_snapshots=expected_snapshots)
new_state_engine.db_set_indexing(False, new_state_engine.impl, new_state_engine.working_dir)
return consensus_hash
|
def function[state_engine_replay_block, parameter[existing_state_engine, new_state_engine, block_height, expected_snapshots]]:
constant[
Extract the existing chain state transactions from the existing state engine at a particular block height,
parse them using the new state engine, and process them using the new state engine.
Returns the consensus hash of the block on success.
]
assert[compare[binary_operation[name[new_state_engine].lastblock + constant[1]] equal[==] name[block_height]]]
variable[db_con] assign[=] call[name[StateEngine].db_open, parameter[name[existing_state_engine].impl, name[existing_state_engine].working_dir]]
variable[chainstate_block] assign[=] call[name[existing_state_engine].db_chainstate_get_block, parameter[name[db_con], name[block_height]]]
call[name[db_con].close, parameter[]]
call[name[log].debug, parameter[call[constant[{} transactions accepted at block {} in chainstate {}; replaying in {}].format, parameter[call[name[len], parameter[name[chainstate_block]]], name[block_height], name[existing_state_engine].working_dir, name[new_state_engine].working_dir]]]]
variable[parsed_txs] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da18f58dfc0>]]
variable[txs] assign[=] <ast.ListComp object at 0x7da1b2842b60>
call[name[new_state_engine].db_set_indexing, parameter[constant[True], name[new_state_engine].impl, name[new_state_engine].working_dir]]
variable[ops] assign[=] call[name[new_state_engine].parse_block, parameter[name[block_height], name[txs]]]
variable[consensus_hash] assign[=] call[name[new_state_engine].process_block, parameter[name[block_height], name[ops]]]
call[name[new_state_engine].db_set_indexing, parameter[constant[False], name[new_state_engine].impl, name[new_state_engine].working_dir]]
return[name[consensus_hash]]
|
keyword[def] identifier[state_engine_replay_block] ( identifier[existing_state_engine] , identifier[new_state_engine] , identifier[block_height] , identifier[expected_snapshots] ={}):
literal[string]
keyword[assert] identifier[new_state_engine] . identifier[lastblock] + literal[int] == identifier[block_height] , literal[string] . identifier[format] ( identifier[new_state_engine] . identifier[lastblock] , identifier[block_height] )
identifier[db_con] = identifier[StateEngine] . identifier[db_open] ( identifier[existing_state_engine] . identifier[impl] , identifier[existing_state_engine] . identifier[working_dir] )
identifier[chainstate_block] = identifier[existing_state_engine] . identifier[db_chainstate_get_block] ( identifier[db_con] , identifier[block_height] )
identifier[db_con] . identifier[close] ()
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[len] ( identifier[chainstate_block] ), identifier[block_height] , identifier[existing_state_engine] . identifier[working_dir] , identifier[new_state_engine] . identifier[working_dir] ))
identifier[parsed_txs] = identifier[dict] ([( identifier[txdata] [ literal[string] ], identifier[transactions] . identifier[tx_parse] ( identifier[txdata] [ literal[string] ], identifier[blockchain] = identifier[existing_state_engine] . identifier[impl] . identifier[get_blockchain] ())) keyword[for] identifier[txdata] keyword[in] identifier[chainstate_block] ])
identifier[txs] =[
{
literal[string] : identifier[txdata] [ literal[string] ],
literal[string] : identifier[txdata] [ literal[string] ],
literal[string] : literal[string] . identifier[format] ( identifier[existing_state_engine] . identifier[impl] . identifier[get_magic_bytes] (). identifier[encode] ( literal[string] ), identifier[txdata] [ literal[string] ]. identifier[encode] ( literal[string] ), identifier[txdata] [ literal[string] ]),
literal[string] : identifier[parsed_txs] [ identifier[txdata] [ literal[string] ]][ literal[string] ],
literal[string] : identifier[parsed_txs] [ identifier[txdata] [ literal[string] ]][ literal[string] ],
literal[string] : identifier[txdata] [ literal[string] ],
literal[string] : identifier[txdata] [ literal[string] ],
literal[string] : identifier[txdata] [ literal[string] ],
literal[string] : identifier[txdata] [ literal[string] ],
}
keyword[for] identifier[txdata] keyword[in] identifier[chainstate_block] ]
identifier[new_state_engine] . identifier[db_set_indexing] ( keyword[True] , identifier[new_state_engine] . identifier[impl] , identifier[new_state_engine] . identifier[working_dir] )
identifier[ops] = identifier[new_state_engine] . identifier[parse_block] ( identifier[block_height] , identifier[txs] )
identifier[consensus_hash] = identifier[new_state_engine] . identifier[process_block] ( identifier[block_height] , identifier[ops] , identifier[expected_snapshots] = identifier[expected_snapshots] )
identifier[new_state_engine] . identifier[db_set_indexing] ( keyword[False] , identifier[new_state_engine] . identifier[impl] , identifier[new_state_engine] . identifier[working_dir] )
keyword[return] identifier[consensus_hash]
|
def state_engine_replay_block(existing_state_engine, new_state_engine, block_height, expected_snapshots={}):
"""
Extract the existing chain state transactions from the existing state engine at a particular block height,
parse them using the new state engine, and process them using the new state engine.
Returns the consensus hash of the block on success.
"""
assert new_state_engine.lastblock + 1 == block_height, 'Block height mismatch: {} + 1 != {}'.format(new_state_engine.lastblock, block_height)
db_con = StateEngine.db_open(existing_state_engine.impl, existing_state_engine.working_dir)
chainstate_block = existing_state_engine.db_chainstate_get_block(db_con, block_height)
db_con.close()
log.debug('{} transactions accepted at block {} in chainstate {}; replaying in {}'.format(len(chainstate_block), block_height, existing_state_engine.working_dir, new_state_engine.working_dir))
parsed_txs = dict([(txdata['txid'], transactions.tx_parse(txdata['tx_hex'], blockchain=existing_state_engine.impl.get_blockchain())) for txdata in chainstate_block])
txs = [{'txid': txdata['txid'], 'txindex': txdata['txindex'], 'nulldata': '{}{}{}'.format(existing_state_engine.impl.get_magic_bytes().encode('hex'), txdata['opcode'].encode('hex'), txdata['data_hex']), 'ins': parsed_txs[txdata['txid']]['ins'], 'outs': parsed_txs[txdata['txid']]['outs'], 'senders': txdata['senders'], 'fee': txdata['fee'], 'hex': txdata['tx_hex'], 'tx_merkle_path': txdata['tx_merkle_path']} for txdata in chainstate_block]
new_state_engine.db_set_indexing(True, new_state_engine.impl, new_state_engine.working_dir)
ops = new_state_engine.parse_block(block_height, txs)
consensus_hash = new_state_engine.process_block(block_height, ops, expected_snapshots=expected_snapshots)
new_state_engine.db_set_indexing(False, new_state_engine.impl, new_state_engine.working_dir)
return consensus_hash
|
def coerce(self, value):
"""Convert text values into integer values.
Args:
value (str or int): The value to coerce.
Raises:
TypeError: If the value is not an int or string.
ValueError: If the value is not int or an acceptable value.
Returns:
int: The integer value represented.
"""
if isinstance(value, int) or isinstance(value, compat.long):
return value
return int(value)
|
def function[coerce, parameter[self, value]]:
constant[Convert text values into integer values.
Args:
value (str or int): The value to coerce.
Raises:
TypeError: If the value is not an int or string.
ValueError: If the value is not int or an acceptable value.
Returns:
int: The integer value represented.
]
if <ast.BoolOp object at 0x7da1b28f6440> begin[:]
return[name[value]]
return[call[name[int], parameter[name[value]]]]
|
keyword[def] identifier[coerce] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[int] ) keyword[or] identifier[isinstance] ( identifier[value] , identifier[compat] . identifier[long] ):
keyword[return] identifier[value]
keyword[return] identifier[int] ( identifier[value] )
|
def coerce(self, value):
"""Convert text values into integer values.
Args:
value (str or int): The value to coerce.
Raises:
TypeError: If the value is not an int or string.
ValueError: If the value is not int or an acceptable value.
Returns:
int: The integer value represented.
"""
if isinstance(value, int) or isinstance(value, compat.long):
return value # depends on [control=['if'], data=[]]
return int(value)
|
def Laliberte_heat_capacity(T, ws, CASRNs):
r'''Calculate the heat capacity of an aqueous electrolyte mixture using the
form proposed by [1]_.
Parameters are loaded by the function as needed.
.. math::
TODO
Parameters
----------
T : float
Temperature of fluid [K]
ws : array
Weight fractions of fluid components other than water
CASRNs : array
CAS numbers of the fluid components other than water
Returns
-------
Cp : float
Solution heat capacity, [J/kg/K]
Notes
-----
Temperature range check is not implemented.
Units are Kelvin and J/kg/K.
Examples
--------
>>> Laliberte_heat_capacity(273.15+1.5, [0.00398447], ['7647-14-5'])
4186.569908672113
References
----------
.. [1] Laliberte, Marc. "A Model for Calculating the Heat Capacity of
Aqueous Solutions, with Updated Density and Viscosity Data." Journal of
Chemical & Engineering Data 54, no. 6 (June 11, 2009): 1725-60.
doi:10.1021/je8008123
'''
Cp_w = Laliberte_heat_capacity_w(T)
w_w = 1 - sum(ws)
Cp = w_w*Cp_w
for i in range(len(CASRNs)):
d = _Laliberte_Heat_Capacity_ParametersDict[CASRNs[i]]
Cp_i = Laliberte_heat_capacity_i(T, w_w, d["A1"], d["A2"], d["A3"], d["A4"], d["A5"], d["A6"])
Cp = Cp + ws[i]*Cp_i
return Cp
|
def function[Laliberte_heat_capacity, parameter[T, ws, CASRNs]]:
constant[Calculate the heat capacity of an aqueous electrolyte mixture using the
form proposed by [1]_.
Parameters are loaded by the function as needed.
.. math::
TODO
Parameters
----------
T : float
Temperature of fluid [K]
ws : array
Weight fractions of fluid components other than water
CASRNs : array
CAS numbers of the fluid components other than water
Returns
-------
Cp : float
Solution heat capacity, [J/kg/K]
Notes
-----
Temperature range check is not implemented.
Units are Kelvin and J/kg/K.
Examples
--------
>>> Laliberte_heat_capacity(273.15+1.5, [0.00398447], ['7647-14-5'])
4186.569908672113
References
----------
.. [1] Laliberte, Marc. "A Model for Calculating the Heat Capacity of
Aqueous Solutions, with Updated Density and Viscosity Data." Journal of
Chemical & Engineering Data 54, no. 6 (June 11, 2009): 1725-60.
doi:10.1021/je8008123
]
variable[Cp_w] assign[=] call[name[Laliberte_heat_capacity_w], parameter[name[T]]]
variable[w_w] assign[=] binary_operation[constant[1] - call[name[sum], parameter[name[ws]]]]
variable[Cp] assign[=] binary_operation[name[w_w] * name[Cp_w]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[CASRNs]]]]]] begin[:]
variable[d] assign[=] call[name[_Laliberte_Heat_Capacity_ParametersDict]][call[name[CASRNs]][name[i]]]
variable[Cp_i] assign[=] call[name[Laliberte_heat_capacity_i], parameter[name[T], name[w_w], call[name[d]][constant[A1]], call[name[d]][constant[A2]], call[name[d]][constant[A3]], call[name[d]][constant[A4]], call[name[d]][constant[A5]], call[name[d]][constant[A6]]]]
variable[Cp] assign[=] binary_operation[name[Cp] + binary_operation[call[name[ws]][name[i]] * name[Cp_i]]]
return[name[Cp]]
|
keyword[def] identifier[Laliberte_heat_capacity] ( identifier[T] , identifier[ws] , identifier[CASRNs] ):
literal[string]
identifier[Cp_w] = identifier[Laliberte_heat_capacity_w] ( identifier[T] )
identifier[w_w] = literal[int] - identifier[sum] ( identifier[ws] )
identifier[Cp] = identifier[w_w] * identifier[Cp_w]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[CASRNs] )):
identifier[d] = identifier[_Laliberte_Heat_Capacity_ParametersDict] [ identifier[CASRNs] [ identifier[i] ]]
identifier[Cp_i] = identifier[Laliberte_heat_capacity_i] ( identifier[T] , identifier[w_w] , identifier[d] [ literal[string] ], identifier[d] [ literal[string] ], identifier[d] [ literal[string] ], identifier[d] [ literal[string] ], identifier[d] [ literal[string] ], identifier[d] [ literal[string] ])
identifier[Cp] = identifier[Cp] + identifier[ws] [ identifier[i] ]* identifier[Cp_i]
keyword[return] identifier[Cp]
|
def Laliberte_heat_capacity(T, ws, CASRNs):
"""Calculate the heat capacity of an aqueous electrolyte mixture using the
form proposed by [1]_.
Parameters are loaded by the function as needed.
.. math::
TODO
Parameters
----------
T : float
Temperature of fluid [K]
ws : array
Weight fractions of fluid components other than water
CASRNs : array
CAS numbers of the fluid components other than water
Returns
-------
Cp : float
Solution heat capacity, [J/kg/K]
Notes
-----
Temperature range check is not implemented.
Units are Kelvin and J/kg/K.
Examples
--------
>>> Laliberte_heat_capacity(273.15+1.5, [0.00398447], ['7647-14-5'])
4186.569908672113
References
----------
.. [1] Laliberte, Marc. "A Model for Calculating the Heat Capacity of
Aqueous Solutions, with Updated Density and Viscosity Data." Journal of
Chemical & Engineering Data 54, no. 6 (June 11, 2009): 1725-60.
doi:10.1021/je8008123
"""
Cp_w = Laliberte_heat_capacity_w(T)
w_w = 1 - sum(ws)
Cp = w_w * Cp_w
for i in range(len(CASRNs)):
d = _Laliberte_Heat_Capacity_ParametersDict[CASRNs[i]]
Cp_i = Laliberte_heat_capacity_i(T, w_w, d['A1'], d['A2'], d['A3'], d['A4'], d['A5'], d['A6'])
Cp = Cp + ws[i] * Cp_i # depends on [control=['for'], data=['i']]
return Cp
|
def get(self, pid, stream=False, vendorSpecific=None):
"""Initiate a MNRead.get(). Return a Requests Response object from which the
object bytes can be retrieved.
When ``stream`` is False, Requests buffers the entire object in memory before
returning the Response. This can exhaust available memory on the local machine
when retrieving large science objects. The solution is to set ``stream`` to
True, which causes the returned Response object to contain a a stream. However,
see note below.
When ``stream`` = True, the Response object will contain a stream which can be
processed without buffering the entire science object in memory. However,
failure to read all data from the stream can cause connections to be blocked.
Due to this, the ``stream`` parameter is False by default.
Also see:
- http://docs.python-requests.org/en/master/user/advanced/body-content-workflow
- get_and_save() in this module.
"""
response = self.getResponse(pid, stream, vendorSpecific)
return self._read_stream_response(response)
|
def function[get, parameter[self, pid, stream, vendorSpecific]]:
constant[Initiate a MNRead.get(). Return a Requests Response object from which the
object bytes can be retrieved.
When ``stream`` is False, Requests buffers the entire object in memory before
returning the Response. This can exhaust available memory on the local machine
when retrieving large science objects. The solution is to set ``stream`` to
True, which causes the returned Response object to contain a a stream. However,
see note below.
When ``stream`` = True, the Response object will contain a stream which can be
processed without buffering the entire science object in memory. However,
failure to read all data from the stream can cause connections to be blocked.
Due to this, the ``stream`` parameter is False by default.
Also see:
- http://docs.python-requests.org/en/master/user/advanced/body-content-workflow
- get_and_save() in this module.
]
variable[response] assign[=] call[name[self].getResponse, parameter[name[pid], name[stream], name[vendorSpecific]]]
return[call[name[self]._read_stream_response, parameter[name[response]]]]
|
keyword[def] identifier[get] ( identifier[self] , identifier[pid] , identifier[stream] = keyword[False] , identifier[vendorSpecific] = keyword[None] ):
literal[string]
identifier[response] = identifier[self] . identifier[getResponse] ( identifier[pid] , identifier[stream] , identifier[vendorSpecific] )
keyword[return] identifier[self] . identifier[_read_stream_response] ( identifier[response] )
|
def get(self, pid, stream=False, vendorSpecific=None):
"""Initiate a MNRead.get(). Return a Requests Response object from which the
object bytes can be retrieved.
When ``stream`` is False, Requests buffers the entire object in memory before
returning the Response. This can exhaust available memory on the local machine
when retrieving large science objects. The solution is to set ``stream`` to
True, which causes the returned Response object to contain a a stream. However,
see note below.
When ``stream`` = True, the Response object will contain a stream which can be
processed without buffering the entire science object in memory. However,
failure to read all data from the stream can cause connections to be blocked.
Due to this, the ``stream`` parameter is False by default.
Also see:
- http://docs.python-requests.org/en/master/user/advanced/body-content-workflow
- get_and_save() in this module.
"""
response = self.getResponse(pid, stream, vendorSpecific)
return self._read_stream_response(response)
|
def align_bam(in_bam, ref_file, names, align_dir, data):
"""Perform direct alignment of an input BAM file with BWA using pipes.
This avoids disk IO by piping between processes:
- samtools sort of input BAM to queryname
- bedtools conversion to interleaved FASTQ
- bwa-mem alignment
- samtools conversion to BAM
- samtools sort to coordinate
"""
config = data["config"]
out_file = os.path.join(align_dir, "{0}-sort.bam".format(names["lane"]))
samtools = config_utils.get_program("samtools", config)
bedtools = config_utils.get_program("bedtools", config)
resources = config_utils.get_resources("samtools", config)
num_cores = config["algorithm"].get("num_cores", 1)
# adjust memory for samtools since used for input and output
max_mem = config_utils.adjust_memory(resources.get("memory", "1G"),
3, "decrease").upper()
if not utils.file_exists(out_file):
with tx_tmpdir(data) as work_dir:
with postalign.tobam_cl(data, out_file, bam.is_paired(in_bam)) as (tobam_cl, tx_out_file):
bwa_cmd = _get_bwa_mem_cmd(data, out_file, ref_file, "-")
tx_out_prefix = os.path.splitext(tx_out_file)[0]
prefix1 = "%s-in1" % tx_out_prefix
cmd = ("unset JAVA_HOME && "
"{samtools} sort -n -o -l 1 -@ {num_cores} -m {max_mem} {in_bam} {prefix1} "
"| {bedtools} bamtofastq -i /dev/stdin -fq /dev/stdout -fq2 /dev/stdout "
"| {bwa_cmd} | ")
cmd = cmd.format(**locals()) + tobam_cl
do.run(cmd, "bwa mem alignment from BAM: %s" % names["sample"], None,
[do.file_nonempty(tx_out_file), do.file_reasonable_size(tx_out_file, in_bam)])
return out_file
|
def function[align_bam, parameter[in_bam, ref_file, names, align_dir, data]]:
constant[Perform direct alignment of an input BAM file with BWA using pipes.
This avoids disk IO by piping between processes:
- samtools sort of input BAM to queryname
- bedtools conversion to interleaved FASTQ
- bwa-mem alignment
- samtools conversion to BAM
- samtools sort to coordinate
]
variable[config] assign[=] call[name[data]][constant[config]]
variable[out_file] assign[=] call[name[os].path.join, parameter[name[align_dir], call[constant[{0}-sort.bam].format, parameter[call[name[names]][constant[lane]]]]]]
variable[samtools] assign[=] call[name[config_utils].get_program, parameter[constant[samtools], name[config]]]
variable[bedtools] assign[=] call[name[config_utils].get_program, parameter[constant[bedtools], name[config]]]
variable[resources] assign[=] call[name[config_utils].get_resources, parameter[constant[samtools], name[config]]]
variable[num_cores] assign[=] call[call[name[config]][constant[algorithm]].get, parameter[constant[num_cores], constant[1]]]
variable[max_mem] assign[=] call[call[name[config_utils].adjust_memory, parameter[call[name[resources].get, parameter[constant[memory], constant[1G]]], constant[3], constant[decrease]]].upper, parameter[]]
if <ast.UnaryOp object at 0x7da20c6e64a0> begin[:]
with call[name[tx_tmpdir], parameter[name[data]]] begin[:]
with call[name[postalign].tobam_cl, parameter[name[data], name[out_file], call[name[bam].is_paired, parameter[name[in_bam]]]]] begin[:]
variable[bwa_cmd] assign[=] call[name[_get_bwa_mem_cmd], parameter[name[data], name[out_file], name[ref_file], constant[-]]]
variable[tx_out_prefix] assign[=] call[call[name[os].path.splitext, parameter[name[tx_out_file]]]][constant[0]]
variable[prefix1] assign[=] binary_operation[constant[%s-in1] <ast.Mod object at 0x7da2590d6920> name[tx_out_prefix]]
variable[cmd] assign[=] constant[unset JAVA_HOME && {samtools} sort -n -o -l 1 -@ {num_cores} -m {max_mem} {in_bam} {prefix1} | {bedtools} bamtofastq -i /dev/stdin -fq /dev/stdout -fq2 /dev/stdout | {bwa_cmd} | ]
variable[cmd] assign[=] binary_operation[call[name[cmd].format, parameter[]] + name[tobam_cl]]
call[name[do].run, parameter[name[cmd], binary_operation[constant[bwa mem alignment from BAM: %s] <ast.Mod object at 0x7da2590d6920> call[name[names]][constant[sample]]], constant[None], list[[<ast.Call object at 0x7da20c6e57b0>, <ast.Call object at 0x7da20c6e6770>]]]]
return[name[out_file]]
|
keyword[def] identifier[align_bam] ( identifier[in_bam] , identifier[ref_file] , identifier[names] , identifier[align_dir] , identifier[data] ):
literal[string]
identifier[config] = identifier[data] [ literal[string] ]
identifier[out_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[align_dir] , literal[string] . identifier[format] ( identifier[names] [ literal[string] ]))
identifier[samtools] = identifier[config_utils] . identifier[get_program] ( literal[string] , identifier[config] )
identifier[bedtools] = identifier[config_utils] . identifier[get_program] ( literal[string] , identifier[config] )
identifier[resources] = identifier[config_utils] . identifier[get_resources] ( literal[string] , identifier[config] )
identifier[num_cores] = identifier[config] [ literal[string] ]. identifier[get] ( literal[string] , literal[int] )
identifier[max_mem] = identifier[config_utils] . identifier[adjust_memory] ( identifier[resources] . identifier[get] ( literal[string] , literal[string] ),
literal[int] , literal[string] ). identifier[upper] ()
keyword[if] keyword[not] identifier[utils] . identifier[file_exists] ( identifier[out_file] ):
keyword[with] identifier[tx_tmpdir] ( identifier[data] ) keyword[as] identifier[work_dir] :
keyword[with] identifier[postalign] . identifier[tobam_cl] ( identifier[data] , identifier[out_file] , identifier[bam] . identifier[is_paired] ( identifier[in_bam] )) keyword[as] ( identifier[tobam_cl] , identifier[tx_out_file] ):
identifier[bwa_cmd] = identifier[_get_bwa_mem_cmd] ( identifier[data] , identifier[out_file] , identifier[ref_file] , literal[string] )
identifier[tx_out_prefix] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[tx_out_file] )[ literal[int] ]
identifier[prefix1] = literal[string] % identifier[tx_out_prefix]
identifier[cmd] =( literal[string]
literal[string]
literal[string]
literal[string] )
identifier[cmd] = identifier[cmd] . identifier[format] (** identifier[locals] ())+ identifier[tobam_cl]
identifier[do] . identifier[run] ( identifier[cmd] , literal[string] % identifier[names] [ literal[string] ], keyword[None] ,
[ identifier[do] . identifier[file_nonempty] ( identifier[tx_out_file] ), identifier[do] . identifier[file_reasonable_size] ( identifier[tx_out_file] , identifier[in_bam] )])
keyword[return] identifier[out_file]
|
def align_bam(in_bam, ref_file, names, align_dir, data):
"""Perform direct alignment of an input BAM file with BWA using pipes.
This avoids disk IO by piping between processes:
- samtools sort of input BAM to queryname
- bedtools conversion to interleaved FASTQ
- bwa-mem alignment
- samtools conversion to BAM
- samtools sort to coordinate
"""
config = data['config']
out_file = os.path.join(align_dir, '{0}-sort.bam'.format(names['lane']))
samtools = config_utils.get_program('samtools', config)
bedtools = config_utils.get_program('bedtools', config)
resources = config_utils.get_resources('samtools', config)
num_cores = config['algorithm'].get('num_cores', 1)
# adjust memory for samtools since used for input and output
max_mem = config_utils.adjust_memory(resources.get('memory', '1G'), 3, 'decrease').upper()
if not utils.file_exists(out_file):
with tx_tmpdir(data) as work_dir:
with postalign.tobam_cl(data, out_file, bam.is_paired(in_bam)) as (tobam_cl, tx_out_file):
bwa_cmd = _get_bwa_mem_cmd(data, out_file, ref_file, '-')
tx_out_prefix = os.path.splitext(tx_out_file)[0]
prefix1 = '%s-in1' % tx_out_prefix
cmd = 'unset JAVA_HOME && {samtools} sort -n -o -l 1 -@ {num_cores} -m {max_mem} {in_bam} {prefix1} | {bedtools} bamtofastq -i /dev/stdin -fq /dev/stdout -fq2 /dev/stdout | {bwa_cmd} | '
cmd = cmd.format(**locals()) + tobam_cl
do.run(cmd, 'bwa mem alignment from BAM: %s' % names['sample'], None, [do.file_nonempty(tx_out_file), do.file_reasonable_size(tx_out_file, in_bam)]) # depends on [control=['with'], data=[]] # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
return out_file
|
def mutate(self):
"""Mutates code."""
# Choose a random position
if len(self.code) == 0:
return
index = random.randint(0, len(self.code)-1)
mutation_type = random.random()
if mutation_type < 0.5:
# Change
self.code[index] = self.new().randomize().code[0]
elif mutation_type < 0.75:
# Deletion
del self.code[index]
else:
# Insertion
self.code.insert(index, self.new().randomize().code[0])
|
def function[mutate, parameter[self]]:
constant[Mutates code.]
if compare[call[name[len], parameter[name[self].code]] equal[==] constant[0]] begin[:]
return[None]
variable[index] assign[=] call[name[random].randint, parameter[constant[0], binary_operation[call[name[len], parameter[name[self].code]] - constant[1]]]]
variable[mutation_type] assign[=] call[name[random].random, parameter[]]
if compare[name[mutation_type] less[<] constant[0.5]] begin[:]
call[name[self].code][name[index]] assign[=] call[call[call[name[self].new, parameter[]].randomize, parameter[]].code][constant[0]]
|
keyword[def] identifier[mutate] ( identifier[self] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[code] )== literal[int] :
keyword[return]
identifier[index] = identifier[random] . identifier[randint] ( literal[int] , identifier[len] ( identifier[self] . identifier[code] )- literal[int] )
identifier[mutation_type] = identifier[random] . identifier[random] ()
keyword[if] identifier[mutation_type] < literal[int] :
identifier[self] . identifier[code] [ identifier[index] ]= identifier[self] . identifier[new] (). identifier[randomize] (). identifier[code] [ literal[int] ]
keyword[elif] identifier[mutation_type] < literal[int] :
keyword[del] identifier[self] . identifier[code] [ identifier[index] ]
keyword[else] :
identifier[self] . identifier[code] . identifier[insert] ( identifier[index] , identifier[self] . identifier[new] (). identifier[randomize] (). identifier[code] [ literal[int] ])
|
def mutate(self):
"""Mutates code."""
# Choose a random position
if len(self.code) == 0:
return # depends on [control=['if'], data=[]]
index = random.randint(0, len(self.code) - 1)
mutation_type = random.random()
if mutation_type < 0.5:
# Change
self.code[index] = self.new().randomize().code[0] # depends on [control=['if'], data=[]]
elif mutation_type < 0.75:
# Deletion
del self.code[index] # depends on [control=['if'], data=[]]
else:
# Insertion
self.code.insert(index, self.new().randomize().code[0])
|
def and_inverter_synth(net):
"""
Transforms a decomposed block into one consisting of ands and inverters in place
:param block: The block to synthesize
"""
if net.op in '~&rwcsm@':
return True
def arg(num):
return net.args[num]
dest = net.dests[0]
if net.op == '|':
dest <<= ~(~arg(0) & ~arg(1))
elif net.op == '^':
all_1 = arg(0) & arg(1)
all_0 = ~arg(0) & ~arg(1)
dest <<= all_0 & ~all_1
elif net.op == 'n':
dest <<= ~(arg(0) & arg(1))
else:
raise PyrtlError("Op, '{}' is not supported in and_inv_synth".format(net.op))
|
def function[and_inverter_synth, parameter[net]]:
constant[
Transforms a decomposed block into one consisting of ands and inverters in place
:param block: The block to synthesize
]
if compare[name[net].op in constant[~&rwcsm@]] begin[:]
return[constant[True]]
def function[arg, parameter[num]]:
return[call[name[net].args][name[num]]]
variable[dest] assign[=] call[name[net].dests][constant[0]]
if compare[name[net].op equal[==] constant[|]] begin[:]
<ast.AugAssign object at 0x7da20c796140>
|
keyword[def] identifier[and_inverter_synth] ( identifier[net] ):
literal[string]
keyword[if] identifier[net] . identifier[op] keyword[in] literal[string] :
keyword[return] keyword[True]
keyword[def] identifier[arg] ( identifier[num] ):
keyword[return] identifier[net] . identifier[args] [ identifier[num] ]
identifier[dest] = identifier[net] . identifier[dests] [ literal[int] ]
keyword[if] identifier[net] . identifier[op] == literal[string] :
identifier[dest] <<=~(~ identifier[arg] ( literal[int] )&~ identifier[arg] ( literal[int] ))
keyword[elif] identifier[net] . identifier[op] == literal[string] :
identifier[all_1] = identifier[arg] ( literal[int] )& identifier[arg] ( literal[int] )
identifier[all_0] =~ identifier[arg] ( literal[int] )&~ identifier[arg] ( literal[int] )
identifier[dest] <<= identifier[all_0] &~ identifier[all_1]
keyword[elif] identifier[net] . identifier[op] == literal[string] :
identifier[dest] <<=~( identifier[arg] ( literal[int] )& identifier[arg] ( literal[int] ))
keyword[else] :
keyword[raise] identifier[PyrtlError] ( literal[string] . identifier[format] ( identifier[net] . identifier[op] ))
|
def and_inverter_synth(net):
"""
Transforms a decomposed block into one consisting of ands and inverters in place
:param block: The block to synthesize
"""
if net.op in '~&rwcsm@':
return True # depends on [control=['if'], data=[]]
def arg(num):
return net.args[num]
dest = net.dests[0]
if net.op == '|':
dest <<= ~(~arg(0) & ~arg(1)) # depends on [control=['if'], data=[]]
elif net.op == '^':
all_1 = arg(0) & arg(1)
all_0 = ~arg(0) & ~arg(1)
dest <<= all_0 & ~all_1 # depends on [control=['if'], data=[]]
elif net.op == 'n':
dest <<= ~(arg(0) & arg(1)) # depends on [control=['if'], data=[]]
else:
raise PyrtlError("Op, '{}' is not supported in and_inv_synth".format(net.op))
|
def purge_bad_timestamp_files(file_list):
"Given a list of image files, find bad frames, remove them and modify file_list"
MAX_INITIAL_BAD_FRAMES = 15
bad_ts = Kinect.detect_bad_timestamps(Kinect.timestamps_from_file_list(file_list))
# Trivial case
if not bad_ts:
return file_list
# No bad frames after the initial allowed
last_bad = max(bad_ts)
if last_bad >= MAX_INITIAL_BAD_FRAMES:
raise Exception('Only 15 initial bad frames are allowed, but last bad frame is %d' % last_bad)
# Remove all frames up to the last bad frame
for i in range(last_bad + 1):
os.remove(file_list[i])
# Purge from the list
file_list = file_list[last_bad+1:]
return file_list
|
def function[purge_bad_timestamp_files, parameter[file_list]]:
constant[Given a list of image files, find bad frames, remove them and modify file_list]
variable[MAX_INITIAL_BAD_FRAMES] assign[=] constant[15]
variable[bad_ts] assign[=] call[name[Kinect].detect_bad_timestamps, parameter[call[name[Kinect].timestamps_from_file_list, parameter[name[file_list]]]]]
if <ast.UnaryOp object at 0x7da18f58e500> begin[:]
return[name[file_list]]
variable[last_bad] assign[=] call[name[max], parameter[name[bad_ts]]]
if compare[name[last_bad] greater_or_equal[>=] name[MAX_INITIAL_BAD_FRAMES]] begin[:]
<ast.Raise object at 0x7da1b0625930>
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[name[last_bad] + constant[1]]]]] begin[:]
call[name[os].remove, parameter[call[name[file_list]][name[i]]]]
variable[file_list] assign[=] call[name[file_list]][<ast.Slice object at 0x7da1b0626950>]
return[name[file_list]]
|
keyword[def] identifier[purge_bad_timestamp_files] ( identifier[file_list] ):
literal[string]
identifier[MAX_INITIAL_BAD_FRAMES] = literal[int]
identifier[bad_ts] = identifier[Kinect] . identifier[detect_bad_timestamps] ( identifier[Kinect] . identifier[timestamps_from_file_list] ( identifier[file_list] ))
keyword[if] keyword[not] identifier[bad_ts] :
keyword[return] identifier[file_list]
identifier[last_bad] = identifier[max] ( identifier[bad_ts] )
keyword[if] identifier[last_bad] >= identifier[MAX_INITIAL_BAD_FRAMES] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[last_bad] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[last_bad] + literal[int] ):
identifier[os] . identifier[remove] ( identifier[file_list] [ identifier[i] ])
identifier[file_list] = identifier[file_list] [ identifier[last_bad] + literal[int] :]
keyword[return] identifier[file_list]
|
def purge_bad_timestamp_files(file_list):
"""Given a list of image files, find bad frames, remove them and modify file_list"""
MAX_INITIAL_BAD_FRAMES = 15
bad_ts = Kinect.detect_bad_timestamps(Kinect.timestamps_from_file_list(file_list))
# Trivial case
if not bad_ts:
return file_list # depends on [control=['if'], data=[]]
# No bad frames after the initial allowed
last_bad = max(bad_ts)
if last_bad >= MAX_INITIAL_BAD_FRAMES:
raise Exception('Only 15 initial bad frames are allowed, but last bad frame is %d' % last_bad) # depends on [control=['if'], data=['last_bad']]
# Remove all frames up to the last bad frame
for i in range(last_bad + 1):
os.remove(file_list[i]) # depends on [control=['for'], data=['i']]
# Purge from the list
file_list = file_list[last_bad + 1:]
return file_list
|
def addCategory(self,name):
"""
Adds a new texture category with the given name.
If the category already exists, it will be overridden.
"""
self.categories[name]={}
self.categoriesTexCache[name]={}
self.categoriesTexBin[name]=pyglet.image.atlas.TextureBin(self.texsize,self.texsize)
self.peng.sendEvent("peng3d:rsrc.category.add",{"peng":self.peng,"category":name})
|
def function[addCategory, parameter[self, name]]:
constant[
Adds a new texture category with the given name.
If the category already exists, it will be overridden.
]
call[name[self].categories][name[name]] assign[=] dictionary[[], []]
call[name[self].categoriesTexCache][name[name]] assign[=] dictionary[[], []]
call[name[self].categoriesTexBin][name[name]] assign[=] call[name[pyglet].image.atlas.TextureBin, parameter[name[self].texsize, name[self].texsize]]
call[name[self].peng.sendEvent, parameter[constant[peng3d:rsrc.category.add], dictionary[[<ast.Constant object at 0x7da1b016eaa0>, <ast.Constant object at 0x7da1b016f820>], [<ast.Attribute object at 0x7da1b016edd0>, <ast.Name object at 0x7da1b016e6b0>]]]]
|
keyword[def] identifier[addCategory] ( identifier[self] , identifier[name] ):
literal[string]
identifier[self] . identifier[categories] [ identifier[name] ]={}
identifier[self] . identifier[categoriesTexCache] [ identifier[name] ]={}
identifier[self] . identifier[categoriesTexBin] [ identifier[name] ]= identifier[pyglet] . identifier[image] . identifier[atlas] . identifier[TextureBin] ( identifier[self] . identifier[texsize] , identifier[self] . identifier[texsize] )
identifier[self] . identifier[peng] . identifier[sendEvent] ( literal[string] ,{ literal[string] : identifier[self] . identifier[peng] , literal[string] : identifier[name] })
|
def addCategory(self, name):
"""
Adds a new texture category with the given name.
If the category already exists, it will be overridden.
"""
self.categories[name] = {}
self.categoriesTexCache[name] = {}
self.categoriesTexBin[name] = pyglet.image.atlas.TextureBin(self.texsize, self.texsize)
self.peng.sendEvent('peng3d:rsrc.category.add', {'peng': self.peng, 'category': name})
|
def fatal(self, correlation_id, error, message, *args, **kwargs):
"""
Logs fatal (unrecoverable) message that caused the process to crash.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param error: an error object associated with this message.
:param message: a human-readable message to log.
:param args: arguments to parameterize the message.
:param kwargs: arguments to parameterize the message.
"""
self._format_and_write(LogLevel.Fatal, correlation_id, error, message, args, kwargs)
|
def function[fatal, parameter[self, correlation_id, error, message]]:
constant[
Logs fatal (unrecoverable) message that caused the process to crash.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param error: an error object associated with this message.
:param message: a human-readable message to log.
:param args: arguments to parameterize the message.
:param kwargs: arguments to parameterize the message.
]
call[name[self]._format_and_write, parameter[name[LogLevel].Fatal, name[correlation_id], name[error], name[message], name[args], name[kwargs]]]
|
keyword[def] identifier[fatal] ( identifier[self] , identifier[correlation_id] , identifier[error] , identifier[message] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[_format_and_write] ( identifier[LogLevel] . identifier[Fatal] , identifier[correlation_id] , identifier[error] , identifier[message] , identifier[args] , identifier[kwargs] )
|
def fatal(self, correlation_id, error, message, *args, **kwargs):
"""
Logs fatal (unrecoverable) message that caused the process to crash.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param error: an error object associated with this message.
:param message: a human-readable message to log.
:param args: arguments to parameterize the message.
:param kwargs: arguments to parameterize the message.
"""
self._format_and_write(LogLevel.Fatal, correlation_id, error, message, args, kwargs)
|
def set_form_field_order(form, field_order):
"""
This function is a verbatim copy of django.forms.Form.order_fields() to
support field ordering below Django 1.9.
field_order is a list of field names specifying the order. Append fields
not included in the list in the default order for backward compatibility
with subclasses not overriding field_order. If field_order is None, keep
all fields in the order defined in the class. Ignore unknown fields in
field_order to allow disabling fields in form subclasses without
redefining ordering.
"""
if field_order is None:
return
fields = OrderedDict()
for key in field_order:
try:
fields[key] = form.fields.pop(key)
except KeyError: # ignore unknown fields
pass
fields.update(form.fields) # add remaining fields in original order
form.fields = fields
|
def function[set_form_field_order, parameter[form, field_order]]:
constant[
This function is a verbatim copy of django.forms.Form.order_fields() to
support field ordering below Django 1.9.
field_order is a list of field names specifying the order. Append fields
not included in the list in the default order for backward compatibility
with subclasses not overriding field_order. If field_order is None, keep
all fields in the order defined in the class. Ignore unknown fields in
field_order to allow disabling fields in form subclasses without
redefining ordering.
]
if compare[name[field_order] is constant[None]] begin[:]
return[None]
variable[fields] assign[=] call[name[OrderedDict], parameter[]]
for taget[name[key]] in starred[name[field_order]] begin[:]
<ast.Try object at 0x7da18f810e50>
call[name[fields].update, parameter[name[form].fields]]
name[form].fields assign[=] name[fields]
|
keyword[def] identifier[set_form_field_order] ( identifier[form] , identifier[field_order] ):
literal[string]
keyword[if] identifier[field_order] keyword[is] keyword[None] :
keyword[return]
identifier[fields] = identifier[OrderedDict] ()
keyword[for] identifier[key] keyword[in] identifier[field_order] :
keyword[try] :
identifier[fields] [ identifier[key] ]= identifier[form] . identifier[fields] . identifier[pop] ( identifier[key] )
keyword[except] identifier[KeyError] :
keyword[pass]
identifier[fields] . identifier[update] ( identifier[form] . identifier[fields] )
identifier[form] . identifier[fields] = identifier[fields]
|
def set_form_field_order(form, field_order):
"""
This function is a verbatim copy of django.forms.Form.order_fields() to
support field ordering below Django 1.9.
field_order is a list of field names specifying the order. Append fields
not included in the list in the default order for backward compatibility
with subclasses not overriding field_order. If field_order is None, keep
all fields in the order defined in the class. Ignore unknown fields in
field_order to allow disabling fields in form subclasses without
redefining ordering.
"""
if field_order is None:
return # depends on [control=['if'], data=[]]
fields = OrderedDict()
for key in field_order:
try:
fields[key] = form.fields.pop(key) # depends on [control=['try'], data=[]]
except KeyError: # ignore unknown fields
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['key']]
fields.update(form.fields) # add remaining fields in original order
form.fields = fields
|
def doRollover(self):
"""
do a rollover; in this case, a date/time stamp is appended to the filename
when the rollover happens. However, you want the file to be named for the
start of the interval, not the current time. If there is a backup count,
then we have to get a list of matching filenames, sort them and remove
the one with the oldest suffix.
"""
self.stream.close()
# get the time that this sequence started at and make it a TimeTuple
t = self.rolloverAt - self.interval
timeTuple = time.localtime(t)
dfn = self.baseFilename + "." + time.strftime(self.suffix, timeTuple)
if os.path.exists(dfn):
os.remove(dfn)
os.rename(self.baseFilename, dfn)
if self.backupCount > 0:
# find the oldest log file and delete it
s = glob.glob(self.baseFilename + ".20*")
if len(s) > self.backupCount:
s.sort()
os.remove(s[0])
#print "%s -> %s" % (self.baseFilename, dfn)
if self.encoding:
self.stream = codecs.open(self.baseFilename, 'w', self.encoding)
else:
self.stream = open(self.baseFilename, 'w')
self.rolloverAt = self.rolloverAt + self.interval
if os.path.exists(dfn + ".zip"):
os.remove(dfn + ".zip")
file = zipfile.ZipFile(dfn + ".zip", "w")
file.write(dfn, os.path.basename(dfn), zipfile.ZIP_DEFLATED)
file.close()
os.remove(dfn)
|
def function[doRollover, parameter[self]]:
constant[
do a rollover; in this case, a date/time stamp is appended to the filename
when the rollover happens. However, you want the file to be named for the
start of the interval, not the current time. If there is a backup count,
then we have to get a list of matching filenames, sort them and remove
the one with the oldest suffix.
]
call[name[self].stream.close, parameter[]]
variable[t] assign[=] binary_operation[name[self].rolloverAt - name[self].interval]
variable[timeTuple] assign[=] call[name[time].localtime, parameter[name[t]]]
variable[dfn] assign[=] binary_operation[binary_operation[name[self].baseFilename + constant[.]] + call[name[time].strftime, parameter[name[self].suffix, name[timeTuple]]]]
if call[name[os].path.exists, parameter[name[dfn]]] begin[:]
call[name[os].remove, parameter[name[dfn]]]
call[name[os].rename, parameter[name[self].baseFilename, name[dfn]]]
if compare[name[self].backupCount greater[>] constant[0]] begin[:]
variable[s] assign[=] call[name[glob].glob, parameter[binary_operation[name[self].baseFilename + constant[.20*]]]]
if compare[call[name[len], parameter[name[s]]] greater[>] name[self].backupCount] begin[:]
call[name[s].sort, parameter[]]
call[name[os].remove, parameter[call[name[s]][constant[0]]]]
if name[self].encoding begin[:]
name[self].stream assign[=] call[name[codecs].open, parameter[name[self].baseFilename, constant[w], name[self].encoding]]
name[self].rolloverAt assign[=] binary_operation[name[self].rolloverAt + name[self].interval]
if call[name[os].path.exists, parameter[binary_operation[name[dfn] + constant[.zip]]]] begin[:]
call[name[os].remove, parameter[binary_operation[name[dfn] + constant[.zip]]]]
variable[file] assign[=] call[name[zipfile].ZipFile, parameter[binary_operation[name[dfn] + constant[.zip]], constant[w]]]
call[name[file].write, parameter[name[dfn], call[name[os].path.basename, parameter[name[dfn]]], name[zipfile].ZIP_DEFLATED]]
call[name[file].close, parameter[]]
call[name[os].remove, parameter[name[dfn]]]
|
keyword[def] identifier[doRollover] ( identifier[self] ):
literal[string]
identifier[self] . identifier[stream] . identifier[close] ()
identifier[t] = identifier[self] . identifier[rolloverAt] - identifier[self] . identifier[interval]
identifier[timeTuple] = identifier[time] . identifier[localtime] ( identifier[t] )
identifier[dfn] = identifier[self] . identifier[baseFilename] + literal[string] + identifier[time] . identifier[strftime] ( identifier[self] . identifier[suffix] , identifier[timeTuple] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[dfn] ):
identifier[os] . identifier[remove] ( identifier[dfn] )
identifier[os] . identifier[rename] ( identifier[self] . identifier[baseFilename] , identifier[dfn] )
keyword[if] identifier[self] . identifier[backupCount] > literal[int] :
identifier[s] = identifier[glob] . identifier[glob] ( identifier[self] . identifier[baseFilename] + literal[string] )
keyword[if] identifier[len] ( identifier[s] )> identifier[self] . identifier[backupCount] :
identifier[s] . identifier[sort] ()
identifier[os] . identifier[remove] ( identifier[s] [ literal[int] ])
keyword[if] identifier[self] . identifier[encoding] :
identifier[self] . identifier[stream] = identifier[codecs] . identifier[open] ( identifier[self] . identifier[baseFilename] , literal[string] , identifier[self] . identifier[encoding] )
keyword[else] :
identifier[self] . identifier[stream] = identifier[open] ( identifier[self] . identifier[baseFilename] , literal[string] )
identifier[self] . identifier[rolloverAt] = identifier[self] . identifier[rolloverAt] + identifier[self] . identifier[interval]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[dfn] + literal[string] ):
identifier[os] . identifier[remove] ( identifier[dfn] + literal[string] )
identifier[file] = identifier[zipfile] . identifier[ZipFile] ( identifier[dfn] + literal[string] , literal[string] )
identifier[file] . identifier[write] ( identifier[dfn] , identifier[os] . identifier[path] . identifier[basename] ( identifier[dfn] ), identifier[zipfile] . identifier[ZIP_DEFLATED] )
identifier[file] . identifier[close] ()
identifier[os] . identifier[remove] ( identifier[dfn] )
|
def doRollover(self):
"""
do a rollover; in this case, a date/time stamp is appended to the filename
when the rollover happens. However, you want the file to be named for the
start of the interval, not the current time. If there is a backup count,
then we have to get a list of matching filenames, sort them and remove
the one with the oldest suffix.
"""
self.stream.close()
# get the time that this sequence started at and make it a TimeTuple
t = self.rolloverAt - self.interval
timeTuple = time.localtime(t)
dfn = self.baseFilename + '.' + time.strftime(self.suffix, timeTuple)
if os.path.exists(dfn):
os.remove(dfn) # depends on [control=['if'], data=[]]
os.rename(self.baseFilename, dfn)
if self.backupCount > 0:
# find the oldest log file and delete it
s = glob.glob(self.baseFilename + '.20*')
if len(s) > self.backupCount:
s.sort()
os.remove(s[0]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
#print "%s -> %s" % (self.baseFilename, dfn)
if self.encoding:
self.stream = codecs.open(self.baseFilename, 'w', self.encoding) # depends on [control=['if'], data=[]]
else:
self.stream = open(self.baseFilename, 'w')
self.rolloverAt = self.rolloverAt + self.interval
if os.path.exists(dfn + '.zip'):
os.remove(dfn + '.zip') # depends on [control=['if'], data=[]]
file = zipfile.ZipFile(dfn + '.zip', 'w')
file.write(dfn, os.path.basename(dfn), zipfile.ZIP_DEFLATED)
file.close()
os.remove(dfn)
|
def render_badge(user):
''' Renders a single user's badge. '''
data = {
"user": user,
}
t = loader.get_template('registrasion/badge.svg')
return t.render(data)
|
def function[render_badge, parameter[user]]:
constant[ Renders a single user's badge. ]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18eb56290>], [<ast.Name object at 0x7da18eb54550>]]
variable[t] assign[=] call[name[loader].get_template, parameter[constant[registrasion/badge.svg]]]
return[call[name[t].render, parameter[name[data]]]]
|
keyword[def] identifier[render_badge] ( identifier[user] ):
literal[string]
identifier[data] ={
literal[string] : identifier[user] ,
}
identifier[t] = identifier[loader] . identifier[get_template] ( literal[string] )
keyword[return] identifier[t] . identifier[render] ( identifier[data] )
|
def render_badge(user):
""" Renders a single user's badge. """
data = {'user': user}
t = loader.get_template('registrasion/badge.svg')
return t.render(data)
|
def _intersected_edge(self, edges, cut_edge):
""" Given a list of *edges*, return the first that is intersected by
*cut_edge*.
"""
for edge in edges:
if self._edges_intersect(edge, cut_edge):
return edge
|
def function[_intersected_edge, parameter[self, edges, cut_edge]]:
constant[ Given a list of *edges*, return the first that is intersected by
*cut_edge*.
]
for taget[name[edge]] in starred[name[edges]] begin[:]
if call[name[self]._edges_intersect, parameter[name[edge], name[cut_edge]]] begin[:]
return[name[edge]]
|
keyword[def] identifier[_intersected_edge] ( identifier[self] , identifier[edges] , identifier[cut_edge] ):
literal[string]
keyword[for] identifier[edge] keyword[in] identifier[edges] :
keyword[if] identifier[self] . identifier[_edges_intersect] ( identifier[edge] , identifier[cut_edge] ):
keyword[return] identifier[edge]
|
def _intersected_edge(self, edges, cut_edge):
""" Given a list of *edges*, return the first that is intersected by
*cut_edge*.
"""
for edge in edges:
if self._edges_intersect(edge, cut_edge):
return edge # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['edge']]
|
def open_telemetry_logs(logpath_telem, logpath_telem_raw):
'''open log files'''
if opts.append_log or opts.continue_mode:
mode = 'a'
else:
mode = 'w'
mpstate.logfile = open(logpath_telem, mode=mode)
mpstate.logfile_raw = open(logpath_telem_raw, mode=mode)
print("Log Directory: %s" % mpstate.status.logdir)
print("Telemetry log: %s" % logpath_telem)
# use a separate thread for writing to the logfile to prevent
# delays during disk writes (important as delays can be long if camera
# app is running)
t = threading.Thread(target=log_writer, name='log_writer')
t.daemon = True
t.start()
|
def function[open_telemetry_logs, parameter[logpath_telem, logpath_telem_raw]]:
constant[open log files]
if <ast.BoolOp object at 0x7da2041d9990> begin[:]
variable[mode] assign[=] constant[a]
name[mpstate].logfile assign[=] call[name[open], parameter[name[logpath_telem]]]
name[mpstate].logfile_raw assign[=] call[name[open], parameter[name[logpath_telem_raw]]]
call[name[print], parameter[binary_operation[constant[Log Directory: %s] <ast.Mod object at 0x7da2590d6920> name[mpstate].status.logdir]]]
call[name[print], parameter[binary_operation[constant[Telemetry log: %s] <ast.Mod object at 0x7da2590d6920> name[logpath_telem]]]]
variable[t] assign[=] call[name[threading].Thread, parameter[]]
name[t].daemon assign[=] constant[True]
call[name[t].start, parameter[]]
|
keyword[def] identifier[open_telemetry_logs] ( identifier[logpath_telem] , identifier[logpath_telem_raw] ):
literal[string]
keyword[if] identifier[opts] . identifier[append_log] keyword[or] identifier[opts] . identifier[continue_mode] :
identifier[mode] = literal[string]
keyword[else] :
identifier[mode] = literal[string]
identifier[mpstate] . identifier[logfile] = identifier[open] ( identifier[logpath_telem] , identifier[mode] = identifier[mode] )
identifier[mpstate] . identifier[logfile_raw] = identifier[open] ( identifier[logpath_telem_raw] , identifier[mode] = identifier[mode] )
identifier[print] ( literal[string] % identifier[mpstate] . identifier[status] . identifier[logdir] )
identifier[print] ( literal[string] % identifier[logpath_telem] )
identifier[t] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[log_writer] , identifier[name] = literal[string] )
identifier[t] . identifier[daemon] = keyword[True]
identifier[t] . identifier[start] ()
|
def open_telemetry_logs(logpath_telem, logpath_telem_raw):
"""open log files"""
if opts.append_log or opts.continue_mode:
mode = 'a' # depends on [control=['if'], data=[]]
else:
mode = 'w'
mpstate.logfile = open(logpath_telem, mode=mode)
mpstate.logfile_raw = open(logpath_telem_raw, mode=mode)
print('Log Directory: %s' % mpstate.status.logdir)
print('Telemetry log: %s' % logpath_telem)
# use a separate thread for writing to the logfile to prevent
# delays during disk writes (important as delays can be long if camera
# app is running)
t = threading.Thread(target=log_writer, name='log_writer')
t.daemon = True
t.start()
|
def ensureModelData(self,obj):
"""
Ensures that the given ``obj`` has been initialized to be used with this model.
If the object is found to not be initialized, it will be initialized.
"""
if not hasattr(obj,"_modeldata"):
self.create(obj,cache=True)
if "_modelcache" not in obj._modeldata:
# Assume all initialization is missing, simply reinitialize
self.create(obj,cache=True)
|
def function[ensureModelData, parameter[self, obj]]:
constant[
Ensures that the given ``obj`` has been initialized to be used with this model.
If the object is found to not be initialized, it will be initialized.
]
if <ast.UnaryOp object at 0x7da1b012d270> begin[:]
call[name[self].create, parameter[name[obj]]]
if compare[constant[_modelcache] <ast.NotIn object at 0x7da2590d7190> name[obj]._modeldata] begin[:]
call[name[self].create, parameter[name[obj]]]
|
keyword[def] identifier[ensureModelData] ( identifier[self] , identifier[obj] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[obj] , literal[string] ):
identifier[self] . identifier[create] ( identifier[obj] , identifier[cache] = keyword[True] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[obj] . identifier[_modeldata] :
identifier[self] . identifier[create] ( identifier[obj] , identifier[cache] = keyword[True] )
|
def ensureModelData(self, obj):
"""
Ensures that the given ``obj`` has been initialized to be used with this model.
If the object is found to not be initialized, it will be initialized.
"""
if not hasattr(obj, '_modeldata'):
self.create(obj, cache=True) # depends on [control=['if'], data=[]]
if '_modelcache' not in obj._modeldata:
# Assume all initialization is missing, simply reinitialize
self.create(obj, cache=True) # depends on [control=['if'], data=[]]
|
def group_members_remove(self, device_group_id, body, **kwargs): # noqa: E501
"""Remove a device from a group # noqa: E501
Remove one device from a group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.group_members_remove(device_group_id, body, asynchronous=True)
>>> result = thread.get()
:param asynchronous bool
:param str device_group_id: The ID of the group (required)
:param DeviceGroupManipulation body: Body of the request (required)
:return: DevicePage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('asynchronous'):
return self.group_members_remove_with_http_info(device_group_id, body, **kwargs) # noqa: E501
else:
(data) = self.group_members_remove_with_http_info(device_group_id, body, **kwargs) # noqa: E501
return data
|
def function[group_members_remove, parameter[self, device_group_id, body]]:
constant[Remove a device from a group # noqa: E501
Remove one device from a group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.group_members_remove(device_group_id, body, asynchronous=True)
>>> result = thread.get()
:param asynchronous bool
:param str device_group_id: The ID of the group (required)
:param DeviceGroupManipulation body: Body of the request (required)
:return: DevicePage
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[asynchronous]]] begin[:]
return[call[name[self].group_members_remove_with_http_info, parameter[name[device_group_id], name[body]]]]
|
keyword[def] identifier[group_members_remove] ( identifier[self] , identifier[device_group_id] , identifier[body] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[group_members_remove_with_http_info] ( identifier[device_group_id] , identifier[body] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[group_members_remove_with_http_info] ( identifier[device_group_id] , identifier[body] ,** identifier[kwargs] )
keyword[return] identifier[data]
|
def group_members_remove(self, device_group_id, body, **kwargs): # noqa: E501
'Remove a device from a group # noqa: E501\n\n Remove one device from a group # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass asynchronous=True\n >>> thread = api.group_members_remove(device_group_id, body, asynchronous=True)\n >>> result = thread.get()\n\n :param asynchronous bool\n :param str device_group_id: The ID of the group (required)\n :param DeviceGroupManipulation body: Body of the request (required)\n :return: DevicePage\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('asynchronous'):
return self.group_members_remove_with_http_info(device_group_id, body, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.group_members_remove_with_http_info(device_group_id, body, **kwargs) # noqa: E501
return data
|
def _process_module_needs(self, modules):
"""Adds the module and its dependencies to the result list in dependency order."""
result = list(modules)
for i, module in enumerate(modules):
#It is possible that the parser couldn't find it, if so
#we can't create the executable!
if module in self.module.parent.modules:
modneeds = self.module.parent.modules[module].needs
for modn in modneeds:
if modn not in result:
#Since this module depends on the other, insert the other
#above it in the list.
result.insert(i, modn)
else:
x = result.index(modn)
if x > i:
#We need to move this module higher up in the food chain
#because it is needed sooner.
result.remove(modn)
result.insert(i, modn)
newi = result.index(modn)
else:
raise ValueError("Unable to find module {}.".format(module))
return result
|
def function[_process_module_needs, parameter[self, modules]]:
constant[Adds the module and its dependencies to the result list in dependency order.]
variable[result] assign[=] call[name[list], parameter[name[modules]]]
for taget[tuple[[<ast.Name object at 0x7da1b255f640>, <ast.Name object at 0x7da1b255e890>]]] in starred[call[name[enumerate], parameter[name[modules]]]] begin[:]
if compare[name[module] in name[self].module.parent.modules] begin[:]
variable[modneeds] assign[=] call[name[self].module.parent.modules][name[module]].needs
for taget[name[modn]] in starred[name[modneeds]] begin[:]
if compare[name[modn] <ast.NotIn object at 0x7da2590d7190> name[result]] begin[:]
call[name[result].insert, parameter[name[i], name[modn]]]
variable[newi] assign[=] call[name[result].index, parameter[name[modn]]]
return[name[result]]
|
keyword[def] identifier[_process_module_needs] ( identifier[self] , identifier[modules] ):
literal[string]
identifier[result] = identifier[list] ( identifier[modules] )
keyword[for] identifier[i] , identifier[module] keyword[in] identifier[enumerate] ( identifier[modules] ):
keyword[if] identifier[module] keyword[in] identifier[self] . identifier[module] . identifier[parent] . identifier[modules] :
identifier[modneeds] = identifier[self] . identifier[module] . identifier[parent] . identifier[modules] [ identifier[module] ]. identifier[needs]
keyword[for] identifier[modn] keyword[in] identifier[modneeds] :
keyword[if] identifier[modn] keyword[not] keyword[in] identifier[result] :
identifier[result] . identifier[insert] ( identifier[i] , identifier[modn] )
keyword[else] :
identifier[x] = identifier[result] . identifier[index] ( identifier[modn] )
keyword[if] identifier[x] > identifier[i] :
identifier[result] . identifier[remove] ( identifier[modn] )
identifier[result] . identifier[insert] ( identifier[i] , identifier[modn] )
identifier[newi] = identifier[result] . identifier[index] ( identifier[modn] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[module] ))
keyword[return] identifier[result]
|
def _process_module_needs(self, modules):
"""Adds the module and its dependencies to the result list in dependency order."""
result = list(modules)
for (i, module) in enumerate(modules):
#It is possible that the parser couldn't find it, if so
#we can't create the executable!
if module in self.module.parent.modules:
modneeds = self.module.parent.modules[module].needs
for modn in modneeds:
if modn not in result:
#Since this module depends on the other, insert the other
#above it in the list.
result.insert(i, modn) # depends on [control=['if'], data=['modn', 'result']]
else:
x = result.index(modn)
if x > i:
#We need to move this module higher up in the food chain
#because it is needed sooner.
result.remove(modn)
result.insert(i, modn) # depends on [control=['if'], data=['i']]
newi = result.index(modn) # depends on [control=['for'], data=['modn']] # depends on [control=['if'], data=['module']]
else:
raise ValueError('Unable to find module {}.'.format(module)) # depends on [control=['for'], data=[]]
return result
|
def get_analysis_config(self, group_name):
""" Gets any config data saved for the analysis.
:param group_name: The name of the analysis group.
:returns: A dictionary of dictionaries. Each key represents
an analysis step. Each value is a dictionary containing the
analysis parameters as key/value pairs. Returns None if no
configuration exists for the analysis.
"""
self.assert_open()
group = 'Analyses/{}/Configuration'.format(group_name)
config = None
if group in self.handle:
config = self._parse_attribute_tree(group)
return config
|
def function[get_analysis_config, parameter[self, group_name]]:
constant[ Gets any config data saved for the analysis.
:param group_name: The name of the analysis group.
:returns: A dictionary of dictionaries. Each key represents
an analysis step. Each value is a dictionary containing the
analysis parameters as key/value pairs. Returns None if no
configuration exists for the analysis.
]
call[name[self].assert_open, parameter[]]
variable[group] assign[=] call[constant[Analyses/{}/Configuration].format, parameter[name[group_name]]]
variable[config] assign[=] constant[None]
if compare[name[group] in name[self].handle] begin[:]
variable[config] assign[=] call[name[self]._parse_attribute_tree, parameter[name[group]]]
return[name[config]]
|
keyword[def] identifier[get_analysis_config] ( identifier[self] , identifier[group_name] ):
literal[string]
identifier[self] . identifier[assert_open] ()
identifier[group] = literal[string] . identifier[format] ( identifier[group_name] )
identifier[config] = keyword[None]
keyword[if] identifier[group] keyword[in] identifier[self] . identifier[handle] :
identifier[config] = identifier[self] . identifier[_parse_attribute_tree] ( identifier[group] )
keyword[return] identifier[config]
|
def get_analysis_config(self, group_name):
""" Gets any config data saved for the analysis.
:param group_name: The name of the analysis group.
:returns: A dictionary of dictionaries. Each key represents
an analysis step. Each value is a dictionary containing the
analysis parameters as key/value pairs. Returns None if no
configuration exists for the analysis.
"""
self.assert_open()
group = 'Analyses/{}/Configuration'.format(group_name)
config = None
if group in self.handle:
config = self._parse_attribute_tree(group) # depends on [control=['if'], data=['group']]
return config
|
def __parse_child(self, node):
'''for rpc-style map each message part to a class in typesmodule
'''
try:
tc = self.gettypecode(self.typesmodule, node)
except:
self.logger.debug('didnt find typecode for "%s" in typesmodule: %s',
node.localName, self.typesmodule)
tc = TC.Any(aslist=1)
return tc.parse(node, self.ps)
self.logger.debug('parse child with typecode : %s', tc)
try:
return tc.parse(node, self.ps)
except Exception:
self.logger.debug('parse failed try Any : %s', tc)
tc = TC.Any(aslist=1)
return tc.parse(node, self.ps)
|
def function[__parse_child, parameter[self, node]]:
constant[for rpc-style map each message part to a class in typesmodule
]
<ast.Try object at 0x7da1b1595150>
call[name[self].logger.debug, parameter[constant[parse child with typecode : %s], name[tc]]]
<ast.Try object at 0x7da1b13347c0>
variable[tc] assign[=] call[name[TC].Any, parameter[]]
return[call[name[tc].parse, parameter[name[node], name[self].ps]]]
|
keyword[def] identifier[__parse_child] ( identifier[self] , identifier[node] ):
literal[string]
keyword[try] :
identifier[tc] = identifier[self] . identifier[gettypecode] ( identifier[self] . identifier[typesmodule] , identifier[node] )
keyword[except] :
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ,
identifier[node] . identifier[localName] , identifier[self] . identifier[typesmodule] )
identifier[tc] = identifier[TC] . identifier[Any] ( identifier[aslist] = literal[int] )
keyword[return] identifier[tc] . identifier[parse] ( identifier[node] , identifier[self] . identifier[ps] )
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] , identifier[tc] )
keyword[try] :
keyword[return] identifier[tc] . identifier[parse] ( identifier[node] , identifier[self] . identifier[ps] )
keyword[except] identifier[Exception] :
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] , identifier[tc] )
identifier[tc] = identifier[TC] . identifier[Any] ( identifier[aslist] = literal[int] )
keyword[return] identifier[tc] . identifier[parse] ( identifier[node] , identifier[self] . identifier[ps] )
|
def __parse_child(self, node):
"""for rpc-style map each message part to a class in typesmodule
"""
try:
tc = self.gettypecode(self.typesmodule, node) # depends on [control=['try'], data=[]]
except:
self.logger.debug('didnt find typecode for "%s" in typesmodule: %s', node.localName, self.typesmodule)
tc = TC.Any(aslist=1)
return tc.parse(node, self.ps) # depends on [control=['except'], data=[]]
self.logger.debug('parse child with typecode : %s', tc)
try:
return tc.parse(node, self.ps) # depends on [control=['try'], data=[]]
except Exception:
self.logger.debug('parse failed try Any : %s', tc) # depends on [control=['except'], data=[]]
tc = TC.Any(aslist=1)
return tc.parse(node, self.ps)
|
def stitch(images):
"""Stitch regular spaced images.
Parameters
----------
images : ImageCollection or list of tuple(path, row, column)
Each image-tuple should contain path, row and column. Row 0,
column 0 is top left image.
Example:
>>> images = [('1.png', 0, 0), ('2.png', 0, 1)]
Returns
-------
tuple (stitched, offset)
Stitched image and registered offset (y, x).
"""
if type(images) != ImageCollection:
images = ImageCollection(images)
calc_translations_parallel(images)
_translation_warn(images)
yoffset, xoffset = images.median_translation()
if xoffset != yoffset:
warn('yoffset != xoffset: %s != %s' % (yoffset, xoffset))
# assume all images have the same shape
y, x = imread(images[0].path).shape
height = y*len(images.rows) + yoffset*(len(images.rows)-1)
width = x*len(images.cols) + xoffset*(len(images.cols)-1)
# last dimension is number of images on top of each other
merged = np.zeros((height, width, 2), dtype=np.int)
for image in images:
r, c = image.row, image.col
mask = _merge_slice(r, c, y, x, yoffset, xoffset)
# last dim is used for averaging the seam
img = _add_ones_dim(imread(image.path))
merged[mask] += img
# average seam, possible improvement: use gradient
merged[..., 0] /= merged[..., 1]
return merged[..., 0].astype(np.uint8), (yoffset, xoffset)
|
def function[stitch, parameter[images]]:
constant[Stitch regular spaced images.
Parameters
----------
images : ImageCollection or list of tuple(path, row, column)
Each image-tuple should contain path, row and column. Row 0,
column 0 is top left image.
Example:
>>> images = [('1.png', 0, 0), ('2.png', 0, 1)]
Returns
-------
tuple (stitched, offset)
Stitched image and registered offset (y, x).
]
if compare[call[name[type], parameter[name[images]]] not_equal[!=] name[ImageCollection]] begin[:]
variable[images] assign[=] call[name[ImageCollection], parameter[name[images]]]
call[name[calc_translations_parallel], parameter[name[images]]]
call[name[_translation_warn], parameter[name[images]]]
<ast.Tuple object at 0x7da1b01793f0> assign[=] call[name[images].median_translation, parameter[]]
if compare[name[xoffset] not_equal[!=] name[yoffset]] begin[:]
call[name[warn], parameter[binary_operation[constant[yoffset != xoffset: %s != %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0213c40>, <ast.Name object at 0x7da1b0212ec0>]]]]]
<ast.Tuple object at 0x7da1b0211000> assign[=] call[name[imread], parameter[call[name[images]][constant[0]].path]].shape
variable[height] assign[=] binary_operation[binary_operation[name[y] * call[name[len], parameter[name[images].rows]]] + binary_operation[name[yoffset] * binary_operation[call[name[len], parameter[name[images].rows]] - constant[1]]]]
variable[width] assign[=] binary_operation[binary_operation[name[x] * call[name[len], parameter[name[images].cols]]] + binary_operation[name[xoffset] * binary_operation[call[name[len], parameter[name[images].cols]] - constant[1]]]]
variable[merged] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b0210a00>, <ast.Name object at 0x7da1b0210760>, <ast.Constant object at 0x7da1b02114b0>]]]]
for taget[name[image]] in starred[name[images]] begin[:]
<ast.Tuple object at 0x7da1b0212200> assign[=] tuple[[<ast.Attribute object at 0x7da1b0213610>, <ast.Attribute object at 0x7da1b0213580>]]
variable[mask] assign[=] call[name[_merge_slice], parameter[name[r], name[c], name[y], name[x], name[yoffset], name[xoffset]]]
variable[img] assign[=] call[name[_add_ones_dim], parameter[call[name[imread], parameter[name[image].path]]]]
<ast.AugAssign object at 0x7da1b0213190>
<ast.AugAssign object at 0x7da1b02131c0>
return[tuple[[<ast.Call object at 0x7da1b0168c40>, <ast.Tuple object at 0x7da1b0168df0>]]]
|
keyword[def] identifier[stitch] ( identifier[images] ):
literal[string]
keyword[if] identifier[type] ( identifier[images] )!= identifier[ImageCollection] :
identifier[images] = identifier[ImageCollection] ( identifier[images] )
identifier[calc_translations_parallel] ( identifier[images] )
identifier[_translation_warn] ( identifier[images] )
identifier[yoffset] , identifier[xoffset] = identifier[images] . identifier[median_translation] ()
keyword[if] identifier[xoffset] != identifier[yoffset] :
identifier[warn] ( literal[string] %( identifier[yoffset] , identifier[xoffset] ))
identifier[y] , identifier[x] = identifier[imread] ( identifier[images] [ literal[int] ]. identifier[path] ). identifier[shape]
identifier[height] = identifier[y] * identifier[len] ( identifier[images] . identifier[rows] )+ identifier[yoffset] *( identifier[len] ( identifier[images] . identifier[rows] )- literal[int] )
identifier[width] = identifier[x] * identifier[len] ( identifier[images] . identifier[cols] )+ identifier[xoffset] *( identifier[len] ( identifier[images] . identifier[cols] )- literal[int] )
identifier[merged] = identifier[np] . identifier[zeros] (( identifier[height] , identifier[width] , literal[int] ), identifier[dtype] = identifier[np] . identifier[int] )
keyword[for] identifier[image] keyword[in] identifier[images] :
identifier[r] , identifier[c] = identifier[image] . identifier[row] , identifier[image] . identifier[col]
identifier[mask] = identifier[_merge_slice] ( identifier[r] , identifier[c] , identifier[y] , identifier[x] , identifier[yoffset] , identifier[xoffset] )
identifier[img] = identifier[_add_ones_dim] ( identifier[imread] ( identifier[image] . identifier[path] ))
identifier[merged] [ identifier[mask] ]+= identifier[img]
identifier[merged] [..., literal[int] ]/= identifier[merged] [..., literal[int] ]
keyword[return] identifier[merged] [..., literal[int] ]. identifier[astype] ( identifier[np] . identifier[uint8] ),( identifier[yoffset] , identifier[xoffset] )
|
def stitch(images):
"""Stitch regular spaced images.
Parameters
----------
images : ImageCollection or list of tuple(path, row, column)
Each image-tuple should contain path, row and column. Row 0,
column 0 is top left image.
Example:
>>> images = [('1.png', 0, 0), ('2.png', 0, 1)]
Returns
-------
tuple (stitched, offset)
Stitched image and registered offset (y, x).
"""
if type(images) != ImageCollection:
images = ImageCollection(images) # depends on [control=['if'], data=['ImageCollection']]
calc_translations_parallel(images)
_translation_warn(images)
(yoffset, xoffset) = images.median_translation()
if xoffset != yoffset:
warn('yoffset != xoffset: %s != %s' % (yoffset, xoffset)) # depends on [control=['if'], data=['xoffset', 'yoffset']]
# assume all images have the same shape
(y, x) = imread(images[0].path).shape
height = y * len(images.rows) + yoffset * (len(images.rows) - 1)
width = x * len(images.cols) + xoffset * (len(images.cols) - 1)
# last dimension is number of images on top of each other
merged = np.zeros((height, width, 2), dtype=np.int)
for image in images:
(r, c) = (image.row, image.col)
mask = _merge_slice(r, c, y, x, yoffset, xoffset)
# last dim is used for averaging the seam
img = _add_ones_dim(imread(image.path))
merged[mask] += img # depends on [control=['for'], data=['image']]
# average seam, possible improvement: use gradient
merged[..., 0] /= merged[..., 1]
return (merged[..., 0].astype(np.uint8), (yoffset, xoffset))
|
def QA_util_dict_remove_key(dicts, key):
"""
输入一个dict 返回删除后的
"""
if isinstance(key, list):
for item in key:
try:
dicts.pop(item)
except:
pass
else:
try:
dicts.pop(key)
except:
pass
return dicts
|
def function[QA_util_dict_remove_key, parameter[dicts, key]]:
constant[
输入一个dict 返回删除后的
]
if call[name[isinstance], parameter[name[key], name[list]]] begin[:]
for taget[name[item]] in starred[name[key]] begin[:]
<ast.Try object at 0x7da1b1f45ae0>
return[name[dicts]]
|
keyword[def] identifier[QA_util_dict_remove_key] ( identifier[dicts] , identifier[key] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[key] , identifier[list] ):
keyword[for] identifier[item] keyword[in] identifier[key] :
keyword[try] :
identifier[dicts] . identifier[pop] ( identifier[item] )
keyword[except] :
keyword[pass]
keyword[else] :
keyword[try] :
identifier[dicts] . identifier[pop] ( identifier[key] )
keyword[except] :
keyword[pass]
keyword[return] identifier[dicts]
|
def QA_util_dict_remove_key(dicts, key):
"""
输入一个dict 返回删除后的
"""
if isinstance(key, list):
for item in key:
try:
dicts.pop(item) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]]
else:
try:
dicts.pop(key) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
return dicts
|
def should_use(intersection):
"""Check if an intersection can be used as part of a curved polygon.
Will return :data:`True` if the intersection is classified as
:attr:`~.IntersectionClassification.FIRST`,
:attr:`~.IntersectionClassification.SECOND` or
:attr:`~.IntersectionClassification.COINCIDENT` or if the intersection
is classified is a corner / edge end which is classified as
:attr:`~.IntersectionClassification.TANGENT_FIRST` or
:attr:`~.IntersectionClassification.TANGENT_SECOND`.
Args:
intersection (.Intersection): An intersection to be added.
Returns:
bool: Indicating if the intersection will be used.
"""
if intersection.interior_curve in ACCEPTABLE_CLASSIFICATIONS:
return True
if intersection.interior_curve in TANGENT_CLASSIFICATIONS:
return intersection.s == 0.0 or intersection.t == 0.0
return False
|
def function[should_use, parameter[intersection]]:
constant[Check if an intersection can be used as part of a curved polygon.
Will return :data:`True` if the intersection is classified as
:attr:`~.IntersectionClassification.FIRST`,
:attr:`~.IntersectionClassification.SECOND` or
:attr:`~.IntersectionClassification.COINCIDENT` or if the intersection
is classified is a corner / edge end which is classified as
:attr:`~.IntersectionClassification.TANGENT_FIRST` or
:attr:`~.IntersectionClassification.TANGENT_SECOND`.
Args:
intersection (.Intersection): An intersection to be added.
Returns:
bool: Indicating if the intersection will be used.
]
if compare[name[intersection].interior_curve in name[ACCEPTABLE_CLASSIFICATIONS]] begin[:]
return[constant[True]]
if compare[name[intersection].interior_curve in name[TANGENT_CLASSIFICATIONS]] begin[:]
return[<ast.BoolOp object at 0x7da2054a4df0>]
return[constant[False]]
|
keyword[def] identifier[should_use] ( identifier[intersection] ):
literal[string]
keyword[if] identifier[intersection] . identifier[interior_curve] keyword[in] identifier[ACCEPTABLE_CLASSIFICATIONS] :
keyword[return] keyword[True]
keyword[if] identifier[intersection] . identifier[interior_curve] keyword[in] identifier[TANGENT_CLASSIFICATIONS] :
keyword[return] identifier[intersection] . identifier[s] == literal[int] keyword[or] identifier[intersection] . identifier[t] == literal[int]
keyword[return] keyword[False]
|
def should_use(intersection):
"""Check if an intersection can be used as part of a curved polygon.
Will return :data:`True` if the intersection is classified as
:attr:`~.IntersectionClassification.FIRST`,
:attr:`~.IntersectionClassification.SECOND` or
:attr:`~.IntersectionClassification.COINCIDENT` or if the intersection
is classified is a corner / edge end which is classified as
:attr:`~.IntersectionClassification.TANGENT_FIRST` or
:attr:`~.IntersectionClassification.TANGENT_SECOND`.
Args:
intersection (.Intersection): An intersection to be added.
Returns:
bool: Indicating if the intersection will be used.
"""
if intersection.interior_curve in ACCEPTABLE_CLASSIFICATIONS:
return True # depends on [control=['if'], data=[]]
if intersection.interior_curve in TANGENT_CLASSIFICATIONS:
return intersection.s == 0.0 or intersection.t == 0.0 # depends on [control=['if'], data=[]]
return False
|
def skip_http_error(statuses):
'''
A decorator to wrap with try..except to swallow
specific HTTP errors.
@skip_http_error((404, 503))
def fetch():
...
'''
assert isinstance(statuses, tuple)
def decorator(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
status_code = e.response.status_code
if status_code in statuses:
log.warn(str(e))
else:
raise
return wrapper
return decorator
|
def function[skip_http_error, parameter[statuses]]:
constant[
A decorator to wrap with try..except to swallow
specific HTTP errors.
@skip_http_error((404, 503))
def fetch():
...
]
assert[call[name[isinstance], parameter[name[statuses], name[tuple]]]]
def function[decorator, parameter[func]]:
def function[wrapper, parameter[]]:
<ast.Try object at 0x7da204621e70>
return[name[wrapper]]
return[name[decorator]]
|
keyword[def] identifier[skip_http_error] ( identifier[statuses] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[statuses] , identifier[tuple] )
keyword[def] identifier[decorator] ( identifier[func] ):
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
keyword[try] :
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[HTTPError] keyword[as] identifier[e] :
identifier[status_code] = identifier[e] . identifier[response] . identifier[status_code]
keyword[if] identifier[status_code] keyword[in] identifier[statuses] :
identifier[log] . identifier[warn] ( identifier[str] ( identifier[e] ))
keyword[else] :
keyword[raise]
keyword[return] identifier[wrapper]
keyword[return] identifier[decorator]
|
def skip_http_error(statuses):
"""
A decorator to wrap with try..except to swallow
specific HTTP errors.
@skip_http_error((404, 503))
def fetch():
...
"""
assert isinstance(statuses, tuple)
def decorator(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs) # depends on [control=['try'], data=[]]
except HTTPError as e:
status_code = e.response.status_code
if status_code in statuses:
log.warn(str(e)) # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['e']]
return wrapper
return decorator
|
def get_or_set_hash(uri,
length=8,
chars='abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'):
'''
Perform a one-time generation of a hash and write it to sdb.
If that value has already been set return the value instead.
This is useful for generating passwords or keys that are specific to
multiple minions that need to be stored somewhere centrally.
State Example:
.. code-block:: yaml
some_mysql_user:
mysql_user:
- present
- host: localhost
- password: '{{ salt['sdb.get_or_set_hash']('some_mysql_user_pass') }}'
CLI Example:
.. code-block:: bash
salt '*' sdb.get_or_set_hash 'SECRET_KEY' 50
.. warning::
This function could return strings which may contain characters which are reserved
as directives by the YAML parser, such as strings beginning with ``%``. To avoid
issues when using the output of this function in an SLS file containing YAML+Jinja,
surround the call with single quotes.
'''
return salt.utils.sdb.sdb_get_or_set_hash(uri, __opts__, length, chars, __utils__)
|
def function[get_or_set_hash, parameter[uri, length, chars]]:
constant[
Perform a one-time generation of a hash and write it to sdb.
If that value has already been set return the value instead.
This is useful for generating passwords or keys that are specific to
multiple minions that need to be stored somewhere centrally.
State Example:
.. code-block:: yaml
some_mysql_user:
mysql_user:
- present
- host: localhost
- password: '{{ salt['sdb.get_or_set_hash']('some_mysql_user_pass') }}'
CLI Example:
.. code-block:: bash
salt '*' sdb.get_or_set_hash 'SECRET_KEY' 50
.. warning::
This function could return strings which may contain characters which are reserved
as directives by the YAML parser, such as strings beginning with ``%``. To avoid
issues when using the output of this function in an SLS file containing YAML+Jinja,
surround the call with single quotes.
]
return[call[name[salt].utils.sdb.sdb_get_or_set_hash, parameter[name[uri], name[__opts__], name[length], name[chars], name[__utils__]]]]
|
keyword[def] identifier[get_or_set_hash] ( identifier[uri] ,
identifier[length] = literal[int] ,
identifier[chars] = literal[string] ):
literal[string]
keyword[return] identifier[salt] . identifier[utils] . identifier[sdb] . identifier[sdb_get_or_set_hash] ( identifier[uri] , identifier[__opts__] , identifier[length] , identifier[chars] , identifier[__utils__] )
|
def get_or_set_hash(uri, length=8, chars='abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'):
"""
Perform a one-time generation of a hash and write it to sdb.
If that value has already been set return the value instead.
This is useful for generating passwords or keys that are specific to
multiple minions that need to be stored somewhere centrally.
State Example:
.. code-block:: yaml
some_mysql_user:
mysql_user:
- present
- host: localhost
- password: '{{ salt['sdb.get_or_set_hash']('some_mysql_user_pass') }}'
CLI Example:
.. code-block:: bash
salt '*' sdb.get_or_set_hash 'SECRET_KEY' 50
.. warning::
This function could return strings which may contain characters which are reserved
as directives by the YAML parser, such as strings beginning with ``%``. To avoid
issues when using the output of this function in an SLS file containing YAML+Jinja,
surround the call with single quotes.
"""
return salt.utils.sdb.sdb_get_or_set_hash(uri, __opts__, length, chars, __utils__)
|
def logpdf(self, mu):
"""
Log PDF for Exponential prior
Parameters
----------
mu : float
Latent variable for which the prior is being formed over
Returns
----------
- log(p(mu))
"""
if self.transform is not None:
mu = self.transform(mu)
return ss.expon.logpdf(mu, self.lmd0)
|
def function[logpdf, parameter[self, mu]]:
constant[
Log PDF for Exponential prior
Parameters
----------
mu : float
Latent variable for which the prior is being formed over
Returns
----------
- log(p(mu))
]
if compare[name[self].transform is_not constant[None]] begin[:]
variable[mu] assign[=] call[name[self].transform, parameter[name[mu]]]
return[call[name[ss].expon.logpdf, parameter[name[mu], name[self].lmd0]]]
|
keyword[def] identifier[logpdf] ( identifier[self] , identifier[mu] ):
literal[string]
keyword[if] identifier[self] . identifier[transform] keyword[is] keyword[not] keyword[None] :
identifier[mu] = identifier[self] . identifier[transform] ( identifier[mu] )
keyword[return] identifier[ss] . identifier[expon] . identifier[logpdf] ( identifier[mu] , identifier[self] . identifier[lmd0] )
|
def logpdf(self, mu):
"""
Log PDF for Exponential prior
Parameters
----------
mu : float
Latent variable for which the prior is being formed over
Returns
----------
- log(p(mu))
"""
if self.transform is not None:
mu = self.transform(mu) # depends on [control=['if'], data=[]]
return ss.expon.logpdf(mu, self.lmd0)
|
def ParseOptions(self, options):
"""Parses the options.
Args:
options (argparse.Namespace): command line arguments.
Raises:
BadConfigOption: if the options are invalid.
"""
# The output modules options are dependent on the preferred language
# and preferred time zone options.
self._ParseTimezoneOption(options)
names = ['analysis_plugins', 'language', 'profiling']
helpers_manager.ArgumentHelperManager.ParseOptions(
options, self, names=names)
self.list_analysis_plugins = self._analysis_plugins == 'list'
self.list_language_identifiers = self._preferred_language == 'list'
self.list_profilers = self._profilers == 'list'
if (self.list_analysis_plugins or self.list_language_identifiers or
self.list_profilers or self.list_timezones):
return
# Check output modules after the other listable options, otherwise
# it could raise with "requires an output file".
helpers_manager.ArgumentHelperManager.ParseOptions(
options, self, names=['output_modules'])
self.list_output_modules = self._output_format == 'list'
if self.list_output_modules:
return
self._ParseInformationalOptions(options)
helpers_manager.ArgumentHelperManager.ParseOptions(
options, self, names=['data_location'])
self._ParseLogFileOptions(options)
self._ParseProcessingOptions(options)
helpers_manager.ArgumentHelperManager.ParseOptions(
options, self, names=['event_filters'])
self._deduplicate_events = getattr(options, 'dedup', True)
if self._data_location:
# Update the data location with the calculated value.
options.data_location = self._data_location
else:
logger.warning('Unable to automatically determine data location.')
self._command_line_arguments = self.GetCommandLineArguments()
helpers_manager.ArgumentHelperManager.ParseOptions(
options, self, names=['storage_file'])
# TODO: move check into _CheckStorageFile.
if not self._storage_file_path:
raise errors.BadConfigOption('Missing storage file option.')
if not os.path.isfile(self._storage_file_path):
raise errors.BadConfigOption(
'No such storage file: {0:s}.'.format(self._storage_file_path))
self._EnforceProcessMemoryLimit(self._process_memory_limit)
self._analysis_plugins = self._CreateAnalysisPlugins(options)
self._output_module = self._CreateOutputModule(options)
|
def function[ParseOptions, parameter[self, options]]:
constant[Parses the options.
Args:
options (argparse.Namespace): command line arguments.
Raises:
BadConfigOption: if the options are invalid.
]
call[name[self]._ParseTimezoneOption, parameter[name[options]]]
variable[names] assign[=] list[[<ast.Constant object at 0x7da18c4ce5c0>, <ast.Constant object at 0x7da18c4cfcd0>, <ast.Constant object at 0x7da18c4cd870>]]
call[name[helpers_manager].ArgumentHelperManager.ParseOptions, parameter[name[options], name[self]]]
name[self].list_analysis_plugins assign[=] compare[name[self]._analysis_plugins equal[==] constant[list]]
name[self].list_language_identifiers assign[=] compare[name[self]._preferred_language equal[==] constant[list]]
name[self].list_profilers assign[=] compare[name[self]._profilers equal[==] constant[list]]
if <ast.BoolOp object at 0x7da18c4cc550> begin[:]
return[None]
call[name[helpers_manager].ArgumentHelperManager.ParseOptions, parameter[name[options], name[self]]]
name[self].list_output_modules assign[=] compare[name[self]._output_format equal[==] constant[list]]
if name[self].list_output_modules begin[:]
return[None]
call[name[self]._ParseInformationalOptions, parameter[name[options]]]
call[name[helpers_manager].ArgumentHelperManager.ParseOptions, parameter[name[options], name[self]]]
call[name[self]._ParseLogFileOptions, parameter[name[options]]]
call[name[self]._ParseProcessingOptions, parameter[name[options]]]
call[name[helpers_manager].ArgumentHelperManager.ParseOptions, parameter[name[options], name[self]]]
name[self]._deduplicate_events assign[=] call[name[getattr], parameter[name[options], constant[dedup], constant[True]]]
if name[self]._data_location begin[:]
name[options].data_location assign[=] name[self]._data_location
name[self]._command_line_arguments assign[=] call[name[self].GetCommandLineArguments, parameter[]]
call[name[helpers_manager].ArgumentHelperManager.ParseOptions, parameter[name[options], name[self]]]
if <ast.UnaryOp object at 0x7da18fe91810> begin[:]
<ast.Raise object at 0x7da18fe92f80>
if <ast.UnaryOp object at 0x7da18fe92bf0> begin[:]
<ast.Raise object at 0x7da18fe910c0>
call[name[self]._EnforceProcessMemoryLimit, parameter[name[self]._process_memory_limit]]
name[self]._analysis_plugins assign[=] call[name[self]._CreateAnalysisPlugins, parameter[name[options]]]
name[self]._output_module assign[=] call[name[self]._CreateOutputModule, parameter[name[options]]]
|
keyword[def] identifier[ParseOptions] ( identifier[self] , identifier[options] ):
literal[string]
identifier[self] . identifier[_ParseTimezoneOption] ( identifier[options] )
identifier[names] =[ literal[string] , literal[string] , literal[string] ]
identifier[helpers_manager] . identifier[ArgumentHelperManager] . identifier[ParseOptions] (
identifier[options] , identifier[self] , identifier[names] = identifier[names] )
identifier[self] . identifier[list_analysis_plugins] = identifier[self] . identifier[_analysis_plugins] == literal[string]
identifier[self] . identifier[list_language_identifiers] = identifier[self] . identifier[_preferred_language] == literal[string]
identifier[self] . identifier[list_profilers] = identifier[self] . identifier[_profilers] == literal[string]
keyword[if] ( identifier[self] . identifier[list_analysis_plugins] keyword[or] identifier[self] . identifier[list_language_identifiers] keyword[or]
identifier[self] . identifier[list_profilers] keyword[or] identifier[self] . identifier[list_timezones] ):
keyword[return]
identifier[helpers_manager] . identifier[ArgumentHelperManager] . identifier[ParseOptions] (
identifier[options] , identifier[self] , identifier[names] =[ literal[string] ])
identifier[self] . identifier[list_output_modules] = identifier[self] . identifier[_output_format] == literal[string]
keyword[if] identifier[self] . identifier[list_output_modules] :
keyword[return]
identifier[self] . identifier[_ParseInformationalOptions] ( identifier[options] )
identifier[helpers_manager] . identifier[ArgumentHelperManager] . identifier[ParseOptions] (
identifier[options] , identifier[self] , identifier[names] =[ literal[string] ])
identifier[self] . identifier[_ParseLogFileOptions] ( identifier[options] )
identifier[self] . identifier[_ParseProcessingOptions] ( identifier[options] )
identifier[helpers_manager] . identifier[ArgumentHelperManager] . identifier[ParseOptions] (
identifier[options] , identifier[self] , identifier[names] =[ literal[string] ])
identifier[self] . identifier[_deduplicate_events] = identifier[getattr] ( identifier[options] , literal[string] , keyword[True] )
keyword[if] identifier[self] . identifier[_data_location] :
identifier[options] . identifier[data_location] = identifier[self] . identifier[_data_location]
keyword[else] :
identifier[logger] . identifier[warning] ( literal[string] )
identifier[self] . identifier[_command_line_arguments] = identifier[self] . identifier[GetCommandLineArguments] ()
identifier[helpers_manager] . identifier[ArgumentHelperManager] . identifier[ParseOptions] (
identifier[options] , identifier[self] , identifier[names] =[ literal[string] ])
keyword[if] keyword[not] identifier[self] . identifier[_storage_file_path] :
keyword[raise] identifier[errors] . identifier[BadConfigOption] ( literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[self] . identifier[_storage_file_path] ):
keyword[raise] identifier[errors] . identifier[BadConfigOption] (
literal[string] . identifier[format] ( identifier[self] . identifier[_storage_file_path] ))
identifier[self] . identifier[_EnforceProcessMemoryLimit] ( identifier[self] . identifier[_process_memory_limit] )
identifier[self] . identifier[_analysis_plugins] = identifier[self] . identifier[_CreateAnalysisPlugins] ( identifier[options] )
identifier[self] . identifier[_output_module] = identifier[self] . identifier[_CreateOutputModule] ( identifier[options] )
|
def ParseOptions(self, options):
"""Parses the options.
Args:
options (argparse.Namespace): command line arguments.
Raises:
BadConfigOption: if the options are invalid.
"""
# The output modules options are dependent on the preferred language
# and preferred time zone options.
self._ParseTimezoneOption(options)
names = ['analysis_plugins', 'language', 'profiling']
helpers_manager.ArgumentHelperManager.ParseOptions(options, self, names=names)
self.list_analysis_plugins = self._analysis_plugins == 'list'
self.list_language_identifiers = self._preferred_language == 'list'
self.list_profilers = self._profilers == 'list'
if self.list_analysis_plugins or self.list_language_identifiers or self.list_profilers or self.list_timezones:
return # depends on [control=['if'], data=[]]
# Check output modules after the other listable options, otherwise
# it could raise with "requires an output file".
helpers_manager.ArgumentHelperManager.ParseOptions(options, self, names=['output_modules'])
self.list_output_modules = self._output_format == 'list'
if self.list_output_modules:
return # depends on [control=['if'], data=[]]
self._ParseInformationalOptions(options)
helpers_manager.ArgumentHelperManager.ParseOptions(options, self, names=['data_location'])
self._ParseLogFileOptions(options)
self._ParseProcessingOptions(options)
helpers_manager.ArgumentHelperManager.ParseOptions(options, self, names=['event_filters'])
self._deduplicate_events = getattr(options, 'dedup', True)
if self._data_location:
# Update the data location with the calculated value.
options.data_location = self._data_location # depends on [control=['if'], data=[]]
else:
logger.warning('Unable to automatically determine data location.')
self._command_line_arguments = self.GetCommandLineArguments()
helpers_manager.ArgumentHelperManager.ParseOptions(options, self, names=['storage_file'])
# TODO: move check into _CheckStorageFile.
if not self._storage_file_path:
raise errors.BadConfigOption('Missing storage file option.') # depends on [control=['if'], data=[]]
if not os.path.isfile(self._storage_file_path):
raise errors.BadConfigOption('No such storage file: {0:s}.'.format(self._storage_file_path)) # depends on [control=['if'], data=[]]
self._EnforceProcessMemoryLimit(self._process_memory_limit)
self._analysis_plugins = self._CreateAnalysisPlugins(options)
self._output_module = self._CreateOutputModule(options)
|
def remove(self, id):
""" Remove pool.
"""
p = Pool.get(int(id))
p.remove()
redirect(url(controller = 'pool', action = 'list'))
|
def function[remove, parameter[self, id]]:
constant[ Remove pool.
]
variable[p] assign[=] call[name[Pool].get, parameter[call[name[int], parameter[name[id]]]]]
call[name[p].remove, parameter[]]
call[name[redirect], parameter[call[name[url], parameter[]]]]
|
keyword[def] identifier[remove] ( identifier[self] , identifier[id] ):
literal[string]
identifier[p] = identifier[Pool] . identifier[get] ( identifier[int] ( identifier[id] ))
identifier[p] . identifier[remove] ()
identifier[redirect] ( identifier[url] ( identifier[controller] = literal[string] , identifier[action] = literal[string] ))
|
def remove(self, id):
""" Remove pool.
"""
p = Pool.get(int(id))
p.remove()
redirect(url(controller='pool', action='list'))
|
def kernel_gaussian(size=100, sigma=None, forwardOnly=False):
"""
return a 1d gassuan array of a given size and sigma.
If sigma isn't given, it will be 1/10 of the size, which is usually good.
"""
if sigma is None:sigma=size/10
points=np.exp(-np.power(np.arange(size)-size/2,2)/(2*np.power(sigma,2)))
if forwardOnly:
points[:int(len(points)/2)]=0
return points/sum(points)
|
def function[kernel_gaussian, parameter[size, sigma, forwardOnly]]:
constant[
return a 1d gassuan array of a given size and sigma.
If sigma isn't given, it will be 1/10 of the size, which is usually good.
]
if compare[name[sigma] is constant[None]] begin[:]
variable[sigma] assign[=] binary_operation[name[size] / constant[10]]
variable[points] assign[=] call[name[np].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da1afea5d20> / binary_operation[constant[2] * call[name[np].power, parameter[name[sigma], constant[2]]]]]]]
if name[forwardOnly] begin[:]
call[name[points]][<ast.Slice object at 0x7da1afea5ab0>] assign[=] constant[0]
return[binary_operation[name[points] / call[name[sum], parameter[name[points]]]]]
|
keyword[def] identifier[kernel_gaussian] ( identifier[size] = literal[int] , identifier[sigma] = keyword[None] , identifier[forwardOnly] = keyword[False] ):
literal[string]
keyword[if] identifier[sigma] keyword[is] keyword[None] : identifier[sigma] = identifier[size] / literal[int]
identifier[points] = identifier[np] . identifier[exp] (- identifier[np] . identifier[power] ( identifier[np] . identifier[arange] ( identifier[size] )- identifier[size] / literal[int] , literal[int] )/( literal[int] * identifier[np] . identifier[power] ( identifier[sigma] , literal[int] )))
keyword[if] identifier[forwardOnly] :
identifier[points] [: identifier[int] ( identifier[len] ( identifier[points] )/ literal[int] )]= literal[int]
keyword[return] identifier[points] / identifier[sum] ( identifier[points] )
|
def kernel_gaussian(size=100, sigma=None, forwardOnly=False):
"""
return a 1d gassuan array of a given size and sigma.
If sigma isn't given, it will be 1/10 of the size, which is usually good.
"""
if sigma is None:
sigma = size / 10 # depends on [control=['if'], data=['sigma']]
points = np.exp(-np.power(np.arange(size) - size / 2, 2) / (2 * np.power(sigma, 2)))
if forwardOnly:
points[:int(len(points) / 2)] = 0 # depends on [control=['if'], data=[]]
return points / sum(points)
|
def main():
"""Main method."""
args = parse_cmd_arguments()
html_file = args.file
try:
json.loads(args.add_tags or '{}')
json.loads(args.exc_tags or '{}')
except ValueError:
print('\033[91m' + 'Invalid json string: please provide a valid json '
'string e.g {}'.format('\'{"img": "data-url"}\'') + '\033[0m')
sys.exit(1)
staticfied = staticfy(html_file, args=args).encode('utf-8')
file_ops(staticfied, args=args)
|
def function[main, parameter[]]:
constant[Main method.]
variable[args] assign[=] call[name[parse_cmd_arguments], parameter[]]
variable[html_file] assign[=] name[args].file
<ast.Try object at 0x7da18dc04040>
variable[staticfied] assign[=] call[call[name[staticfy], parameter[name[html_file]]].encode, parameter[constant[utf-8]]]
call[name[file_ops], parameter[name[staticfied]]]
|
keyword[def] identifier[main] ():
literal[string]
identifier[args] = identifier[parse_cmd_arguments] ()
identifier[html_file] = identifier[args] . identifier[file]
keyword[try] :
identifier[json] . identifier[loads] ( identifier[args] . identifier[add_tags] keyword[or] literal[string] )
identifier[json] . identifier[loads] ( identifier[args] . identifier[exc_tags] keyword[or] literal[string] )
keyword[except] identifier[ValueError] :
identifier[print] ( literal[string] + literal[string]
literal[string] . identifier[format] ( literal[string] )+ literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
identifier[staticfied] = identifier[staticfy] ( identifier[html_file] , identifier[args] = identifier[args] ). identifier[encode] ( literal[string] )
identifier[file_ops] ( identifier[staticfied] , identifier[args] = identifier[args] )
|
def main():
"""Main method."""
args = parse_cmd_arguments()
html_file = args.file
try:
json.loads(args.add_tags or '{}')
json.loads(args.exc_tags or '{}') # depends on [control=['try'], data=[]]
except ValueError:
print('\x1b[91m' + 'Invalid json string: please provide a valid json string e.g {}'.format('\'{"img": "data-url"}\'') + '\x1b[0m')
sys.exit(1) # depends on [control=['except'], data=[]]
staticfied = staticfy(html_file, args=args).encode('utf-8')
file_ops(staticfied, args=args)
|
def start(self) -> None:
"""
Start monitoring the base unit.
"""
self._shutdown = False
# Start listening (if server) / Open connection (if client)
if isinstance(self._protocol, Server):
self.create_task(self._async_listen)
elif isinstance(self._protocol, Client):
self.create_task(self._async_open)
else:
raise NotImplementedError
|
def function[start, parameter[self]]:
constant[
Start monitoring the base unit.
]
name[self]._shutdown assign[=] constant[False]
if call[name[isinstance], parameter[name[self]._protocol, name[Server]]] begin[:]
call[name[self].create_task, parameter[name[self]._async_listen]]
|
keyword[def] identifier[start] ( identifier[self] )-> keyword[None] :
literal[string]
identifier[self] . identifier[_shutdown] = keyword[False]
keyword[if] identifier[isinstance] ( identifier[self] . identifier[_protocol] , identifier[Server] ):
identifier[self] . identifier[create_task] ( identifier[self] . identifier[_async_listen] )
keyword[elif] identifier[isinstance] ( identifier[self] . identifier[_protocol] , identifier[Client] ):
identifier[self] . identifier[create_task] ( identifier[self] . identifier[_async_open] )
keyword[else] :
keyword[raise] identifier[NotImplementedError]
|
def start(self) -> None:
"""
Start monitoring the base unit.
"""
self._shutdown = False
# Start listening (if server) / Open connection (if client)
if isinstance(self._protocol, Server):
self.create_task(self._async_listen) # depends on [control=['if'], data=[]]
elif isinstance(self._protocol, Client):
self.create_task(self._async_open) # depends on [control=['if'], data=[]]
else:
raise NotImplementedError
|
def _restore_auto_increment(self, table):
"""restore the auto increment value for the table to what it was previously"""
query, seq_table, seq_column, seq_name = self._get_auto_increment_info(table)
if query:
queries = [query, "select nextval('{}')".format(seq_name)]
return self._run_queries(queries)
|
def function[_restore_auto_increment, parameter[self, table]]:
constant[restore the auto increment value for the table to what it was previously]
<ast.Tuple object at 0x7da1b2839bd0> assign[=] call[name[self]._get_auto_increment_info, parameter[name[table]]]
if name[query] begin[:]
variable[queries] assign[=] list[[<ast.Name object at 0x7da1b283b730>, <ast.Call object at 0x7da1b283bfa0>]]
return[call[name[self]._run_queries, parameter[name[queries]]]]
|
keyword[def] identifier[_restore_auto_increment] ( identifier[self] , identifier[table] ):
literal[string]
identifier[query] , identifier[seq_table] , identifier[seq_column] , identifier[seq_name] = identifier[self] . identifier[_get_auto_increment_info] ( identifier[table] )
keyword[if] identifier[query] :
identifier[queries] =[ identifier[query] , literal[string] . identifier[format] ( identifier[seq_name] )]
keyword[return] identifier[self] . identifier[_run_queries] ( identifier[queries] )
|
def _restore_auto_increment(self, table):
"""restore the auto increment value for the table to what it was previously"""
(query, seq_table, seq_column, seq_name) = self._get_auto_increment_info(table)
if query:
queries = [query, "select nextval('{}')".format(seq_name)]
return self._run_queries(queries) # depends on [control=['if'], data=[]]
|
def parse(self, text):
"""Parse the given text, return a list of Keywords."""
token_stream = self.lexer.tokenize(text)
return self.expr(token_stream, next(token_stream))
|
def function[parse, parameter[self, text]]:
constant[Parse the given text, return a list of Keywords.]
variable[token_stream] assign[=] call[name[self].lexer.tokenize, parameter[name[text]]]
return[call[name[self].expr, parameter[name[token_stream], call[name[next], parameter[name[token_stream]]]]]]
|
keyword[def] identifier[parse] ( identifier[self] , identifier[text] ):
literal[string]
identifier[token_stream] = identifier[self] . identifier[lexer] . identifier[tokenize] ( identifier[text] )
keyword[return] identifier[self] . identifier[expr] ( identifier[token_stream] , identifier[next] ( identifier[token_stream] ))
|
def parse(self, text):
"""Parse the given text, return a list of Keywords."""
token_stream = self.lexer.tokenize(text)
return self.expr(token_stream, next(token_stream))
|
def certificate(self):
"""
Retrieves the certificate used to sign the bounce message.
TODO: Cache the certificate based on the cert URL so we don't have to
retrieve it for each bounce message. *We would need to do it in a
secure way so that the cert couldn't be overwritten in the cache*
"""
if not hasattr(self, '_certificate'):
cert_url = self._get_cert_url()
# Only load certificates from a certain domain?
# Without some kind of trusted domain check, any old joe could
# craft a bounce message and sign it using his own certificate
# and we would happily load and verify it.
if not cert_url:
self._certificate = None
return self._certificate
try:
import requests
except ImportError:
raise ImproperlyConfigured("requests is required for bounce message verification.")
try:
import M2Crypto
except ImportError:
raise ImproperlyConfigured("M2Crypto is required for bounce message verification.")
# We use requests because it verifies the https certificate
# when retrieving the signing certificate. If https was somehow
# hijacked then all bets are off.
response = requests.get(cert_url)
if response.status_code != 200:
logger.warning(u'Could not download certificate from %s: "%s"', cert_url, response.status_code)
self._certificate = None
return self._certificate
# Handle errors loading the certificate.
# If the certificate is invalid then return
# false as we couldn't verify the message.
try:
self._certificate = M2Crypto.X509.load_cert_string(response.content)
except M2Crypto.X509.X509Error as e:
logger.warning(u'Could not load certificate from %s: "%s"', cert_url, e)
self._certificate = None
return self._certificate
|
def function[certificate, parameter[self]]:
constant[
Retrieves the certificate used to sign the bounce message.
TODO: Cache the certificate based on the cert URL so we don't have to
retrieve it for each bounce message. *We would need to do it in a
secure way so that the cert couldn't be overwritten in the cache*
]
if <ast.UnaryOp object at 0x7da18eb568c0> begin[:]
variable[cert_url] assign[=] call[name[self]._get_cert_url, parameter[]]
if <ast.UnaryOp object at 0x7da18eb571c0> begin[:]
name[self]._certificate assign[=] constant[None]
return[name[self]._certificate]
<ast.Try object at 0x7da18eb55c30>
<ast.Try object at 0x7da18eb541f0>
variable[response] assign[=] call[name[requests].get, parameter[name[cert_url]]]
if compare[name[response].status_code not_equal[!=] constant[200]] begin[:]
call[name[logger].warning, parameter[constant[Could not download certificate from %s: "%s"], name[cert_url], name[response].status_code]]
name[self]._certificate assign[=] constant[None]
return[name[self]._certificate]
<ast.Try object at 0x7da18eb57b80>
return[name[self]._certificate]
|
keyword[def] identifier[certificate] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[cert_url] = identifier[self] . identifier[_get_cert_url] ()
keyword[if] keyword[not] identifier[cert_url] :
identifier[self] . identifier[_certificate] = keyword[None]
keyword[return] identifier[self] . identifier[_certificate]
keyword[try] :
keyword[import] identifier[requests]
keyword[except] identifier[ImportError] :
keyword[raise] identifier[ImproperlyConfigured] ( literal[string] )
keyword[try] :
keyword[import] identifier[M2Crypto]
keyword[except] identifier[ImportError] :
keyword[raise] identifier[ImproperlyConfigured] ( literal[string] )
identifier[response] = identifier[requests] . identifier[get] ( identifier[cert_url] )
keyword[if] identifier[response] . identifier[status_code] != literal[int] :
identifier[logger] . identifier[warning] ( literal[string] , identifier[cert_url] , identifier[response] . identifier[status_code] )
identifier[self] . identifier[_certificate] = keyword[None]
keyword[return] identifier[self] . identifier[_certificate]
keyword[try] :
identifier[self] . identifier[_certificate] = identifier[M2Crypto] . identifier[X509] . identifier[load_cert_string] ( identifier[response] . identifier[content] )
keyword[except] identifier[M2Crypto] . identifier[X509] . identifier[X509Error] keyword[as] identifier[e] :
identifier[logger] . identifier[warning] ( literal[string] , identifier[cert_url] , identifier[e] )
identifier[self] . identifier[_certificate] = keyword[None]
keyword[return] identifier[self] . identifier[_certificate]
|
def certificate(self):
"""
Retrieves the certificate used to sign the bounce message.
TODO: Cache the certificate based on the cert URL so we don't have to
retrieve it for each bounce message. *We would need to do it in a
secure way so that the cert couldn't be overwritten in the cache*
"""
if not hasattr(self, '_certificate'):
cert_url = self._get_cert_url()
# Only load certificates from a certain domain?
# Without some kind of trusted domain check, any old joe could
# craft a bounce message and sign it using his own certificate
# and we would happily load and verify it.
if not cert_url:
self._certificate = None
return self._certificate # depends on [control=['if'], data=[]]
try:
import requests # depends on [control=['try'], data=[]]
except ImportError:
raise ImproperlyConfigured('requests is required for bounce message verification.') # depends on [control=['except'], data=[]]
try:
import M2Crypto # depends on [control=['try'], data=[]]
except ImportError:
raise ImproperlyConfigured('M2Crypto is required for bounce message verification.') # depends on [control=['except'], data=[]]
# We use requests because it verifies the https certificate
# when retrieving the signing certificate. If https was somehow
# hijacked then all bets are off.
response = requests.get(cert_url)
if response.status_code != 200:
logger.warning(u'Could not download certificate from %s: "%s"', cert_url, response.status_code)
self._certificate = None
return self._certificate # depends on [control=['if'], data=[]]
# Handle errors loading the certificate.
# If the certificate is invalid then return
# false as we couldn't verify the message.
try:
self._certificate = M2Crypto.X509.load_cert_string(response.content) # depends on [control=['try'], data=[]]
except M2Crypto.X509.X509Error as e:
logger.warning(u'Could not load certificate from %s: "%s"', cert_url, e)
self._certificate = None # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
return self._certificate
|
def from_string(key_pem, is_x509_cert):
"""Construct a Verified instance from a string.
Args:
key_pem: string, public key in PEM format.
is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it
is expected to be an RSA key in PEM format.
Returns:
Verifier instance.
Raises:
OpenSSL.crypto.Error: if the key_pem can't be parsed.
"""
key_pem = _helpers._to_bytes(key_pem)
if is_x509_cert:
pubkey = crypto.load_certificate(crypto.FILETYPE_PEM, key_pem)
else:
pubkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key_pem)
return OpenSSLVerifier(pubkey)
|
def function[from_string, parameter[key_pem, is_x509_cert]]:
constant[Construct a Verified instance from a string.
Args:
key_pem: string, public key in PEM format.
is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it
is expected to be an RSA key in PEM format.
Returns:
Verifier instance.
Raises:
OpenSSL.crypto.Error: if the key_pem can't be parsed.
]
variable[key_pem] assign[=] call[name[_helpers]._to_bytes, parameter[name[key_pem]]]
if name[is_x509_cert] begin[:]
variable[pubkey] assign[=] call[name[crypto].load_certificate, parameter[name[crypto].FILETYPE_PEM, name[key_pem]]]
return[call[name[OpenSSLVerifier], parameter[name[pubkey]]]]
|
keyword[def] identifier[from_string] ( identifier[key_pem] , identifier[is_x509_cert] ):
literal[string]
identifier[key_pem] = identifier[_helpers] . identifier[_to_bytes] ( identifier[key_pem] )
keyword[if] identifier[is_x509_cert] :
identifier[pubkey] = identifier[crypto] . identifier[load_certificate] ( identifier[crypto] . identifier[FILETYPE_PEM] , identifier[key_pem] )
keyword[else] :
identifier[pubkey] = identifier[crypto] . identifier[load_privatekey] ( identifier[crypto] . identifier[FILETYPE_PEM] , identifier[key_pem] )
keyword[return] identifier[OpenSSLVerifier] ( identifier[pubkey] )
|
def from_string(key_pem, is_x509_cert):
"""Construct a Verified instance from a string.
Args:
key_pem: string, public key in PEM format.
is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it
is expected to be an RSA key in PEM format.
Returns:
Verifier instance.
Raises:
OpenSSL.crypto.Error: if the key_pem can't be parsed.
"""
key_pem = _helpers._to_bytes(key_pem)
if is_x509_cert:
pubkey = crypto.load_certificate(crypto.FILETYPE_PEM, key_pem) # depends on [control=['if'], data=[]]
else:
pubkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key_pem)
return OpenSSLVerifier(pubkey)
|
def before_run(self):
"""Initialize the scheduling process"""
# Actions and checks counters
self.nb_checks = 0
self.nb_internal_checks = 0
self.nb_checks_launched = 0
self.nb_actions_launched = 0
self.nb_checks_results = 0
self.nb_checks_results_timeout = 0
self.nb_checks_results_passive = 0
self.nb_checks_results_active = 0
self.nb_actions_results = 0
self.nb_actions_results_timeout = 0
self.nb_actions_results_passive = 0
self.nb_broks_dropped = 0
self.nb_checks_dropped = 0
self.nb_actions_dropped = 0
# Broks, notifications, ... counters
self.nb_broks = 0
self.nb_notifications = 0
self.nb_event_handlers = 0
self.nb_external_commands = 0
self.ticks = 0
|
def function[before_run, parameter[self]]:
constant[Initialize the scheduling process]
name[self].nb_checks assign[=] constant[0]
name[self].nb_internal_checks assign[=] constant[0]
name[self].nb_checks_launched assign[=] constant[0]
name[self].nb_actions_launched assign[=] constant[0]
name[self].nb_checks_results assign[=] constant[0]
name[self].nb_checks_results_timeout assign[=] constant[0]
name[self].nb_checks_results_passive assign[=] constant[0]
name[self].nb_checks_results_active assign[=] constant[0]
name[self].nb_actions_results assign[=] constant[0]
name[self].nb_actions_results_timeout assign[=] constant[0]
name[self].nb_actions_results_passive assign[=] constant[0]
name[self].nb_broks_dropped assign[=] constant[0]
name[self].nb_checks_dropped assign[=] constant[0]
name[self].nb_actions_dropped assign[=] constant[0]
name[self].nb_broks assign[=] constant[0]
name[self].nb_notifications assign[=] constant[0]
name[self].nb_event_handlers assign[=] constant[0]
name[self].nb_external_commands assign[=] constant[0]
name[self].ticks assign[=] constant[0]
|
keyword[def] identifier[before_run] ( identifier[self] ):
literal[string]
identifier[self] . identifier[nb_checks] = literal[int]
identifier[self] . identifier[nb_internal_checks] = literal[int]
identifier[self] . identifier[nb_checks_launched] = literal[int]
identifier[self] . identifier[nb_actions_launched] = literal[int]
identifier[self] . identifier[nb_checks_results] = literal[int]
identifier[self] . identifier[nb_checks_results_timeout] = literal[int]
identifier[self] . identifier[nb_checks_results_passive] = literal[int]
identifier[self] . identifier[nb_checks_results_active] = literal[int]
identifier[self] . identifier[nb_actions_results] = literal[int]
identifier[self] . identifier[nb_actions_results_timeout] = literal[int]
identifier[self] . identifier[nb_actions_results_passive] = literal[int]
identifier[self] . identifier[nb_broks_dropped] = literal[int]
identifier[self] . identifier[nb_checks_dropped] = literal[int]
identifier[self] . identifier[nb_actions_dropped] = literal[int]
identifier[self] . identifier[nb_broks] = literal[int]
identifier[self] . identifier[nb_notifications] = literal[int]
identifier[self] . identifier[nb_event_handlers] = literal[int]
identifier[self] . identifier[nb_external_commands] = literal[int]
identifier[self] . identifier[ticks] = literal[int]
|
def before_run(self):
"""Initialize the scheduling process"""
# Actions and checks counters
self.nb_checks = 0
self.nb_internal_checks = 0
self.nb_checks_launched = 0
self.nb_actions_launched = 0
self.nb_checks_results = 0
self.nb_checks_results_timeout = 0
self.nb_checks_results_passive = 0
self.nb_checks_results_active = 0
self.nb_actions_results = 0
self.nb_actions_results_timeout = 0
self.nb_actions_results_passive = 0
self.nb_broks_dropped = 0
self.nb_checks_dropped = 0
self.nb_actions_dropped = 0
# Broks, notifications, ... counters
self.nb_broks = 0
self.nb_notifications = 0
self.nb_event_handlers = 0
self.nb_external_commands = 0
self.ticks = 0
|
def _get_linear_lookup_table_and_weight(nbits, wp):
"""
Generate a linear lookup table.
:param nbits: int
Number of bits to represent a quantized weight value
:param wp: numpy.array
Weight blob to be quantized
Returns
-------
lookup_table: numpy.array
Lookup table of shape (2^nbits, )
qw: numpy.array
Decomposed bit stream as a list of 0/1s of length (len(arr) * 8)
"""
w = wp.reshape(1, -1)
qw, scales, biases = _quantize_channelwise_linear(w, nbits, axis=0)
indices = _np.array(range(0, 2**nbits))
lookup_table = indices * scales[0] + biases[0]
return lookup_table, qw
|
def function[_get_linear_lookup_table_and_weight, parameter[nbits, wp]]:
constant[
Generate a linear lookup table.
:param nbits: int
Number of bits to represent a quantized weight value
:param wp: numpy.array
Weight blob to be quantized
Returns
-------
lookup_table: numpy.array
Lookup table of shape (2^nbits, )
qw: numpy.array
Decomposed bit stream as a list of 0/1s of length (len(arr) * 8)
]
variable[w] assign[=] call[name[wp].reshape, parameter[constant[1], <ast.UnaryOp object at 0x7da1b20ec3d0>]]
<ast.Tuple object at 0x7da1b20ee0e0> assign[=] call[name[_quantize_channelwise_linear], parameter[name[w], name[nbits]]]
variable[indices] assign[=] call[name[_np].array, parameter[call[name[range], parameter[constant[0], binary_operation[constant[2] ** name[nbits]]]]]]
variable[lookup_table] assign[=] binary_operation[binary_operation[name[indices] * call[name[scales]][constant[0]]] + call[name[biases]][constant[0]]]
return[tuple[[<ast.Name object at 0x7da1b208c8e0>, <ast.Name object at 0x7da1b208c430>]]]
|
keyword[def] identifier[_get_linear_lookup_table_and_weight] ( identifier[nbits] , identifier[wp] ):
literal[string]
identifier[w] = identifier[wp] . identifier[reshape] ( literal[int] ,- literal[int] )
identifier[qw] , identifier[scales] , identifier[biases] = identifier[_quantize_channelwise_linear] ( identifier[w] , identifier[nbits] , identifier[axis] = literal[int] )
identifier[indices] = identifier[_np] . identifier[array] ( identifier[range] ( literal[int] , literal[int] ** identifier[nbits] ))
identifier[lookup_table] = identifier[indices] * identifier[scales] [ literal[int] ]+ identifier[biases] [ literal[int] ]
keyword[return] identifier[lookup_table] , identifier[qw]
|
def _get_linear_lookup_table_and_weight(nbits, wp):
"""
Generate a linear lookup table.
:param nbits: int
Number of bits to represent a quantized weight value
:param wp: numpy.array
Weight blob to be quantized
Returns
-------
lookup_table: numpy.array
Lookup table of shape (2^nbits, )
qw: numpy.array
Decomposed bit stream as a list of 0/1s of length (len(arr) * 8)
"""
w = wp.reshape(1, -1)
(qw, scales, biases) = _quantize_channelwise_linear(w, nbits, axis=0)
indices = _np.array(range(0, 2 ** nbits))
lookup_table = indices * scales[0] + biases[0]
return (lookup_table, qw)
|
def is_union(type_: Type[Any]) -> bool:
'''
Union[A, B]
Union
Optional[A]
'''
if HAS_UNIONSUBCLASS:
# Old python
return _issubclass(type_, Union)
else:
return getattr(type_, '__origin__', None) == Union
|
def function[is_union, parameter[type_]]:
constant[
Union[A, B]
Union
Optional[A]
]
if name[HAS_UNIONSUBCLASS] begin[:]
return[call[name[_issubclass], parameter[name[type_], name[Union]]]]
|
keyword[def] identifier[is_union] ( identifier[type_] : identifier[Type] [ identifier[Any] ])-> identifier[bool] :
literal[string]
keyword[if] identifier[HAS_UNIONSUBCLASS] :
keyword[return] identifier[_issubclass] ( identifier[type_] , identifier[Union] )
keyword[else] :
keyword[return] identifier[getattr] ( identifier[type_] , literal[string] , keyword[None] )== identifier[Union]
|
def is_union(type_: Type[Any]) -> bool:
"""
Union[A, B]
Union
Optional[A]
"""
if HAS_UNIONSUBCLASS:
# Old python
return _issubclass(type_, Union) # depends on [control=['if'], data=[]]
else:
return getattr(type_, '__origin__', None) == Union
|
def main(arguments=None):
"""Main command line entry point."""
if not arguments:
arguments = sys.argv[1:]
wordlist, sowpods, by_length, start, end = argument_parser(arguments)
for word in wordlist:
pretty_print(
word,
anagrams_in_word(word, sowpods, start, end),
by_length,
)
|
def function[main, parameter[arguments]]:
constant[Main command line entry point.]
if <ast.UnaryOp object at 0x7da1b01a6920> begin[:]
variable[arguments] assign[=] call[name[sys].argv][<ast.Slice object at 0x7da1b01a48e0>]
<ast.Tuple object at 0x7da18fe916f0> assign[=] call[name[argument_parser], parameter[name[arguments]]]
for taget[name[word]] in starred[name[wordlist]] begin[:]
call[name[pretty_print], parameter[name[word], call[name[anagrams_in_word], parameter[name[word], name[sowpods], name[start], name[end]]], name[by_length]]]
|
keyword[def] identifier[main] ( identifier[arguments] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[arguments] :
identifier[arguments] = identifier[sys] . identifier[argv] [ literal[int] :]
identifier[wordlist] , identifier[sowpods] , identifier[by_length] , identifier[start] , identifier[end] = identifier[argument_parser] ( identifier[arguments] )
keyword[for] identifier[word] keyword[in] identifier[wordlist] :
identifier[pretty_print] (
identifier[word] ,
identifier[anagrams_in_word] ( identifier[word] , identifier[sowpods] , identifier[start] , identifier[end] ),
identifier[by_length] ,
)
|
def main(arguments=None):
"""Main command line entry point."""
if not arguments:
arguments = sys.argv[1:] # depends on [control=['if'], data=[]]
(wordlist, sowpods, by_length, start, end) = argument_parser(arguments)
for word in wordlist:
pretty_print(word, anagrams_in_word(word, sowpods, start, end), by_length) # depends on [control=['for'], data=['word']]
|
def slots_class_sealer(fields, defaults):
"""
This sealer makes a container class that uses ``__slots__`` (it uses :func:`class_sealer` internally).
The resulting class has a metaclass that forcibly sets ``__slots__`` on subclasses.
"""
class __slots_meta__(type):
def __new__(mcs, name, bases, namespace):
if "__slots__" not in namespace:
namespace["__slots__"] = fields
return type.__new__(mcs, name, bases, namespace)
class __slots_base__(_with_metaclass(__slots_meta__, object)):
__slots__ = ()
def __init__(self, *args, **kwargs):
pass
return class_sealer(fields, defaults, base=__slots_base__)
|
def function[slots_class_sealer, parameter[fields, defaults]]:
constant[
This sealer makes a container class that uses ``__slots__`` (it uses :func:`class_sealer` internally).
The resulting class has a metaclass that forcibly sets ``__slots__`` on subclasses.
]
class class[__slots_meta__, parameter[]] begin[:]
def function[__new__, parameter[mcs, name, bases, namespace]]:
if compare[constant[__slots__] <ast.NotIn object at 0x7da2590d7190> name[namespace]] begin[:]
call[name[namespace]][constant[__slots__]] assign[=] name[fields]
return[call[name[type].__new__, parameter[name[mcs], name[name], name[bases], name[namespace]]]]
class class[__slots_base__, parameter[]] begin[:]
variable[__slots__] assign[=] tuple[[]]
def function[__init__, parameter[self]]:
pass
return[call[name[class_sealer], parameter[name[fields], name[defaults]]]]
|
keyword[def] identifier[slots_class_sealer] ( identifier[fields] , identifier[defaults] ):
literal[string]
keyword[class] identifier[__slots_meta__] ( identifier[type] ):
keyword[def] identifier[__new__] ( identifier[mcs] , identifier[name] , identifier[bases] , identifier[namespace] ):
keyword[if] literal[string] keyword[not] keyword[in] identifier[namespace] :
identifier[namespace] [ literal[string] ]= identifier[fields]
keyword[return] identifier[type] . identifier[__new__] ( identifier[mcs] , identifier[name] , identifier[bases] , identifier[namespace] )
keyword[class] identifier[__slots_base__] ( identifier[_with_metaclass] ( identifier[__slots_meta__] , identifier[object] )):
identifier[__slots__] =()
keyword[def] identifier[__init__] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
keyword[pass]
keyword[return] identifier[class_sealer] ( identifier[fields] , identifier[defaults] , identifier[base] = identifier[__slots_base__] )
|
def slots_class_sealer(fields, defaults):
"""
This sealer makes a container class that uses ``__slots__`` (it uses :func:`class_sealer` internally).
The resulting class has a metaclass that forcibly sets ``__slots__`` on subclasses.
"""
class __slots_meta__(type):
def __new__(mcs, name, bases, namespace):
if '__slots__' not in namespace:
namespace['__slots__'] = fields # depends on [control=['if'], data=['namespace']]
return type.__new__(mcs, name, bases, namespace)
class __slots_base__(_with_metaclass(__slots_meta__, object)):
__slots__ = ()
def __init__(self, *args, **kwargs):
pass
return class_sealer(fields, defaults, base=__slots_base__)
|
def get_vault_form_for_update(self, vault_id):
"""Gets the vault form for updating an existing vault.
A new vault form should be requested for each update
transaction.
arg: vault_id (osid.id.Id): the ``Id`` of the ``Vault``
return: (osid.authorization.VaultForm) - the vault form
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.get_bin_form_for_update_template
if self._catalog_session is not None:
return self._catalog_session.get_catalog_form_for_update(catalog_id=vault_id)
collection = JSONClientValidated('authorization',
collection='Vault',
runtime=self._runtime)
if not isinstance(vault_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
result = collection.find_one({'_id': ObjectId(vault_id.get_identifier())})
cat_form = objects.VaultForm(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)
self._forms[cat_form.get_id().get_identifier()] = not UPDATED
return cat_form
|
def function[get_vault_form_for_update, parameter[self, vault_id]]:
constant[Gets the vault form for updating an existing vault.
A new vault form should be requested for each update
transaction.
arg: vault_id (osid.id.Id): the ``Id`` of the ``Vault``
return: (osid.authorization.VaultForm) - the vault form
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
if compare[name[self]._catalog_session is_not constant[None]] begin[:]
return[call[name[self]._catalog_session.get_catalog_form_for_update, parameter[]]]
variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[authorization]]]
if <ast.UnaryOp object at 0x7da2046229e0> begin[:]
<ast.Raise object at 0x7da204620520>
variable[result] assign[=] call[name[collection].find_one, parameter[dictionary[[<ast.Constant object at 0x7da204623e80>], [<ast.Call object at 0x7da204623ca0>]]]]
variable[cat_form] assign[=] call[name[objects].VaultForm, parameter[]]
call[name[self]._forms][call[call[name[cat_form].get_id, parameter[]].get_identifier, parameter[]]] assign[=] <ast.UnaryOp object at 0x7da204623700>
return[name[cat_form]]
|
keyword[def] identifier[get_vault_form_for_update] ( identifier[self] , identifier[vault_id] ):
literal[string]
keyword[if] identifier[self] . identifier[_catalog_session] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_catalog_session] . identifier[get_catalog_form_for_update] ( identifier[catalog_id] = identifier[vault_id] )
identifier[collection] = identifier[JSONClientValidated] ( literal[string] ,
identifier[collection] = literal[string] ,
identifier[runtime] = identifier[self] . identifier[_runtime] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[vault_id] , identifier[ABCId] ):
keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] )
identifier[result] = identifier[collection] . identifier[find_one] ({ literal[string] : identifier[ObjectId] ( identifier[vault_id] . identifier[get_identifier] ())})
identifier[cat_form] = identifier[objects] . identifier[VaultForm] ( identifier[osid_object_map] = identifier[result] , identifier[runtime] = identifier[self] . identifier[_runtime] , identifier[proxy] = identifier[self] . identifier[_proxy] )
identifier[self] . identifier[_forms] [ identifier[cat_form] . identifier[get_id] (). identifier[get_identifier] ()]= keyword[not] identifier[UPDATED]
keyword[return] identifier[cat_form]
|
def get_vault_form_for_update(self, vault_id):
"""Gets the vault form for updating an existing vault.
A new vault form should be requested for each update
transaction.
arg: vault_id (osid.id.Id): the ``Id`` of the ``Vault``
return: (osid.authorization.VaultForm) - the vault form
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.get_bin_form_for_update_template
if self._catalog_session is not None:
return self._catalog_session.get_catalog_form_for_update(catalog_id=vault_id) # depends on [control=['if'], data=[]]
collection = JSONClientValidated('authorization', collection='Vault', runtime=self._runtime)
if not isinstance(vault_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id') # depends on [control=['if'], data=[]]
result = collection.find_one({'_id': ObjectId(vault_id.get_identifier())})
cat_form = objects.VaultForm(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)
self._forms[cat_form.get_id().get_identifier()] = not UPDATED
return cat_form
|
def main(args=None):
""" The main routine. """
cfg.configureLogger()
wireHandlers(cfg)
# get config from a flask standard place not our config yml
app.run(debug=cfg.runInDebug(), host='0.0.0.0', port=cfg.getPort())
|
def function[main, parameter[args]]:
constant[ The main routine. ]
call[name[cfg].configureLogger, parameter[]]
call[name[wireHandlers], parameter[name[cfg]]]
call[name[app].run, parameter[]]
|
keyword[def] identifier[main] ( identifier[args] = keyword[None] ):
literal[string]
identifier[cfg] . identifier[configureLogger] ()
identifier[wireHandlers] ( identifier[cfg] )
identifier[app] . identifier[run] ( identifier[debug] = identifier[cfg] . identifier[runInDebug] (), identifier[host] = literal[string] , identifier[port] = identifier[cfg] . identifier[getPort] ())
|
def main(args=None):
""" The main routine. """
cfg.configureLogger()
wireHandlers(cfg)
# get config from a flask standard place not our config yml
app.run(debug=cfg.runInDebug(), host='0.0.0.0', port=cfg.getPort())
|
def expand_path_cfg(path_cfg, alias_dict={ }, overriding_kargs={ }):
"""expand a path config
Args:
path_cfg (str, tuple, dict): a config for path
alias_dict (dict): a dict for aliases
overriding_kargs (dict): to be used for recursive call
"""
if isinstance(path_cfg, str):
return _expand_str(path_cfg, alias_dict, overriding_kargs)
if isinstance(path_cfg, dict):
return _expand_dict(path_cfg, alias_dict)
# assume tuple or list
return _expand_tuple(path_cfg, alias_dict, overriding_kargs)
|
def function[expand_path_cfg, parameter[path_cfg, alias_dict, overriding_kargs]]:
constant[expand a path config
Args:
path_cfg (str, tuple, dict): a config for path
alias_dict (dict): a dict for aliases
overriding_kargs (dict): to be used for recursive call
]
if call[name[isinstance], parameter[name[path_cfg], name[str]]] begin[:]
return[call[name[_expand_str], parameter[name[path_cfg], name[alias_dict], name[overriding_kargs]]]]
if call[name[isinstance], parameter[name[path_cfg], name[dict]]] begin[:]
return[call[name[_expand_dict], parameter[name[path_cfg], name[alias_dict]]]]
return[call[name[_expand_tuple], parameter[name[path_cfg], name[alias_dict], name[overriding_kargs]]]]
|
keyword[def] identifier[expand_path_cfg] ( identifier[path_cfg] , identifier[alias_dict] ={}, identifier[overriding_kargs] ={}):
literal[string]
keyword[if] identifier[isinstance] ( identifier[path_cfg] , identifier[str] ):
keyword[return] identifier[_expand_str] ( identifier[path_cfg] , identifier[alias_dict] , identifier[overriding_kargs] )
keyword[if] identifier[isinstance] ( identifier[path_cfg] , identifier[dict] ):
keyword[return] identifier[_expand_dict] ( identifier[path_cfg] , identifier[alias_dict] )
keyword[return] identifier[_expand_tuple] ( identifier[path_cfg] , identifier[alias_dict] , identifier[overriding_kargs] )
|
def expand_path_cfg(path_cfg, alias_dict={}, overriding_kargs={}):
"""expand a path config
Args:
path_cfg (str, tuple, dict): a config for path
alias_dict (dict): a dict for aliases
overriding_kargs (dict): to be used for recursive call
"""
if isinstance(path_cfg, str):
return _expand_str(path_cfg, alias_dict, overriding_kargs) # depends on [control=['if'], data=[]]
if isinstance(path_cfg, dict):
return _expand_dict(path_cfg, alias_dict) # depends on [control=['if'], data=[]]
# assume tuple or list
return _expand_tuple(path_cfg, alias_dict, overriding_kargs)
|
def per_file_type_data(self):
"""
Return download data by file type.
:return: dict of cache data; keys are datetime objects, values are
dict of file type (str) to count (int)
:rtype: dict
"""
ret = {}
for cache_date in self.cache_dates:
data = self._cache_get(cache_date)
if len(data['by_file_type']) == 0:
data['by_file_type'] = {'other': 0}
ret[cache_date] = data['by_file_type']
return ret
|
def function[per_file_type_data, parameter[self]]:
constant[
Return download data by file type.
:return: dict of cache data; keys are datetime objects, values are
dict of file type (str) to count (int)
:rtype: dict
]
variable[ret] assign[=] dictionary[[], []]
for taget[name[cache_date]] in starred[name[self].cache_dates] begin[:]
variable[data] assign[=] call[name[self]._cache_get, parameter[name[cache_date]]]
if compare[call[name[len], parameter[call[name[data]][constant[by_file_type]]]] equal[==] constant[0]] begin[:]
call[name[data]][constant[by_file_type]] assign[=] dictionary[[<ast.Constant object at 0x7da2041d9780>], [<ast.Constant object at 0x7da2041d9b10>]]
call[name[ret]][name[cache_date]] assign[=] call[name[data]][constant[by_file_type]]
return[name[ret]]
|
keyword[def] identifier[per_file_type_data] ( identifier[self] ):
literal[string]
identifier[ret] ={}
keyword[for] identifier[cache_date] keyword[in] identifier[self] . identifier[cache_dates] :
identifier[data] = identifier[self] . identifier[_cache_get] ( identifier[cache_date] )
keyword[if] identifier[len] ( identifier[data] [ literal[string] ])== literal[int] :
identifier[data] [ literal[string] ]={ literal[string] : literal[int] }
identifier[ret] [ identifier[cache_date] ]= identifier[data] [ literal[string] ]
keyword[return] identifier[ret]
|
def per_file_type_data(self):
"""
Return download data by file type.
:return: dict of cache data; keys are datetime objects, values are
dict of file type (str) to count (int)
:rtype: dict
"""
ret = {}
for cache_date in self.cache_dates:
data = self._cache_get(cache_date)
if len(data['by_file_type']) == 0:
data['by_file_type'] = {'other': 0} # depends on [control=['if'], data=[]]
ret[cache_date] = data['by_file_type'] # depends on [control=['for'], data=['cache_date']]
return ret
|
def process_task(self):
"""This function is called when the client has failed to send all of the
segments of a segmented request, the application has taken too long to
complete the request, or the client failed to ack the segments of a
segmented response."""
if _debug: ServerSSM._debug("process_task")
if self.state == SEGMENTED_REQUEST:
self.segmented_request_timeout()
elif self.state == AWAIT_RESPONSE:
self.await_response_timeout()
elif self.state == SEGMENTED_RESPONSE:
self.segmented_response_timeout()
elif self.state == COMPLETED:
pass
elif self.state == ABORTED:
pass
else:
if _debug: ServerSSM._debug("invalid state")
raise RuntimeError("invalid state")
|
def function[process_task, parameter[self]]:
constant[This function is called when the client has failed to send all of the
segments of a segmented request, the application has taken too long to
complete the request, or the client failed to ack the segments of a
segmented response.]
if name[_debug] begin[:]
call[name[ServerSSM]._debug, parameter[constant[process_task]]]
if compare[name[self].state equal[==] name[SEGMENTED_REQUEST]] begin[:]
call[name[self].segmented_request_timeout, parameter[]]
|
keyword[def] identifier[process_task] ( identifier[self] ):
literal[string]
keyword[if] identifier[_debug] : identifier[ServerSSM] . identifier[_debug] ( literal[string] )
keyword[if] identifier[self] . identifier[state] == identifier[SEGMENTED_REQUEST] :
identifier[self] . identifier[segmented_request_timeout] ()
keyword[elif] identifier[self] . identifier[state] == identifier[AWAIT_RESPONSE] :
identifier[self] . identifier[await_response_timeout] ()
keyword[elif] identifier[self] . identifier[state] == identifier[SEGMENTED_RESPONSE] :
identifier[self] . identifier[segmented_response_timeout] ()
keyword[elif] identifier[self] . identifier[state] == identifier[COMPLETED] :
keyword[pass]
keyword[elif] identifier[self] . identifier[state] == identifier[ABORTED] :
keyword[pass]
keyword[else] :
keyword[if] identifier[_debug] : identifier[ServerSSM] . identifier[_debug] ( literal[string] )
keyword[raise] identifier[RuntimeError] ( literal[string] )
|
def process_task(self):
"""This function is called when the client has failed to send all of the
segments of a segmented request, the application has taken too long to
complete the request, or the client failed to ack the segments of a
segmented response."""
if _debug:
ServerSSM._debug('process_task') # depends on [control=['if'], data=[]]
if self.state == SEGMENTED_REQUEST:
self.segmented_request_timeout() # depends on [control=['if'], data=[]]
elif self.state == AWAIT_RESPONSE:
self.await_response_timeout() # depends on [control=['if'], data=[]]
elif self.state == SEGMENTED_RESPONSE:
self.segmented_response_timeout() # depends on [control=['if'], data=[]]
elif self.state == COMPLETED:
pass # depends on [control=['if'], data=[]]
elif self.state == ABORTED:
pass # depends on [control=['if'], data=[]]
else:
if _debug:
ServerSSM._debug('invalid state') # depends on [control=['if'], data=[]]
raise RuntimeError('invalid state')
|
def _fake_openenumerateinstancepaths(self, namespace, **params):
# pylint: disable=invalid-name
"""
Implements WBEM server responder for
:meth:`~pywbem.WBEMConnection.OpenEnumerationInstancePaths`
with data from the instance repository.
"""
self._validate_namespace(namespace)
self._validate_open_params(**params)
result_t = self._fake_enumerateinstancenames(namespace, **params)
return self._open_response(result_t[0][2], namespace,
'PullInstancePaths', **params)
|
def function[_fake_openenumerateinstancepaths, parameter[self, namespace]]:
constant[
Implements WBEM server responder for
:meth:`~pywbem.WBEMConnection.OpenEnumerationInstancePaths`
with data from the instance repository.
]
call[name[self]._validate_namespace, parameter[name[namespace]]]
call[name[self]._validate_open_params, parameter[]]
variable[result_t] assign[=] call[name[self]._fake_enumerateinstancenames, parameter[name[namespace]]]
return[call[name[self]._open_response, parameter[call[call[name[result_t]][constant[0]]][constant[2]], name[namespace], constant[PullInstancePaths]]]]
|
keyword[def] identifier[_fake_openenumerateinstancepaths] ( identifier[self] , identifier[namespace] ,** identifier[params] ):
literal[string]
identifier[self] . identifier[_validate_namespace] ( identifier[namespace] )
identifier[self] . identifier[_validate_open_params] (** identifier[params] )
identifier[result_t] = identifier[self] . identifier[_fake_enumerateinstancenames] ( identifier[namespace] ,** identifier[params] )
keyword[return] identifier[self] . identifier[_open_response] ( identifier[result_t] [ literal[int] ][ literal[int] ], identifier[namespace] ,
literal[string] ,** identifier[params] )
|
def _fake_openenumerateinstancepaths(self, namespace, **params):
# pylint: disable=invalid-name
'\n Implements WBEM server responder for\n :meth:`~pywbem.WBEMConnection.OpenEnumerationInstancePaths`\n with data from the instance repository.\n '
self._validate_namespace(namespace)
self._validate_open_params(**params)
result_t = self._fake_enumerateinstancenames(namespace, **params)
return self._open_response(result_t[0][2], namespace, 'PullInstancePaths', **params)
|
def set_fc_volume(self, port_id,
target_wwn, target_lun=0, boot_prio=1,
initiator_wwnn=None, initiator_wwpn=None):
"""Set FibreChannel volume information to configuration.
:param port_id: Physical port ID.
:param target_wwn: WWN of target.
:param target_lun: LUN number of target.
:param boot_prio: Boot priority of the volume. 1 indicates the highest
priority.
:param initiator_wwnn: Virtual WWNN for initiator if necessary.
:param initiator_wwpn: Virtual WWPN for initiator if necessary.
"""
port_handler = _parse_physical_port_id(port_id)
fc_target = elcm.FCTarget(target_wwn, target_lun)
fc_boot = elcm.FCBoot(boot_prio=boot_prio, boot_enable=True)
fc_boot.add_target(fc_target)
port = self._find_port(port_handler)
if port:
port_handler.set_fc_port(port, fc_boot,
wwnn=initiator_wwnn, wwpn=initiator_wwpn)
else:
port = port_handler.create_fc_port(fc_boot,
wwnn=initiator_wwnn,
wwpn=initiator_wwpn)
self._add_port(port_handler, port)
|
def function[set_fc_volume, parameter[self, port_id, target_wwn, target_lun, boot_prio, initiator_wwnn, initiator_wwpn]]:
constant[Set FibreChannel volume information to configuration.
:param port_id: Physical port ID.
:param target_wwn: WWN of target.
:param target_lun: LUN number of target.
:param boot_prio: Boot priority of the volume. 1 indicates the highest
priority.
:param initiator_wwnn: Virtual WWNN for initiator if necessary.
:param initiator_wwpn: Virtual WWPN for initiator if necessary.
]
variable[port_handler] assign[=] call[name[_parse_physical_port_id], parameter[name[port_id]]]
variable[fc_target] assign[=] call[name[elcm].FCTarget, parameter[name[target_wwn], name[target_lun]]]
variable[fc_boot] assign[=] call[name[elcm].FCBoot, parameter[]]
call[name[fc_boot].add_target, parameter[name[fc_target]]]
variable[port] assign[=] call[name[self]._find_port, parameter[name[port_handler]]]
if name[port] begin[:]
call[name[port_handler].set_fc_port, parameter[name[port], name[fc_boot]]]
|
keyword[def] identifier[set_fc_volume] ( identifier[self] , identifier[port_id] ,
identifier[target_wwn] , identifier[target_lun] = literal[int] , identifier[boot_prio] = literal[int] ,
identifier[initiator_wwnn] = keyword[None] , identifier[initiator_wwpn] = keyword[None] ):
literal[string]
identifier[port_handler] = identifier[_parse_physical_port_id] ( identifier[port_id] )
identifier[fc_target] = identifier[elcm] . identifier[FCTarget] ( identifier[target_wwn] , identifier[target_lun] )
identifier[fc_boot] = identifier[elcm] . identifier[FCBoot] ( identifier[boot_prio] = identifier[boot_prio] , identifier[boot_enable] = keyword[True] )
identifier[fc_boot] . identifier[add_target] ( identifier[fc_target] )
identifier[port] = identifier[self] . identifier[_find_port] ( identifier[port_handler] )
keyword[if] identifier[port] :
identifier[port_handler] . identifier[set_fc_port] ( identifier[port] , identifier[fc_boot] ,
identifier[wwnn] = identifier[initiator_wwnn] , identifier[wwpn] = identifier[initiator_wwpn] )
keyword[else] :
identifier[port] = identifier[port_handler] . identifier[create_fc_port] ( identifier[fc_boot] ,
identifier[wwnn] = identifier[initiator_wwnn] ,
identifier[wwpn] = identifier[initiator_wwpn] )
identifier[self] . identifier[_add_port] ( identifier[port_handler] , identifier[port] )
|
def set_fc_volume(self, port_id, target_wwn, target_lun=0, boot_prio=1, initiator_wwnn=None, initiator_wwpn=None):
"""Set FibreChannel volume information to configuration.
:param port_id: Physical port ID.
:param target_wwn: WWN of target.
:param target_lun: LUN number of target.
:param boot_prio: Boot priority of the volume. 1 indicates the highest
priority.
:param initiator_wwnn: Virtual WWNN for initiator if necessary.
:param initiator_wwpn: Virtual WWPN for initiator if necessary.
"""
port_handler = _parse_physical_port_id(port_id)
fc_target = elcm.FCTarget(target_wwn, target_lun)
fc_boot = elcm.FCBoot(boot_prio=boot_prio, boot_enable=True)
fc_boot.add_target(fc_target)
port = self._find_port(port_handler)
if port:
port_handler.set_fc_port(port, fc_boot, wwnn=initiator_wwnn, wwpn=initiator_wwpn) # depends on [control=['if'], data=[]]
else:
port = port_handler.create_fc_port(fc_boot, wwnn=initiator_wwnn, wwpn=initiator_wwpn)
self._add_port(port_handler, port)
|
def apply_transformation(self, structure):
"""
:param structure (bulk structure to be scaled up - typically conventional unit cell)
:return:
defect_structure, with charge applied
"""
if structure != self.defect.bulk_structure:
raise ValueError("Defect bulk_structure is not the same as input structure.")
def_structure = self.defect.generate_defect_structure(self.scaling_matrix)
return def_structure
|
def function[apply_transformation, parameter[self, structure]]:
constant[
:param structure (bulk structure to be scaled up - typically conventional unit cell)
:return:
defect_structure, with charge applied
]
if compare[name[structure] not_equal[!=] name[self].defect.bulk_structure] begin[:]
<ast.Raise object at 0x7da20c6c6410>
variable[def_structure] assign[=] call[name[self].defect.generate_defect_structure, parameter[name[self].scaling_matrix]]
return[name[def_structure]]
|
keyword[def] identifier[apply_transformation] ( identifier[self] , identifier[structure] ):
literal[string]
keyword[if] identifier[structure] != identifier[self] . identifier[defect] . identifier[bulk_structure] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[def_structure] = identifier[self] . identifier[defect] . identifier[generate_defect_structure] ( identifier[self] . identifier[scaling_matrix] )
keyword[return] identifier[def_structure]
|
def apply_transformation(self, structure):
"""
:param structure (bulk structure to be scaled up - typically conventional unit cell)
:return:
defect_structure, with charge applied
"""
if structure != self.defect.bulk_structure:
raise ValueError('Defect bulk_structure is not the same as input structure.') # depends on [control=['if'], data=[]]
def_structure = self.defect.generate_defect_structure(self.scaling_matrix)
return def_structure
|
def run_cli(cli_parser, run_sample, known_args=None):
"""Run sampling with CLI arguments.
Parameters
----------
cli_parser : function
Function to add method specific arguments to parser
run_sample: function
Method specific function that runs the sampling
known_args: list [optional]
Additional arguments to parse
Returns
----------
argparse object
"""
parser = create(cli_parser)
args = parser.parse_args(known_args)
run_sample(args)
|
def function[run_cli, parameter[cli_parser, run_sample, known_args]]:
constant[Run sampling with CLI arguments.
Parameters
----------
cli_parser : function
Function to add method specific arguments to parser
run_sample: function
Method specific function that runs the sampling
known_args: list [optional]
Additional arguments to parse
Returns
----------
argparse object
]
variable[parser] assign[=] call[name[create], parameter[name[cli_parser]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[name[known_args]]]
call[name[run_sample], parameter[name[args]]]
|
keyword[def] identifier[run_cli] ( identifier[cli_parser] , identifier[run_sample] , identifier[known_args] = keyword[None] ):
literal[string]
identifier[parser] = identifier[create] ( identifier[cli_parser] )
identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[known_args] )
identifier[run_sample] ( identifier[args] )
|
def run_cli(cli_parser, run_sample, known_args=None):
"""Run sampling with CLI arguments.
Parameters
----------
cli_parser : function
Function to add method specific arguments to parser
run_sample: function
Method specific function that runs the sampling
known_args: list [optional]
Additional arguments to parse
Returns
----------
argparse object
"""
parser = create(cli_parser)
args = parser.parse_args(known_args)
run_sample(args)
|
def pisaParser(src, context, default_css="", xhtml=False, encoding=None, xml_output=None):
"""
- Parse HTML and get miniDOM
- Extract CSS informations, add default CSS, parse CSS
- Handle the document DOM itself and build reportlab story
- Return Context object
"""
global CSSAttrCache
CSSAttrCache = {}
if xhtml:
# TODO: XHTMLParser doesn't see to exist...
parser = html5lib.XHTMLParser(tree=treebuilders.getTreeBuilder("dom"))
else:
parser = html5lib.HTMLParser(tree=treebuilders.getTreeBuilder("dom"))
if isinstance(src, six.text_type):
# If an encoding was provided, do not change it.
if not encoding:
encoding = "utf-8"
src = src.encode(encoding)
src = pisaTempFile(src, capacity=context.capacity)
# # Test for the restrictions of html5lib
# if encoding:
# # Workaround for html5lib<0.11.1
# if hasattr(inputstream, "isValidEncoding"):
# if encoding.strip().lower() == "utf8":
# encoding = "utf-8"
# if not inputstream.isValidEncoding(encoding):
# log.error("%r is not a valid encoding e.g. 'utf8' is not valid but 'utf-8' is!", encoding)
# else:
# if inputstream.codecName(encoding) is None:
# log.error("%r is not a valid encoding", encoding)
document = parser.parse(
src,
) # encoding=encoding)
if xml_output:
if encoding:
xml_output.write(document.toprettyxml(encoding=encoding))
else:
xml_output.write(document.toprettyxml(encoding="utf8"))
if default_css:
context.addDefaultCSS(default_css)
pisaPreLoop(document, context)
# try:
context.parseCSS()
# except:
# context.cssText = DEFAULT_CSS
# context.parseCSS()
# context.debug(9, pprint.pformat(context.css))
pisaLoop(document, context)
return context
|
def function[pisaParser, parameter[src, context, default_css, xhtml, encoding, xml_output]]:
constant[
- Parse HTML and get miniDOM
- Extract CSS informations, add default CSS, parse CSS
- Handle the document DOM itself and build reportlab story
- Return Context object
]
<ast.Global object at 0x7da18bc71720>
variable[CSSAttrCache] assign[=] dictionary[[], []]
if name[xhtml] begin[:]
variable[parser] assign[=] call[name[html5lib].XHTMLParser, parameter[]]
if call[name[isinstance], parameter[name[src], name[six].text_type]] begin[:]
if <ast.UnaryOp object at 0x7da18bc722f0> begin[:]
variable[encoding] assign[=] constant[utf-8]
variable[src] assign[=] call[name[src].encode, parameter[name[encoding]]]
variable[src] assign[=] call[name[pisaTempFile], parameter[name[src]]]
variable[document] assign[=] call[name[parser].parse, parameter[name[src]]]
if name[xml_output] begin[:]
if name[encoding] begin[:]
call[name[xml_output].write, parameter[call[name[document].toprettyxml, parameter[]]]]
if name[default_css] begin[:]
call[name[context].addDefaultCSS, parameter[name[default_css]]]
call[name[pisaPreLoop], parameter[name[document], name[context]]]
call[name[context].parseCSS, parameter[]]
call[name[pisaLoop], parameter[name[document], name[context]]]
return[name[context]]
|
keyword[def] identifier[pisaParser] ( identifier[src] , identifier[context] , identifier[default_css] = literal[string] , identifier[xhtml] = keyword[False] , identifier[encoding] = keyword[None] , identifier[xml_output] = keyword[None] ):
literal[string]
keyword[global] identifier[CSSAttrCache]
identifier[CSSAttrCache] ={}
keyword[if] identifier[xhtml] :
identifier[parser] = identifier[html5lib] . identifier[XHTMLParser] ( identifier[tree] = identifier[treebuilders] . identifier[getTreeBuilder] ( literal[string] ))
keyword[else] :
identifier[parser] = identifier[html5lib] . identifier[HTMLParser] ( identifier[tree] = identifier[treebuilders] . identifier[getTreeBuilder] ( literal[string] ))
keyword[if] identifier[isinstance] ( identifier[src] , identifier[six] . identifier[text_type] ):
keyword[if] keyword[not] identifier[encoding] :
identifier[encoding] = literal[string]
identifier[src] = identifier[src] . identifier[encode] ( identifier[encoding] )
identifier[src] = identifier[pisaTempFile] ( identifier[src] , identifier[capacity] = identifier[context] . identifier[capacity] )
identifier[document] = identifier[parser] . identifier[parse] (
identifier[src] ,
)
keyword[if] identifier[xml_output] :
keyword[if] identifier[encoding] :
identifier[xml_output] . identifier[write] ( identifier[document] . identifier[toprettyxml] ( identifier[encoding] = identifier[encoding] ))
keyword[else] :
identifier[xml_output] . identifier[write] ( identifier[document] . identifier[toprettyxml] ( identifier[encoding] = literal[string] ))
keyword[if] identifier[default_css] :
identifier[context] . identifier[addDefaultCSS] ( identifier[default_css] )
identifier[pisaPreLoop] ( identifier[document] , identifier[context] )
identifier[context] . identifier[parseCSS] ()
identifier[pisaLoop] ( identifier[document] , identifier[context] )
keyword[return] identifier[context]
|
def pisaParser(src, context, default_css='', xhtml=False, encoding=None, xml_output=None):
"""
- Parse HTML and get miniDOM
- Extract CSS informations, add default CSS, parse CSS
- Handle the document DOM itself and build reportlab story
- Return Context object
"""
global CSSAttrCache
CSSAttrCache = {}
if xhtml:
# TODO: XHTMLParser doesn't see to exist...
parser = html5lib.XHTMLParser(tree=treebuilders.getTreeBuilder('dom')) # depends on [control=['if'], data=[]]
else:
parser = html5lib.HTMLParser(tree=treebuilders.getTreeBuilder('dom'))
if isinstance(src, six.text_type):
# If an encoding was provided, do not change it.
if not encoding:
encoding = 'utf-8' # depends on [control=['if'], data=[]]
src = src.encode(encoding)
src = pisaTempFile(src, capacity=context.capacity) # depends on [control=['if'], data=[]]
# # Test for the restrictions of html5lib
# if encoding:
# # Workaround for html5lib<0.11.1
# if hasattr(inputstream, "isValidEncoding"):
# if encoding.strip().lower() == "utf8":
# encoding = "utf-8"
# if not inputstream.isValidEncoding(encoding):
# log.error("%r is not a valid encoding e.g. 'utf8' is not valid but 'utf-8' is!", encoding)
# else:
# if inputstream.codecName(encoding) is None:
# log.error("%r is not a valid encoding", encoding)
document = parser.parse(src) # encoding=encoding)
if xml_output:
if encoding:
xml_output.write(document.toprettyxml(encoding=encoding)) # depends on [control=['if'], data=[]]
else:
xml_output.write(document.toprettyxml(encoding='utf8')) # depends on [control=['if'], data=[]]
if default_css:
context.addDefaultCSS(default_css) # depends on [control=['if'], data=[]]
pisaPreLoop(document, context)
# try:
context.parseCSS()
# except:
# context.cssText = DEFAULT_CSS
# context.parseCSS()
# context.debug(9, pprint.pformat(context.css))
pisaLoop(document, context)
return context
|
def bulk_upsert(
queryset, model_objs, unique_fields, update_fields=None, return_upserts=False, return_upserts_distinct=False,
sync=False, native=False
):
"""
Performs a bulk update or insert on a list of model objects. Matches all objects in the queryset
with the objs provided using the field values in unique_fields.
If an existing object is matched, it is updated with the values from the provided objects. Objects
that don't match anything are bulk created.
A user can provide a list update_fields so that any changed values on those fields will be updated.
However, if update_fields is not provided, this function reduces down to performing a bulk_create
on any non extant objects.
:type model_objs: list of dict
:param model_objs: A list of dictionaries that have fields corresponding to the model in the manager.
:type unique_fields: list of str
:param unique_fields: A list of fields that are used to determine if an object in objs matches a model
from the queryset.
:type update_fields: list of str
:param update_fields: A list of fields used from the objects in objs as fields when updating existing
models. If None, this function will only perform a bulk create for model_objs that do not
currently exist in the database.
:type return_upserts_distinct: bool
:param return_upserts_distinct: A flag specifying whether to return the upserted values as a list of distinct lists,
one containing the updated models and the other containing the new models. If True, this performs an
additional query to fetch any bulk created values.
:type return_upserts: bool
:param return_upserts: A flag specifying whether to return the upserted values. If True, this performs
an additional query to fetch any bulk created values.
:type sync: bool
:param sync: A flag specifying whether a sync operation should be applied to the bulk_upsert. If this
is True, all values in the queryset that were not updated will be deleted such that the
entire list of model objects is synced to the queryset.
:type native: bool
:param native: A flag specifying whether to use postgres insert on conflict (upsert).
:signals: Emits a post_bulk_operation when a bulk_update or a bulk_create occurs.
Examples:
.. code-block:: python
# Start off with no objects in the database. Call a bulk_upsert on the TestModel, which includes
# a char_field, int_field, and float_field
bulk_upsert(TestModel.objects.all(), [
TestModel(float_field=1.0, char_field='1', int_field=1),
TestModel(float_field=2.0, char_field='2', int_field=2),
TestModel(float_field=3.0, char_field='3', int_field=3),
], ['int_field'], ['char_field'])
# All objects should have been created
print(TestModel.objects.count())
3
# Now perform a bulk upsert on all the char_field values. Since the objects existed previously
# (known by the int_field uniqueness constraint), the char fields should be updated
bulk_upsert(TestModel.objects.all(), [
TestModel(float_field=1.0, char_field='0', int_field=1),
TestModel(float_field=2.0, char_field='0', int_field=2),
TestModel(float_field=3.0, char_field='0', int_field=3),
], ['int_field'], ['char_field'])
# No more new objects should have been created, and every char field should be 0
print(TestModel.objects.count(), TestModel.objects.filter(char_field='-1').count())
3, 3
# Do the exact same operation, but this time add an additional object that is not already
# stored. It will be created.
bulk_upsert(TestModel.objects.all(), [
TestModel(float_field=1.0, char_field='1', int_field=1),
TestModel(float_field=2.0, char_field='2', int_field=2),
TestModel(float_field=3.0, char_field='3', int_field=3),
TestModel(float_field=4.0, char_field='4', int_field=4),
], ['int_field'], ['char_field'])
# There should be one more object
print(TestModel.objects.count())
4
# Note that one can also do the upsert on a queryset. Perform the same data upsert on a
# filter for int_field=1. In this case, only one object has the ability to be updated.
# All of the other objects will be created
bulk_upsert(TestModel.objects.filter(int_field=1), [
TestModel(float_field=1.0, char_field='1', int_field=1),
TestModel(float_field=2.0, char_field='2', int_field=2),
TestModel(float_field=3.0, char_field='3', int_field=3),
TestModel(float_field=4.0, char_field='4', int_field=4),
], ['int_field'], ['char_field'])
# There should be three more objects
print(TestModel.objects.count())
7
"""
if not unique_fields:
raise ValueError('Must provide unique_fields argument')
update_fields = update_fields or []
if native:
if return_upserts_distinct:
raise NotImplementedError('return upserts distinct not supported with native postgres upsert')
return_value = Query().from_table(table=queryset.model).upsert(
model_objs, unique_fields, update_fields, return_models=return_upserts or sync
) or []
if sync:
orig_ids = frozenset(queryset.values_list('pk', flat=True))
queryset.filter(pk__in=orig_ids - frozenset([m.pk for m in return_value])).delete()
post_bulk_operation.send(sender=queryset.model, model=queryset.model)
return return_value
# Create a look up table for all of the objects in the queryset keyed on the unique_fields
extant_model_objs = {
tuple(getattr(extant_model_obj, field) for field in unique_fields): extant_model_obj
for extant_model_obj in queryset
}
# Find all of the objects to update and all of the objects to create
model_objs_to_update, model_objs_to_create = _get_model_objs_to_update_and_create(
model_objs, unique_fields, update_fields, extant_model_objs)
# Find all objects in the queryset that will not be updated. These will be deleted if the sync option is
# True
if sync:
model_objs_to_update_set = frozenset(model_objs_to_update)
model_objs_to_delete = [
model_obj.pk for model_obj in extant_model_objs.values() if model_obj not in model_objs_to_update_set
]
if model_objs_to_delete:
queryset.filter(pk__in=model_objs_to_delete).delete()
# Apply bulk updates and creates
if update_fields:
bulk_update(queryset, model_objs_to_update, update_fields)
queryset.bulk_create(model_objs_to_create)
# Optionally return the bulk upserted values
if return_upserts_distinct:
# return a list of lists, the first being the updated models, the second being the newly created objects
return _get_upserts_distinct(queryset, model_objs_to_update, model_objs_to_create, unique_fields)
if return_upserts:
return _get_upserts(queryset, model_objs_to_update, model_objs_to_create, unique_fields)
|
def function[bulk_upsert, parameter[queryset, model_objs, unique_fields, update_fields, return_upserts, return_upserts_distinct, sync, native]]:
constant[
Performs a bulk update or insert on a list of model objects. Matches all objects in the queryset
with the objs provided using the field values in unique_fields.
If an existing object is matched, it is updated with the values from the provided objects. Objects
that don't match anything are bulk created.
A user can provide a list update_fields so that any changed values on those fields will be updated.
However, if update_fields is not provided, this function reduces down to performing a bulk_create
on any non extant objects.
:type model_objs: list of dict
:param model_objs: A list of dictionaries that have fields corresponding to the model in the manager.
:type unique_fields: list of str
:param unique_fields: A list of fields that are used to determine if an object in objs matches a model
from the queryset.
:type update_fields: list of str
:param update_fields: A list of fields used from the objects in objs as fields when updating existing
models. If None, this function will only perform a bulk create for model_objs that do not
currently exist in the database.
:type return_upserts_distinct: bool
:param return_upserts_distinct: A flag specifying whether to return the upserted values as a list of distinct lists,
one containing the updated models and the other containing the new models. If True, this performs an
additional query to fetch any bulk created values.
:type return_upserts: bool
:param return_upserts: A flag specifying whether to return the upserted values. If True, this performs
an additional query to fetch any bulk created values.
:type sync: bool
:param sync: A flag specifying whether a sync operation should be applied to the bulk_upsert. If this
is True, all values in the queryset that were not updated will be deleted such that the
entire list of model objects is synced to the queryset.
:type native: bool
:param native: A flag specifying whether to use postgres insert on conflict (upsert).
:signals: Emits a post_bulk_operation when a bulk_update or a bulk_create occurs.
Examples:
.. code-block:: python
# Start off with no objects in the database. Call a bulk_upsert on the TestModel, which includes
# a char_field, int_field, and float_field
bulk_upsert(TestModel.objects.all(), [
TestModel(float_field=1.0, char_field='1', int_field=1),
TestModel(float_field=2.0, char_field='2', int_field=2),
TestModel(float_field=3.0, char_field='3', int_field=3),
], ['int_field'], ['char_field'])
# All objects should have been created
print(TestModel.objects.count())
3
# Now perform a bulk upsert on all the char_field values. Since the objects existed previously
# (known by the int_field uniqueness constraint), the char fields should be updated
bulk_upsert(TestModel.objects.all(), [
TestModel(float_field=1.0, char_field='0', int_field=1),
TestModel(float_field=2.0, char_field='0', int_field=2),
TestModel(float_field=3.0, char_field='0', int_field=3),
], ['int_field'], ['char_field'])
# No more new objects should have been created, and every char field should be 0
print(TestModel.objects.count(), TestModel.objects.filter(char_field='-1').count())
3, 3
# Do the exact same operation, but this time add an additional object that is not already
# stored. It will be created.
bulk_upsert(TestModel.objects.all(), [
TestModel(float_field=1.0, char_field='1', int_field=1),
TestModel(float_field=2.0, char_field='2', int_field=2),
TestModel(float_field=3.0, char_field='3', int_field=3),
TestModel(float_field=4.0, char_field='4', int_field=4),
], ['int_field'], ['char_field'])
# There should be one more object
print(TestModel.objects.count())
4
# Note that one can also do the upsert on a queryset. Perform the same data upsert on a
# filter for int_field=1. In this case, only one object has the ability to be updated.
# All of the other objects will be created
bulk_upsert(TestModel.objects.filter(int_field=1), [
TestModel(float_field=1.0, char_field='1', int_field=1),
TestModel(float_field=2.0, char_field='2', int_field=2),
TestModel(float_field=3.0, char_field='3', int_field=3),
TestModel(float_field=4.0, char_field='4', int_field=4),
], ['int_field'], ['char_field'])
# There should be three more objects
print(TestModel.objects.count())
7
]
if <ast.UnaryOp object at 0x7da18ede6f20> begin[:]
<ast.Raise object at 0x7da18ede66e0>
variable[update_fields] assign[=] <ast.BoolOp object at 0x7da18ede41f0>
if name[native] begin[:]
if name[return_upserts_distinct] begin[:]
<ast.Raise object at 0x7da18ede7430>
variable[return_value] assign[=] <ast.BoolOp object at 0x7da18ede61d0>
if name[sync] begin[:]
variable[orig_ids] assign[=] call[name[frozenset], parameter[call[name[queryset].values_list, parameter[constant[pk]]]]]
call[call[name[queryset].filter, parameter[]].delete, parameter[]]
call[name[post_bulk_operation].send, parameter[]]
return[name[return_value]]
variable[extant_model_objs] assign[=] <ast.DictComp object at 0x7da18ede4130>
<ast.Tuple object at 0x7da18ede5570> assign[=] call[name[_get_model_objs_to_update_and_create], parameter[name[model_objs], name[unique_fields], name[update_fields], name[extant_model_objs]]]
if name[sync] begin[:]
variable[model_objs_to_update_set] assign[=] call[name[frozenset], parameter[name[model_objs_to_update]]]
variable[model_objs_to_delete] assign[=] <ast.ListComp object at 0x7da18ede5f90>
if name[model_objs_to_delete] begin[:]
call[call[name[queryset].filter, parameter[]].delete, parameter[]]
if name[update_fields] begin[:]
call[name[bulk_update], parameter[name[queryset], name[model_objs_to_update], name[update_fields]]]
call[name[queryset].bulk_create, parameter[name[model_objs_to_create]]]
if name[return_upserts_distinct] begin[:]
return[call[name[_get_upserts_distinct], parameter[name[queryset], name[model_objs_to_update], name[model_objs_to_create], name[unique_fields]]]]
if name[return_upserts] begin[:]
return[call[name[_get_upserts], parameter[name[queryset], name[model_objs_to_update], name[model_objs_to_create], name[unique_fields]]]]
|
keyword[def] identifier[bulk_upsert] (
identifier[queryset] , identifier[model_objs] , identifier[unique_fields] , identifier[update_fields] = keyword[None] , identifier[return_upserts] = keyword[False] , identifier[return_upserts_distinct] = keyword[False] ,
identifier[sync] = keyword[False] , identifier[native] = keyword[False]
):
literal[string]
keyword[if] keyword[not] identifier[unique_fields] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[update_fields] = identifier[update_fields] keyword[or] []
keyword[if] identifier[native] :
keyword[if] identifier[return_upserts_distinct] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
identifier[return_value] = identifier[Query] (). identifier[from_table] ( identifier[table] = identifier[queryset] . identifier[model] ). identifier[upsert] (
identifier[model_objs] , identifier[unique_fields] , identifier[update_fields] , identifier[return_models] = identifier[return_upserts] keyword[or] identifier[sync]
) keyword[or] []
keyword[if] identifier[sync] :
identifier[orig_ids] = identifier[frozenset] ( identifier[queryset] . identifier[values_list] ( literal[string] , identifier[flat] = keyword[True] ))
identifier[queryset] . identifier[filter] ( identifier[pk__in] = identifier[orig_ids] - identifier[frozenset] ([ identifier[m] . identifier[pk] keyword[for] identifier[m] keyword[in] identifier[return_value] ])). identifier[delete] ()
identifier[post_bulk_operation] . identifier[send] ( identifier[sender] = identifier[queryset] . identifier[model] , identifier[model] = identifier[queryset] . identifier[model] )
keyword[return] identifier[return_value]
identifier[extant_model_objs] ={
identifier[tuple] ( identifier[getattr] ( identifier[extant_model_obj] , identifier[field] ) keyword[for] identifier[field] keyword[in] identifier[unique_fields] ): identifier[extant_model_obj]
keyword[for] identifier[extant_model_obj] keyword[in] identifier[queryset]
}
identifier[model_objs_to_update] , identifier[model_objs_to_create] = identifier[_get_model_objs_to_update_and_create] (
identifier[model_objs] , identifier[unique_fields] , identifier[update_fields] , identifier[extant_model_objs] )
keyword[if] identifier[sync] :
identifier[model_objs_to_update_set] = identifier[frozenset] ( identifier[model_objs_to_update] )
identifier[model_objs_to_delete] =[
identifier[model_obj] . identifier[pk] keyword[for] identifier[model_obj] keyword[in] identifier[extant_model_objs] . identifier[values] () keyword[if] identifier[model_obj] keyword[not] keyword[in] identifier[model_objs_to_update_set]
]
keyword[if] identifier[model_objs_to_delete] :
identifier[queryset] . identifier[filter] ( identifier[pk__in] = identifier[model_objs_to_delete] ). identifier[delete] ()
keyword[if] identifier[update_fields] :
identifier[bulk_update] ( identifier[queryset] , identifier[model_objs_to_update] , identifier[update_fields] )
identifier[queryset] . identifier[bulk_create] ( identifier[model_objs_to_create] )
keyword[if] identifier[return_upserts_distinct] :
keyword[return] identifier[_get_upserts_distinct] ( identifier[queryset] , identifier[model_objs_to_update] , identifier[model_objs_to_create] , identifier[unique_fields] )
keyword[if] identifier[return_upserts] :
keyword[return] identifier[_get_upserts] ( identifier[queryset] , identifier[model_objs_to_update] , identifier[model_objs_to_create] , identifier[unique_fields] )
|
def bulk_upsert(queryset, model_objs, unique_fields, update_fields=None, return_upserts=False, return_upserts_distinct=False, sync=False, native=False):
"""
Performs a bulk update or insert on a list of model objects. Matches all objects in the queryset
with the objs provided using the field values in unique_fields.
If an existing object is matched, it is updated with the values from the provided objects. Objects
that don't match anything are bulk created.
A user can provide a list update_fields so that any changed values on those fields will be updated.
However, if update_fields is not provided, this function reduces down to performing a bulk_create
on any non extant objects.
:type model_objs: list of dict
:param model_objs: A list of dictionaries that have fields corresponding to the model in the manager.
:type unique_fields: list of str
:param unique_fields: A list of fields that are used to determine if an object in objs matches a model
from the queryset.
:type update_fields: list of str
:param update_fields: A list of fields used from the objects in objs as fields when updating existing
models. If None, this function will only perform a bulk create for model_objs that do not
currently exist in the database.
:type return_upserts_distinct: bool
:param return_upserts_distinct: A flag specifying whether to return the upserted values as a list of distinct lists,
one containing the updated models and the other containing the new models. If True, this performs an
additional query to fetch any bulk created values.
:type return_upserts: bool
:param return_upserts: A flag specifying whether to return the upserted values. If True, this performs
an additional query to fetch any bulk created values.
:type sync: bool
:param sync: A flag specifying whether a sync operation should be applied to the bulk_upsert. If this
is True, all values in the queryset that were not updated will be deleted such that the
entire list of model objects is synced to the queryset.
:type native: bool
:param native: A flag specifying whether to use postgres insert on conflict (upsert).
:signals: Emits a post_bulk_operation when a bulk_update or a bulk_create occurs.
Examples:
.. code-block:: python
# Start off with no objects in the database. Call a bulk_upsert on the TestModel, which includes
# a char_field, int_field, and float_field
bulk_upsert(TestModel.objects.all(), [
TestModel(float_field=1.0, char_field='1', int_field=1),
TestModel(float_field=2.0, char_field='2', int_field=2),
TestModel(float_field=3.0, char_field='3', int_field=3),
], ['int_field'], ['char_field'])
# All objects should have been created
print(TestModel.objects.count())
3
# Now perform a bulk upsert on all the char_field values. Since the objects existed previously
# (known by the int_field uniqueness constraint), the char fields should be updated
bulk_upsert(TestModel.objects.all(), [
TestModel(float_field=1.0, char_field='0', int_field=1),
TestModel(float_field=2.0, char_field='0', int_field=2),
TestModel(float_field=3.0, char_field='0', int_field=3),
], ['int_field'], ['char_field'])
# No more new objects should have been created, and every char field should be 0
print(TestModel.objects.count(), TestModel.objects.filter(char_field='-1').count())
3, 3
# Do the exact same operation, but this time add an additional object that is not already
# stored. It will be created.
bulk_upsert(TestModel.objects.all(), [
TestModel(float_field=1.0, char_field='1', int_field=1),
TestModel(float_field=2.0, char_field='2', int_field=2),
TestModel(float_field=3.0, char_field='3', int_field=3),
TestModel(float_field=4.0, char_field='4', int_field=4),
], ['int_field'], ['char_field'])
# There should be one more object
print(TestModel.objects.count())
4
# Note that one can also do the upsert on a queryset. Perform the same data upsert on a
# filter for int_field=1. In this case, only one object has the ability to be updated.
# All of the other objects will be created
bulk_upsert(TestModel.objects.filter(int_field=1), [
TestModel(float_field=1.0, char_field='1', int_field=1),
TestModel(float_field=2.0, char_field='2', int_field=2),
TestModel(float_field=3.0, char_field='3', int_field=3),
TestModel(float_field=4.0, char_field='4', int_field=4),
], ['int_field'], ['char_field'])
# There should be three more objects
print(TestModel.objects.count())
7
"""
if not unique_fields:
raise ValueError('Must provide unique_fields argument') # depends on [control=['if'], data=[]]
update_fields = update_fields or []
if native:
if return_upserts_distinct:
raise NotImplementedError('return upserts distinct not supported with native postgres upsert') # depends on [control=['if'], data=[]]
return_value = Query().from_table(table=queryset.model).upsert(model_objs, unique_fields, update_fields, return_models=return_upserts or sync) or []
if sync:
orig_ids = frozenset(queryset.values_list('pk', flat=True))
queryset.filter(pk__in=orig_ids - frozenset([m.pk for m in return_value])).delete() # depends on [control=['if'], data=[]]
post_bulk_operation.send(sender=queryset.model, model=queryset.model)
return return_value # depends on [control=['if'], data=[]]
# Create a look up table for all of the objects in the queryset keyed on the unique_fields
extant_model_objs = {tuple((getattr(extant_model_obj, field) for field in unique_fields)): extant_model_obj for extant_model_obj in queryset}
# Find all of the objects to update and all of the objects to create
(model_objs_to_update, model_objs_to_create) = _get_model_objs_to_update_and_create(model_objs, unique_fields, update_fields, extant_model_objs)
# Find all objects in the queryset that will not be updated. These will be deleted if the sync option is
# True
if sync:
model_objs_to_update_set = frozenset(model_objs_to_update)
model_objs_to_delete = [model_obj.pk for model_obj in extant_model_objs.values() if model_obj not in model_objs_to_update_set]
if model_objs_to_delete:
queryset.filter(pk__in=model_objs_to_delete).delete() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Apply bulk updates and creates
if update_fields:
bulk_update(queryset, model_objs_to_update, update_fields) # depends on [control=['if'], data=[]]
queryset.bulk_create(model_objs_to_create)
# Optionally return the bulk upserted values
if return_upserts_distinct:
# return a list of lists, the first being the updated models, the second being the newly created objects
return _get_upserts_distinct(queryset, model_objs_to_update, model_objs_to_create, unique_fields) # depends on [control=['if'], data=[]]
if return_upserts:
return _get_upserts(queryset, model_objs_to_update, model_objs_to_create, unique_fields) # depends on [control=['if'], data=[]]
|
def get_update_tile(self, params, values):
""" Get the amount of support size required for a particular update."""
doglobal, particles = self._update_type(params)
if doglobal:
return self.shape.copy()
# 1) store the current parameters of interest
values0 = self.get_values(params)
# 2) calculate the current tileset
tiles0 = [self._tile(n) for n in particles]
# 3) update to newer parameters and calculate tileset
self.set_values(params, values)
tiles1 = [self._tile(n) for n in particles]
# 4) revert parameters & return union of all tiles
self.set_values(params, values0)
return Tile.boundingtile(tiles0 + tiles1)
|
def function[get_update_tile, parameter[self, params, values]]:
constant[ Get the amount of support size required for a particular update.]
<ast.Tuple object at 0x7da18ede60b0> assign[=] call[name[self]._update_type, parameter[name[params]]]
if name[doglobal] begin[:]
return[call[name[self].shape.copy, parameter[]]]
variable[values0] assign[=] call[name[self].get_values, parameter[name[params]]]
variable[tiles0] assign[=] <ast.ListComp object at 0x7da18ede4670>
call[name[self].set_values, parameter[name[params], name[values]]]
variable[tiles1] assign[=] <ast.ListComp object at 0x7da18ede40d0>
call[name[self].set_values, parameter[name[params], name[values0]]]
return[call[name[Tile].boundingtile, parameter[binary_operation[name[tiles0] + name[tiles1]]]]]
|
keyword[def] identifier[get_update_tile] ( identifier[self] , identifier[params] , identifier[values] ):
literal[string]
identifier[doglobal] , identifier[particles] = identifier[self] . identifier[_update_type] ( identifier[params] )
keyword[if] identifier[doglobal] :
keyword[return] identifier[self] . identifier[shape] . identifier[copy] ()
identifier[values0] = identifier[self] . identifier[get_values] ( identifier[params] )
identifier[tiles0] =[ identifier[self] . identifier[_tile] ( identifier[n] ) keyword[for] identifier[n] keyword[in] identifier[particles] ]
identifier[self] . identifier[set_values] ( identifier[params] , identifier[values] )
identifier[tiles1] =[ identifier[self] . identifier[_tile] ( identifier[n] ) keyword[for] identifier[n] keyword[in] identifier[particles] ]
identifier[self] . identifier[set_values] ( identifier[params] , identifier[values0] )
keyword[return] identifier[Tile] . identifier[boundingtile] ( identifier[tiles0] + identifier[tiles1] )
|
def get_update_tile(self, params, values):
""" Get the amount of support size required for a particular update."""
(doglobal, particles) = self._update_type(params)
if doglobal:
return self.shape.copy() # depends on [control=['if'], data=[]]
# 1) store the current parameters of interest
values0 = self.get_values(params)
# 2) calculate the current tileset
tiles0 = [self._tile(n) for n in particles]
# 3) update to newer parameters and calculate tileset
self.set_values(params, values)
tiles1 = [self._tile(n) for n in particles]
# 4) revert parameters & return union of all tiles
self.set_values(params, values0)
return Tile.boundingtile(tiles0 + tiles1)
|
def convert_all_videos(app_label, model_name, object_pk):
"""
Automatically converts all videos of a given instance.
"""
# get instance
Model = apps.get_model(app_label=app_label, model_name=model_name)
instance = Model.objects.get(pk=object_pk)
# search for `VideoFields`
fields = instance._meta.fields
for field in fields:
if isinstance(field, VideoField):
if not getattr(instance, field.name):
# ignore empty fields
continue
# trigger conversion
fieldfile = getattr(instance, field.name)
convert_video(fieldfile)
|
def function[convert_all_videos, parameter[app_label, model_name, object_pk]]:
constant[
Automatically converts all videos of a given instance.
]
variable[Model] assign[=] call[name[apps].get_model, parameter[]]
variable[instance] assign[=] call[name[Model].objects.get, parameter[]]
variable[fields] assign[=] name[instance]._meta.fields
for taget[name[field]] in starred[name[fields]] begin[:]
if call[name[isinstance], parameter[name[field], name[VideoField]]] begin[:]
if <ast.UnaryOp object at 0x7da1b04ec4c0> begin[:]
continue
variable[fieldfile] assign[=] call[name[getattr], parameter[name[instance], name[field].name]]
call[name[convert_video], parameter[name[fieldfile]]]
|
keyword[def] identifier[convert_all_videos] ( identifier[app_label] , identifier[model_name] , identifier[object_pk] ):
literal[string]
identifier[Model] = identifier[apps] . identifier[get_model] ( identifier[app_label] = identifier[app_label] , identifier[model_name] = identifier[model_name] )
identifier[instance] = identifier[Model] . identifier[objects] . identifier[get] ( identifier[pk] = identifier[object_pk] )
identifier[fields] = identifier[instance] . identifier[_meta] . identifier[fields]
keyword[for] identifier[field] keyword[in] identifier[fields] :
keyword[if] identifier[isinstance] ( identifier[field] , identifier[VideoField] ):
keyword[if] keyword[not] identifier[getattr] ( identifier[instance] , identifier[field] . identifier[name] ):
keyword[continue]
identifier[fieldfile] = identifier[getattr] ( identifier[instance] , identifier[field] . identifier[name] )
identifier[convert_video] ( identifier[fieldfile] )
|
def convert_all_videos(app_label, model_name, object_pk):
"""
Automatically converts all videos of a given instance.
"""
# get instance
Model = apps.get_model(app_label=app_label, model_name=model_name)
instance = Model.objects.get(pk=object_pk)
# search for `VideoFields`
fields = instance._meta.fields
for field in fields:
if isinstance(field, VideoField):
if not getattr(instance, field.name):
# ignore empty fields
continue # depends on [control=['if'], data=[]]
# trigger conversion
fieldfile = getattr(instance, field.name)
convert_video(fieldfile) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']]
|
def get_last_calc_id(datadir=None):
"""
Extract the latest calculation ID from the given directory.
If none is found, return 0.
"""
datadir = datadir or get_datadir()
calcs = get_calc_ids(datadir)
if not calcs:
return 0
return calcs[-1]
|
def function[get_last_calc_id, parameter[datadir]]:
constant[
Extract the latest calculation ID from the given directory.
If none is found, return 0.
]
variable[datadir] assign[=] <ast.BoolOp object at 0x7da18f00e050>
variable[calcs] assign[=] call[name[get_calc_ids], parameter[name[datadir]]]
if <ast.UnaryOp object at 0x7da18f00c310> begin[:]
return[constant[0]]
return[call[name[calcs]][<ast.UnaryOp object at 0x7da18f00df60>]]
|
keyword[def] identifier[get_last_calc_id] ( identifier[datadir] = keyword[None] ):
literal[string]
identifier[datadir] = identifier[datadir] keyword[or] identifier[get_datadir] ()
identifier[calcs] = identifier[get_calc_ids] ( identifier[datadir] )
keyword[if] keyword[not] identifier[calcs] :
keyword[return] literal[int]
keyword[return] identifier[calcs] [- literal[int] ]
|
def get_last_calc_id(datadir=None):
"""
Extract the latest calculation ID from the given directory.
If none is found, return 0.
"""
datadir = datadir or get_datadir()
calcs = get_calc_ids(datadir)
if not calcs:
return 0 # depends on [control=['if'], data=[]]
return calcs[-1]
|
def new_consumer(self, config, consumer_name):
"""Return a consumer dict for the given name and configuration.
:param dict config: The consumer configuration
:param str consumer_name: The consumer name
:rtype: dict
"""
return Consumer(0,
dict(),
config.get('qty', self.DEFAULT_CONSUMER_QTY),
config.get('queue', consumer_name))
|
def function[new_consumer, parameter[self, config, consumer_name]]:
constant[Return a consumer dict for the given name and configuration.
:param dict config: The consumer configuration
:param str consumer_name: The consumer name
:rtype: dict
]
return[call[name[Consumer], parameter[constant[0], call[name[dict], parameter[]], call[name[config].get, parameter[constant[qty], name[self].DEFAULT_CONSUMER_QTY]], call[name[config].get, parameter[constant[queue], name[consumer_name]]]]]]
|
keyword[def] identifier[new_consumer] ( identifier[self] , identifier[config] , identifier[consumer_name] ):
literal[string]
keyword[return] identifier[Consumer] ( literal[int] ,
identifier[dict] (),
identifier[config] . identifier[get] ( literal[string] , identifier[self] . identifier[DEFAULT_CONSUMER_QTY] ),
identifier[config] . identifier[get] ( literal[string] , identifier[consumer_name] ))
|
def new_consumer(self, config, consumer_name):
"""Return a consumer dict for the given name and configuration.
:param dict config: The consumer configuration
:param str consumer_name: The consumer name
:rtype: dict
"""
return Consumer(0, dict(), config.get('qty', self.DEFAULT_CONSUMER_QTY), config.get('queue', consumer_name))
|
def get_session(self, sid, namespace=None):
"""Return the user session for a client.
The only difference with the :func:`socketio.Server.get_session`
method is that when the ``namespace`` argument is not given the
namespace associated with the class is used.
"""
return self.server.get_session(
sid, namespace=namespace or self.namespace)
|
def function[get_session, parameter[self, sid, namespace]]:
constant[Return the user session for a client.
The only difference with the :func:`socketio.Server.get_session`
method is that when the ``namespace`` argument is not given the
namespace associated with the class is used.
]
return[call[name[self].server.get_session, parameter[name[sid]]]]
|
keyword[def] identifier[get_session] ( identifier[self] , identifier[sid] , identifier[namespace] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[server] . identifier[get_session] (
identifier[sid] , identifier[namespace] = identifier[namespace] keyword[or] identifier[self] . identifier[namespace] )
|
def get_session(self, sid, namespace=None):
"""Return the user session for a client.
The only difference with the :func:`socketio.Server.get_session`
method is that when the ``namespace`` argument is not given the
namespace associated with the class is used.
"""
return self.server.get_session(sid, namespace=namespace or self.namespace)
|
def _ScanNode(self, scan_context, scan_node, auto_recurse=True):
"""Scans a node for supported formats.
Args:
scan_context (SourceScannerContext): source scanner context.
scan_node (SourceScanNode): source scan node.
auto_recurse (Optional[bool]): True if the scan should automatically
recurse as far as possible.
Raises:
BackEndError: if the source cannot be scanned.
ValueError: if the scan context or scan node is invalid.
"""
if not scan_context:
raise ValueError('Invalid scan context.')
if not scan_node:
raise ValueError('Invalid scan node.')
scan_path_spec = scan_node.path_spec
system_level_file_entry = None
if scan_node.IsSystemLevel():
system_level_file_entry = resolver.Resolver.OpenFileEntry(
scan_node.path_spec, resolver_context=self._resolver_context)
if system_level_file_entry is None:
raise errors.BackEndError('Unable to open file entry.')
if system_level_file_entry.IsDirectory():
scan_context.SetSourceType(definitions.SOURCE_TYPE_DIRECTORY)
return
source_path_spec = self.ScanForStorageMediaImage(scan_node.path_spec)
if source_path_spec:
scan_node.scanned = True
scan_node = scan_context.AddScanNode(source_path_spec, scan_node)
if system_level_file_entry.IsDevice():
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE
else:
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE
scan_context.SetSourceType(source_type)
if not auto_recurse:
return
# In case we did not find a storage media image type we keep looking
# since not all RAW storage media image naming schemas are known and
# its type can only detected by its content.
source_path_spec = None
while True:
if scan_node.IsFileSystem():
# No need to scan a file systems scan node for volume systems.
break
if scan_node.SupportsEncryption():
self._ScanEncryptedVolumeNode(scan_context, scan_node)
if scan_context.IsLockedScanNode(scan_node.path_spec):
# Scan node is locked, such as an encrypted volume, and we cannot
# scan it for a volume system.
break
source_path_spec = self.ScanForVolumeSystem(scan_node.path_spec)
if not source_path_spec:
# No volume system found continue with a file system scan.
break
if not scan_context.HasScanNode(source_path_spec):
scan_node.scanned = True
scan_node = scan_context.AddScanNode(source_path_spec, scan_node)
if system_level_file_entry and system_level_file_entry.IsDevice():
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE
else:
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE
scan_context.SetSourceType(source_type)
if scan_node.IsVolumeSystemRoot():
self._ScanVolumeSystemRootNode(
scan_context, scan_node, auto_recurse=auto_recurse)
# We already have already scanned for the file systems.
return
if not auto_recurse and scan_context.updated:
return
# Nothing new found.
if not scan_context.updated:
break
# In case we did not find a volume system type we keep looking
# since we could be dealing with a storage media image that contains
# a single volume.
# No need to scan the root of a volume system for a file system.
if scan_node.IsVolumeSystemRoot():
pass
elif scan_context.IsLockedScanNode(scan_node.path_spec):
# Scan node is locked, such as an encrypted volume, and we cannot
# scan it for a file system.
pass
elif (scan_node.type_indicator == definitions.TYPE_INDICATOR_VSHADOW and
auto_recurse and scan_node.path_spec != scan_path_spec):
# Since scanning for file systems in VSS snapshot volumes can
# be expensive we only do this when explicitly asked for.
pass
elif not scan_node.IsFileSystem():
source_path_spec = self.ScanForFileSystem(scan_node.path_spec)
if not source_path_spec:
# Since RAW storage media image can only be determined by naming schema
# we could have single file that is not a RAW storage media image yet
# matches the naming schema.
if scan_node.path_spec.type_indicator == definitions.TYPE_INDICATOR_RAW:
scan_node = scan_context.RemoveScanNode(scan_node.path_spec)
# Make sure to override the previously assigned source type.
scan_context.source_type = definitions.SOURCE_TYPE_FILE
else:
scan_context.SetSourceType(definitions.SOURCE_TYPE_FILE)
elif not scan_context.HasScanNode(source_path_spec):
scan_node.scanned = True
scan_node = scan_context.AddScanNode(source_path_spec, scan_node)
if system_level_file_entry and system_level_file_entry.IsDevice():
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE
else:
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE
scan_context.SetSourceType(source_type)
# If all scans failed mark the scan node as scanned so we do not scan it
# again.
if not scan_node.scanned:
scan_node.scanned = True
|
def function[_ScanNode, parameter[self, scan_context, scan_node, auto_recurse]]:
constant[Scans a node for supported formats.
Args:
scan_context (SourceScannerContext): source scanner context.
scan_node (SourceScanNode): source scan node.
auto_recurse (Optional[bool]): True if the scan should automatically
recurse as far as possible.
Raises:
BackEndError: if the source cannot be scanned.
ValueError: if the scan context or scan node is invalid.
]
if <ast.UnaryOp object at 0x7da1b080ace0> begin[:]
<ast.Raise object at 0x7da1b080b190>
if <ast.UnaryOp object at 0x7da1b067b5b0> begin[:]
<ast.Raise object at 0x7da1b0679ea0>
variable[scan_path_spec] assign[=] name[scan_node].path_spec
variable[system_level_file_entry] assign[=] constant[None]
if call[name[scan_node].IsSystemLevel, parameter[]] begin[:]
variable[system_level_file_entry] assign[=] call[name[resolver].Resolver.OpenFileEntry, parameter[name[scan_node].path_spec]]
if compare[name[system_level_file_entry] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0679180>
if call[name[system_level_file_entry].IsDirectory, parameter[]] begin[:]
call[name[scan_context].SetSourceType, parameter[name[definitions].SOURCE_TYPE_DIRECTORY]]
return[None]
variable[source_path_spec] assign[=] call[name[self].ScanForStorageMediaImage, parameter[name[scan_node].path_spec]]
if name[source_path_spec] begin[:]
name[scan_node].scanned assign[=] constant[True]
variable[scan_node] assign[=] call[name[scan_context].AddScanNode, parameter[name[source_path_spec], name[scan_node]]]
if call[name[system_level_file_entry].IsDevice, parameter[]] begin[:]
variable[source_type] assign[=] name[definitions].SOURCE_TYPE_STORAGE_MEDIA_DEVICE
call[name[scan_context].SetSourceType, parameter[name[source_type]]]
if <ast.UnaryOp object at 0x7da1b0847df0> begin[:]
return[None]
variable[source_path_spec] assign[=] constant[None]
while constant[True] begin[:]
if call[name[scan_node].IsFileSystem, parameter[]] begin[:]
break
if call[name[scan_node].SupportsEncryption, parameter[]] begin[:]
call[name[self]._ScanEncryptedVolumeNode, parameter[name[scan_context], name[scan_node]]]
if call[name[scan_context].IsLockedScanNode, parameter[name[scan_node].path_spec]] begin[:]
break
variable[source_path_spec] assign[=] call[name[self].ScanForVolumeSystem, parameter[name[scan_node].path_spec]]
if <ast.UnaryOp object at 0x7da1b07b9c00> begin[:]
break
if <ast.UnaryOp object at 0x7da1b07baf50> begin[:]
name[scan_node].scanned assign[=] constant[True]
variable[scan_node] assign[=] call[name[scan_context].AddScanNode, parameter[name[source_path_spec], name[scan_node]]]
if <ast.BoolOp object at 0x7da1b07ba6e0> begin[:]
variable[source_type] assign[=] name[definitions].SOURCE_TYPE_STORAGE_MEDIA_DEVICE
call[name[scan_context].SetSourceType, parameter[name[source_type]]]
if call[name[scan_node].IsVolumeSystemRoot, parameter[]] begin[:]
call[name[self]._ScanVolumeSystemRootNode, parameter[name[scan_context], name[scan_node]]]
return[None]
if <ast.BoolOp object at 0x7da1b07bb130> begin[:]
return[None]
if <ast.UnaryOp object at 0x7da1b07bb6d0> begin[:]
break
if call[name[scan_node].IsVolumeSystemRoot, parameter[]] begin[:]
pass
if <ast.UnaryOp object at 0x7da1b07af850> begin[:]
name[scan_node].scanned assign[=] constant[True]
|
keyword[def] identifier[_ScanNode] ( identifier[self] , identifier[scan_context] , identifier[scan_node] , identifier[auto_recurse] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[scan_context] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[scan_node] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[scan_path_spec] = identifier[scan_node] . identifier[path_spec]
identifier[system_level_file_entry] = keyword[None]
keyword[if] identifier[scan_node] . identifier[IsSystemLevel] ():
identifier[system_level_file_entry] = identifier[resolver] . identifier[Resolver] . identifier[OpenFileEntry] (
identifier[scan_node] . identifier[path_spec] , identifier[resolver_context] = identifier[self] . identifier[_resolver_context] )
keyword[if] identifier[system_level_file_entry] keyword[is] keyword[None] :
keyword[raise] identifier[errors] . identifier[BackEndError] ( literal[string] )
keyword[if] identifier[system_level_file_entry] . identifier[IsDirectory] ():
identifier[scan_context] . identifier[SetSourceType] ( identifier[definitions] . identifier[SOURCE_TYPE_DIRECTORY] )
keyword[return]
identifier[source_path_spec] = identifier[self] . identifier[ScanForStorageMediaImage] ( identifier[scan_node] . identifier[path_spec] )
keyword[if] identifier[source_path_spec] :
identifier[scan_node] . identifier[scanned] = keyword[True]
identifier[scan_node] = identifier[scan_context] . identifier[AddScanNode] ( identifier[source_path_spec] , identifier[scan_node] )
keyword[if] identifier[system_level_file_entry] . identifier[IsDevice] ():
identifier[source_type] = identifier[definitions] . identifier[SOURCE_TYPE_STORAGE_MEDIA_DEVICE]
keyword[else] :
identifier[source_type] = identifier[definitions] . identifier[SOURCE_TYPE_STORAGE_MEDIA_IMAGE]
identifier[scan_context] . identifier[SetSourceType] ( identifier[source_type] )
keyword[if] keyword[not] identifier[auto_recurse] :
keyword[return]
identifier[source_path_spec] = keyword[None]
keyword[while] keyword[True] :
keyword[if] identifier[scan_node] . identifier[IsFileSystem] ():
keyword[break]
keyword[if] identifier[scan_node] . identifier[SupportsEncryption] ():
identifier[self] . identifier[_ScanEncryptedVolumeNode] ( identifier[scan_context] , identifier[scan_node] )
keyword[if] identifier[scan_context] . identifier[IsLockedScanNode] ( identifier[scan_node] . identifier[path_spec] ):
keyword[break]
identifier[source_path_spec] = identifier[self] . identifier[ScanForVolumeSystem] ( identifier[scan_node] . identifier[path_spec] )
keyword[if] keyword[not] identifier[source_path_spec] :
keyword[break]
keyword[if] keyword[not] identifier[scan_context] . identifier[HasScanNode] ( identifier[source_path_spec] ):
identifier[scan_node] . identifier[scanned] = keyword[True]
identifier[scan_node] = identifier[scan_context] . identifier[AddScanNode] ( identifier[source_path_spec] , identifier[scan_node] )
keyword[if] identifier[system_level_file_entry] keyword[and] identifier[system_level_file_entry] . identifier[IsDevice] ():
identifier[source_type] = identifier[definitions] . identifier[SOURCE_TYPE_STORAGE_MEDIA_DEVICE]
keyword[else] :
identifier[source_type] = identifier[definitions] . identifier[SOURCE_TYPE_STORAGE_MEDIA_IMAGE]
identifier[scan_context] . identifier[SetSourceType] ( identifier[source_type] )
keyword[if] identifier[scan_node] . identifier[IsVolumeSystemRoot] ():
identifier[self] . identifier[_ScanVolumeSystemRootNode] (
identifier[scan_context] , identifier[scan_node] , identifier[auto_recurse] = identifier[auto_recurse] )
keyword[return]
keyword[if] keyword[not] identifier[auto_recurse] keyword[and] identifier[scan_context] . identifier[updated] :
keyword[return]
keyword[if] keyword[not] identifier[scan_context] . identifier[updated] :
keyword[break]
keyword[if] identifier[scan_node] . identifier[IsVolumeSystemRoot] ():
keyword[pass]
keyword[elif] identifier[scan_context] . identifier[IsLockedScanNode] ( identifier[scan_node] . identifier[path_spec] ):
keyword[pass]
keyword[elif] ( identifier[scan_node] . identifier[type_indicator] == identifier[definitions] . identifier[TYPE_INDICATOR_VSHADOW] keyword[and]
identifier[auto_recurse] keyword[and] identifier[scan_node] . identifier[path_spec] != identifier[scan_path_spec] ):
keyword[pass]
keyword[elif] keyword[not] identifier[scan_node] . identifier[IsFileSystem] ():
identifier[source_path_spec] = identifier[self] . identifier[ScanForFileSystem] ( identifier[scan_node] . identifier[path_spec] )
keyword[if] keyword[not] identifier[source_path_spec] :
keyword[if] identifier[scan_node] . identifier[path_spec] . identifier[type_indicator] == identifier[definitions] . identifier[TYPE_INDICATOR_RAW] :
identifier[scan_node] = identifier[scan_context] . identifier[RemoveScanNode] ( identifier[scan_node] . identifier[path_spec] )
identifier[scan_context] . identifier[source_type] = identifier[definitions] . identifier[SOURCE_TYPE_FILE]
keyword[else] :
identifier[scan_context] . identifier[SetSourceType] ( identifier[definitions] . identifier[SOURCE_TYPE_FILE] )
keyword[elif] keyword[not] identifier[scan_context] . identifier[HasScanNode] ( identifier[source_path_spec] ):
identifier[scan_node] . identifier[scanned] = keyword[True]
identifier[scan_node] = identifier[scan_context] . identifier[AddScanNode] ( identifier[source_path_spec] , identifier[scan_node] )
keyword[if] identifier[system_level_file_entry] keyword[and] identifier[system_level_file_entry] . identifier[IsDevice] ():
identifier[source_type] = identifier[definitions] . identifier[SOURCE_TYPE_STORAGE_MEDIA_DEVICE]
keyword[else] :
identifier[source_type] = identifier[definitions] . identifier[SOURCE_TYPE_STORAGE_MEDIA_IMAGE]
identifier[scan_context] . identifier[SetSourceType] ( identifier[source_type] )
keyword[if] keyword[not] identifier[scan_node] . identifier[scanned] :
identifier[scan_node] . identifier[scanned] = keyword[True]
|
def _ScanNode(self, scan_context, scan_node, auto_recurse=True):
"""Scans a node for supported formats.
Args:
scan_context (SourceScannerContext): source scanner context.
scan_node (SourceScanNode): source scan node.
auto_recurse (Optional[bool]): True if the scan should automatically
recurse as far as possible.
Raises:
BackEndError: if the source cannot be scanned.
ValueError: if the scan context or scan node is invalid.
"""
if not scan_context:
raise ValueError('Invalid scan context.') # depends on [control=['if'], data=[]]
if not scan_node:
raise ValueError('Invalid scan node.') # depends on [control=['if'], data=[]]
scan_path_spec = scan_node.path_spec
system_level_file_entry = None
if scan_node.IsSystemLevel():
system_level_file_entry = resolver.Resolver.OpenFileEntry(scan_node.path_spec, resolver_context=self._resolver_context)
if system_level_file_entry is None:
raise errors.BackEndError('Unable to open file entry.') # depends on [control=['if'], data=[]]
if system_level_file_entry.IsDirectory():
scan_context.SetSourceType(definitions.SOURCE_TYPE_DIRECTORY)
return # depends on [control=['if'], data=[]]
source_path_spec = self.ScanForStorageMediaImage(scan_node.path_spec)
if source_path_spec:
scan_node.scanned = True
scan_node = scan_context.AddScanNode(source_path_spec, scan_node)
if system_level_file_entry.IsDevice():
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE # depends on [control=['if'], data=[]]
else:
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE
scan_context.SetSourceType(source_type)
if not auto_recurse:
return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# In case we did not find a storage media image type we keep looking
# since not all RAW storage media image naming schemas are known and
# its type can only detected by its content.
source_path_spec = None
while True:
if scan_node.IsFileSystem():
# No need to scan a file systems scan node for volume systems.
break # depends on [control=['if'], data=[]]
if scan_node.SupportsEncryption():
self._ScanEncryptedVolumeNode(scan_context, scan_node) # depends on [control=['if'], data=[]]
if scan_context.IsLockedScanNode(scan_node.path_spec):
# Scan node is locked, such as an encrypted volume, and we cannot
# scan it for a volume system.
break # depends on [control=['if'], data=[]]
source_path_spec = self.ScanForVolumeSystem(scan_node.path_spec)
if not source_path_spec:
# No volume system found continue with a file system scan.
break # depends on [control=['if'], data=[]]
if not scan_context.HasScanNode(source_path_spec):
scan_node.scanned = True
scan_node = scan_context.AddScanNode(source_path_spec, scan_node)
if system_level_file_entry and system_level_file_entry.IsDevice():
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE # depends on [control=['if'], data=[]]
else:
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE
scan_context.SetSourceType(source_type) # depends on [control=['if'], data=[]]
if scan_node.IsVolumeSystemRoot():
self._ScanVolumeSystemRootNode(scan_context, scan_node, auto_recurse=auto_recurse)
# We already have already scanned for the file systems.
return # depends on [control=['if'], data=[]]
if not auto_recurse and scan_context.updated:
return # depends on [control=['if'], data=[]]
# Nothing new found.
if not scan_context.updated:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
# In case we did not find a volume system type we keep looking
# since we could be dealing with a storage media image that contains
# a single volume.
# No need to scan the root of a volume system for a file system.
if scan_node.IsVolumeSystemRoot():
pass # depends on [control=['if'], data=[]]
elif scan_context.IsLockedScanNode(scan_node.path_spec):
# Scan node is locked, such as an encrypted volume, and we cannot
# scan it for a file system.
pass # depends on [control=['if'], data=[]]
elif scan_node.type_indicator == definitions.TYPE_INDICATOR_VSHADOW and auto_recurse and (scan_node.path_spec != scan_path_spec):
# Since scanning for file systems in VSS snapshot volumes can
# be expensive we only do this when explicitly asked for.
pass # depends on [control=['if'], data=[]]
elif not scan_node.IsFileSystem():
source_path_spec = self.ScanForFileSystem(scan_node.path_spec)
if not source_path_spec:
# Since RAW storage media image can only be determined by naming schema
# we could have single file that is not a RAW storage media image yet
# matches the naming schema.
if scan_node.path_spec.type_indicator == definitions.TYPE_INDICATOR_RAW:
scan_node = scan_context.RemoveScanNode(scan_node.path_spec)
# Make sure to override the previously assigned source type.
scan_context.source_type = definitions.SOURCE_TYPE_FILE # depends on [control=['if'], data=[]]
else:
scan_context.SetSourceType(definitions.SOURCE_TYPE_FILE) # depends on [control=['if'], data=[]]
elif not scan_context.HasScanNode(source_path_spec):
scan_node.scanned = True
scan_node = scan_context.AddScanNode(source_path_spec, scan_node)
if system_level_file_entry and system_level_file_entry.IsDevice():
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE # depends on [control=['if'], data=[]]
else:
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE
scan_context.SetSourceType(source_type) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# If all scans failed mark the scan node as scanned so we do not scan it
# again.
if not scan_node.scanned:
scan_node.scanned = True # depends on [control=['if'], data=[]]
|
def rotate(self, log):
"""Move the current log to a new file with timestamp and create a new empty log file."""
self.write(log, rotate=True)
self.write({})
|
def function[rotate, parameter[self, log]]:
constant[Move the current log to a new file with timestamp and create a new empty log file.]
call[name[self].write, parameter[name[log]]]
call[name[self].write, parameter[dictionary[[], []]]]
|
keyword[def] identifier[rotate] ( identifier[self] , identifier[log] ):
literal[string]
identifier[self] . identifier[write] ( identifier[log] , identifier[rotate] = keyword[True] )
identifier[self] . identifier[write] ({})
|
def rotate(self, log):
"""Move the current log to a new file with timestamp and create a new empty log file."""
self.write(log, rotate=True)
self.write({})
|
def update(self, membershipId, isModerator=None, **request_parameters):
"""Update a team membership, by ID.
Args:
membershipId(basestring): The team membership ID.
isModerator(bool): Set to True to make the person a team moderator.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
TeamMembership: A TeamMembership object with the updated Webex
Teams team-membership details.
Raises:
TypeError: If the parameter types are incorrect.
ApiError: If the Webex Teams cloud returns an error.
"""
check_type(membershipId, basestring, may_be_none=False)
check_type(isModerator, bool)
put_data = dict_from_items_with_values(
request_parameters,
isModerator=isModerator,
)
# API request
json_data = self._session.put(API_ENDPOINT + '/' + membershipId,
json=put_data)
# Return a team membership object created from the response JSON data
return self._object_factory(OBJECT_TYPE, json_data)
|
def function[update, parameter[self, membershipId, isModerator]]:
constant[Update a team membership, by ID.
Args:
membershipId(basestring): The team membership ID.
isModerator(bool): Set to True to make the person a team moderator.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
TeamMembership: A TeamMembership object with the updated Webex
Teams team-membership details.
Raises:
TypeError: If the parameter types are incorrect.
ApiError: If the Webex Teams cloud returns an error.
]
call[name[check_type], parameter[name[membershipId], name[basestring]]]
call[name[check_type], parameter[name[isModerator], name[bool]]]
variable[put_data] assign[=] call[name[dict_from_items_with_values], parameter[name[request_parameters]]]
variable[json_data] assign[=] call[name[self]._session.put, parameter[binary_operation[binary_operation[name[API_ENDPOINT] + constant[/]] + name[membershipId]]]]
return[call[name[self]._object_factory, parameter[name[OBJECT_TYPE], name[json_data]]]]
|
keyword[def] identifier[update] ( identifier[self] , identifier[membershipId] , identifier[isModerator] = keyword[None] ,** identifier[request_parameters] ):
literal[string]
identifier[check_type] ( identifier[membershipId] , identifier[basestring] , identifier[may_be_none] = keyword[False] )
identifier[check_type] ( identifier[isModerator] , identifier[bool] )
identifier[put_data] = identifier[dict_from_items_with_values] (
identifier[request_parameters] ,
identifier[isModerator] = identifier[isModerator] ,
)
identifier[json_data] = identifier[self] . identifier[_session] . identifier[put] ( identifier[API_ENDPOINT] + literal[string] + identifier[membershipId] ,
identifier[json] = identifier[put_data] )
keyword[return] identifier[self] . identifier[_object_factory] ( identifier[OBJECT_TYPE] , identifier[json_data] )
|
def update(self, membershipId, isModerator=None, **request_parameters):
"""Update a team membership, by ID.
Args:
membershipId(basestring): The team membership ID.
isModerator(bool): Set to True to make the person a team moderator.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
TeamMembership: A TeamMembership object with the updated Webex
Teams team-membership details.
Raises:
TypeError: If the parameter types are incorrect.
ApiError: If the Webex Teams cloud returns an error.
"""
check_type(membershipId, basestring, may_be_none=False)
check_type(isModerator, bool)
put_data = dict_from_items_with_values(request_parameters, isModerator=isModerator)
# API request
json_data = self._session.put(API_ENDPOINT + '/' + membershipId, json=put_data)
# Return a team membership object created from the response JSON data
return self._object_factory(OBJECT_TYPE, json_data)
|
def msgurls(msg, urlidx=1):
"""Main entry function for urlscan.py
"""
# Written as a generator so I can easily choose only
# one subpart in the future (e.g., for
# multipart/alternative). Actually, I might even add
# a browser for the message structure?
enc = get_charset(msg)
if msg.is_multipart():
for part in msg.get_payload():
for chunk in msgurls(part, urlidx):
urlidx += 1
yield chunk
elif msg.get_content_type() == "text/plain":
decoded = decode_msg(msg, enc)
for chunk in extracturls(decoded):
urlidx += 1
yield chunk
elif msg.get_content_type() == "text/html":
decoded = decode_msg(msg, enc)
for chunk in extracthtmlurls(decoded):
urlidx += 1
yield chunk
|
def function[msgurls, parameter[msg, urlidx]]:
constant[Main entry function for urlscan.py
]
variable[enc] assign[=] call[name[get_charset], parameter[name[msg]]]
if call[name[msg].is_multipart, parameter[]] begin[:]
for taget[name[part]] in starred[call[name[msg].get_payload, parameter[]]] begin[:]
for taget[name[chunk]] in starred[call[name[msgurls], parameter[name[part], name[urlidx]]]] begin[:]
<ast.AugAssign object at 0x7da20c796140>
<ast.Yield object at 0x7da20c794310>
|
keyword[def] identifier[msgurls] ( identifier[msg] , identifier[urlidx] = literal[int] ):
literal[string]
identifier[enc] = identifier[get_charset] ( identifier[msg] )
keyword[if] identifier[msg] . identifier[is_multipart] ():
keyword[for] identifier[part] keyword[in] identifier[msg] . identifier[get_payload] ():
keyword[for] identifier[chunk] keyword[in] identifier[msgurls] ( identifier[part] , identifier[urlidx] ):
identifier[urlidx] += literal[int]
keyword[yield] identifier[chunk]
keyword[elif] identifier[msg] . identifier[get_content_type] ()== literal[string] :
identifier[decoded] = identifier[decode_msg] ( identifier[msg] , identifier[enc] )
keyword[for] identifier[chunk] keyword[in] identifier[extracturls] ( identifier[decoded] ):
identifier[urlidx] += literal[int]
keyword[yield] identifier[chunk]
keyword[elif] identifier[msg] . identifier[get_content_type] ()== literal[string] :
identifier[decoded] = identifier[decode_msg] ( identifier[msg] , identifier[enc] )
keyword[for] identifier[chunk] keyword[in] identifier[extracthtmlurls] ( identifier[decoded] ):
identifier[urlidx] += literal[int]
keyword[yield] identifier[chunk]
|
def msgurls(msg, urlidx=1):
"""Main entry function for urlscan.py
"""
# Written as a generator so I can easily choose only
# one subpart in the future (e.g., for
# multipart/alternative). Actually, I might even add
# a browser for the message structure?
enc = get_charset(msg)
if msg.is_multipart():
for part in msg.get_payload():
for chunk in msgurls(part, urlidx):
urlidx += 1
yield chunk # depends on [control=['for'], data=['chunk']] # depends on [control=['for'], data=['part']] # depends on [control=['if'], data=[]]
elif msg.get_content_type() == 'text/plain':
decoded = decode_msg(msg, enc)
for chunk in extracturls(decoded):
urlidx += 1
yield chunk # depends on [control=['for'], data=['chunk']] # depends on [control=['if'], data=[]]
elif msg.get_content_type() == 'text/html':
decoded = decode_msg(msg, enc)
for chunk in extracthtmlurls(decoded):
urlidx += 1
yield chunk # depends on [control=['for'], data=['chunk']] # depends on [control=['if'], data=[]]
|
def update_vpnservice(self, vpnservice, desc):
'''
Updates a VPN service
'''
vpnservice_id = self._find_vpnservice_id(vpnservice)
body = {'description': desc}
return self.network_conn.update_vpnservice(vpnservice_id,
body={'vpnservice': body})
|
def function[update_vpnservice, parameter[self, vpnservice, desc]]:
constant[
Updates a VPN service
]
variable[vpnservice_id] assign[=] call[name[self]._find_vpnservice_id, parameter[name[vpnservice]]]
variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da18f720790>], [<ast.Name object at 0x7da18f7202e0>]]
return[call[name[self].network_conn.update_vpnservice, parameter[name[vpnservice_id]]]]
|
keyword[def] identifier[update_vpnservice] ( identifier[self] , identifier[vpnservice] , identifier[desc] ):
literal[string]
identifier[vpnservice_id] = identifier[self] . identifier[_find_vpnservice_id] ( identifier[vpnservice] )
identifier[body] ={ literal[string] : identifier[desc] }
keyword[return] identifier[self] . identifier[network_conn] . identifier[update_vpnservice] ( identifier[vpnservice_id] ,
identifier[body] ={ literal[string] : identifier[body] })
|
def update_vpnservice(self, vpnservice, desc):
"""
Updates a VPN service
"""
vpnservice_id = self._find_vpnservice_id(vpnservice)
body = {'description': desc}
return self.network_conn.update_vpnservice(vpnservice_id, body={'vpnservice': body})
|
def make_refresh_on_demand_service(injector_component):
"""
create a refresh on demand service listening to refresh order on the component admin queue
:param injector_component: the injector_component to bind with the new refresh on demande service
:return: the created service
"""
LOGGER.debug("InjectorCachedComponentService.make_refresh_on_demand_service")
args = {
'service_q': injector_component.id,
'treatment_callback': injector_component.refresh,
'service_name': injector_component.id + " - On Demand Refreshing Service"
}
return InjectorCachedComponentService.driver.make_service(args)
|
def function[make_refresh_on_demand_service, parameter[injector_component]]:
constant[
create a refresh on demand service listening to refresh order on the component admin queue
:param injector_component: the injector_component to bind with the new refresh on demande service
:return: the created service
]
call[name[LOGGER].debug, parameter[constant[InjectorCachedComponentService.make_refresh_on_demand_service]]]
variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da18f09fd90>, <ast.Constant object at 0x7da18f09ec80>, <ast.Constant object at 0x7da18f09d120>], [<ast.Attribute object at 0x7da18f09d000>, <ast.Attribute object at 0x7da18f09c790>, <ast.BinOp object at 0x7da18f09c4f0>]]
return[call[name[InjectorCachedComponentService].driver.make_service, parameter[name[args]]]]
|
keyword[def] identifier[make_refresh_on_demand_service] ( identifier[injector_component] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
identifier[args] ={
literal[string] : identifier[injector_component] . identifier[id] ,
literal[string] : identifier[injector_component] . identifier[refresh] ,
literal[string] : identifier[injector_component] . identifier[id] + literal[string]
}
keyword[return] identifier[InjectorCachedComponentService] . identifier[driver] . identifier[make_service] ( identifier[args] )
|
def make_refresh_on_demand_service(injector_component):
"""
create a refresh on demand service listening to refresh order on the component admin queue
:param injector_component: the injector_component to bind with the new refresh on demande service
:return: the created service
"""
LOGGER.debug('InjectorCachedComponentService.make_refresh_on_demand_service')
args = {'service_q': injector_component.id, 'treatment_callback': injector_component.refresh, 'service_name': injector_component.id + ' - On Demand Refreshing Service'}
return InjectorCachedComponentService.driver.make_service(args)
|
def _UpdateDatabaseFromResponse(self, response, mode):
"""
Update database table given a user input in the form
"TABLENAME COL1=VAL1 COL2=VAL2".
Either ADD or DELETE from table depending on mode argument.
If the change succeeds the updated table is printed to stdout.
Parameters
----------
response : string
User input.
mode : string
Valid values are 'ADD' or 'DEL'.
Returns
----------
None
Will always return None. There are numerous early returns in the cases
where the database update cannot proceed for any reason.
"""
# Get tableName from user input (form TABLENAME COL1=VAL1 COL2=VAL2 etc)
try:
tableName, tableColumns = response.split(' ', 1)
except ValueError:
goodlogging.Log.Info("DB", "Database update failed - failed to extract table name from response")
return None
# Check user input against known table list
if tableName not in self._tableDict.keys():
goodlogging.Log.Info("DB", "Database update failed - unkown table name: {0}".format(tableName))
return None
# Build re pattern to extract column from user input (form TABLENAME COL1=VAL1 COL2=VAL2 etc)
rowSelect = []
for column in self._tableDict[tableName]:
colPatternList = ['(?:{0})'.format(i) for i in self._tableDict[tableName] if i != column]
colPatternList.append('(?:$)')
colPatternMatch = '|'.join(colPatternList)
matchPattern = '{0}.*?{1}=(.+?)\s*(?:{2})'.format(tableName, column, colPatternMatch)
match = re.findall(matchPattern, response)
# Match should be in form [(VAL1, VAL2, VAL3, etc.)]
if len(match) == 1:
rowSelect.append((column, match[0]))
elif len(match) > 1:
goodlogging.Log.Info('DB', 'Database update failed - multiple matches found for table {0} column {1}'.format(tableName, column))
return None
if len(rowSelect) == 0:
goodlogging.Log.Info('DB', 'Database update failed - no row selection critera found in response')
return None
# Print selected rows
rowCount = self._PrintDatabaseTable(tableName, rowSelect)
# Do DELETE flow
if mode.upper() == 'DEL':
if rowCount == 0:
goodlogging.Log.Info("DB", "Database update failed - no rows found for given search critera: {0}".format(response))
return None
deleteConfirmation = goodlogging.Log.Input("DB", "***WARNING*** DELETE THESE ROWS FROM {0} TABLE? [y/n]: ".format(tableName))
deleteConfirmation = util.ValidUserResponse(deleteConfirmation, ('y', 'n'))
if deleteConfirmation.lower() == 'n':
goodlogging.Log.Info("DB", "Database table row delete cancelled")
return None
# Build delete database query (form DELETE FROM TableName WHERE COL1=?, COL2=?)
dbQuery = "DELETE FROM {0}".format(tableName) \
+ " WHERE " \
+ ' AND '.join(['{0}=?'.format(i) for i, j in rowSelect])
dbQueryParams = [j for i, j in rowSelect]
self._ActionDatabase(dbQuery, dbQueryParams)
goodlogging.Log.Info("DB", "Deleted {0} row(s) from database table {0}:".format(rowCount, tableName))
# Do ADD flow
elif mode.upper() == 'ADD':
if rowCount != 0:
goodlogging.Log.Info("DB", "Database update failed - a row already exists for the given critera: {0}".format(response))
return None
# Build insert database query (form INSERT INTO TableName (COL1, COL2) VALUES (?,?))
dbQuery = "INSERT INTO {0} (".format(tableName) \
+ ', '.join(['{0}'.format(i) for i, j in rowSelect]) \
+ ") VALUES (" \
+ ', '.join(['?']*len(rowSelect)) \
+ ")"
dbQueryParams = [j for i, j in rowSelect]
self._ActionDatabase(dbQuery, dbQueryParams)
goodlogging.Log.Info("DB", "Added row to database table {0}:".format(tableName))
# Print resulting database table
self._PrintDatabaseTable(tableName)
|
def function[_UpdateDatabaseFromResponse, parameter[self, response, mode]]:
constant[
Update database table given a user input in the form
"TABLENAME COL1=VAL1 COL2=VAL2".
Either ADD or DELETE from table depending on mode argument.
If the change succeeds the updated table is printed to stdout.
Parameters
----------
response : string
User input.
mode : string
Valid values are 'ADD' or 'DEL'.
Returns
----------
None
Will always return None. There are numerous early returns in the cases
where the database update cannot proceed for any reason.
]
<ast.Try object at 0x7da1b28aff70>
if compare[name[tableName] <ast.NotIn object at 0x7da2590d7190> call[name[self]._tableDict.keys, parameter[]]] begin[:]
call[name[goodlogging].Log.Info, parameter[constant[DB], call[constant[Database update failed - unkown table name: {0}].format, parameter[name[tableName]]]]]
return[constant[None]]
variable[rowSelect] assign[=] list[[]]
for taget[name[column]] in starred[call[name[self]._tableDict][name[tableName]]] begin[:]
variable[colPatternList] assign[=] <ast.ListComp object at 0x7da1b28afd90>
call[name[colPatternList].append, parameter[constant[(?:$)]]]
variable[colPatternMatch] assign[=] call[constant[|].join, parameter[name[colPatternList]]]
variable[matchPattern] assign[=] call[constant[{0}.*?{1}=(.+?)\s*(?:{2})].format, parameter[name[tableName], name[column], name[colPatternMatch]]]
variable[match] assign[=] call[name[re].findall, parameter[name[matchPattern], name[response]]]
if compare[call[name[len], parameter[name[match]]] equal[==] constant[1]] begin[:]
call[name[rowSelect].append, parameter[tuple[[<ast.Name object at 0x7da1b28af490>, <ast.Subscript object at 0x7da1b28af430>]]]]
if compare[call[name[len], parameter[name[rowSelect]]] equal[==] constant[0]] begin[:]
call[name[goodlogging].Log.Info, parameter[constant[DB], constant[Database update failed - no row selection critera found in response]]]
return[constant[None]]
variable[rowCount] assign[=] call[name[self]._PrintDatabaseTable, parameter[name[tableName], name[rowSelect]]]
if compare[call[name[mode].upper, parameter[]] equal[==] constant[DEL]] begin[:]
if compare[name[rowCount] equal[==] constant[0]] begin[:]
call[name[goodlogging].Log.Info, parameter[constant[DB], call[constant[Database update failed - no rows found for given search critera: {0}].format, parameter[name[response]]]]]
return[constant[None]]
variable[deleteConfirmation] assign[=] call[name[goodlogging].Log.Input, parameter[constant[DB], call[constant[***WARNING*** DELETE THESE ROWS FROM {0} TABLE? [y/n]: ].format, parameter[name[tableName]]]]]
variable[deleteConfirmation] assign[=] call[name[util].ValidUserResponse, parameter[name[deleteConfirmation], tuple[[<ast.Constant object at 0x7da1b28ac580>, <ast.Constant object at 0x7da1b28acb50>]]]]
if compare[call[name[deleteConfirmation].lower, parameter[]] equal[==] constant[n]] begin[:]
call[name[goodlogging].Log.Info, parameter[constant[DB], constant[Database table row delete cancelled]]]
return[constant[None]]
variable[dbQuery] assign[=] binary_operation[binary_operation[call[constant[DELETE FROM {0}].format, parameter[name[tableName]]] + constant[ WHERE ]] + call[constant[ AND ].join, parameter[<ast.ListComp object at 0x7da1b28aeaa0>]]]
variable[dbQueryParams] assign[=] <ast.ListComp object at 0x7da1b28ae6b0>
call[name[self]._ActionDatabase, parameter[name[dbQuery], name[dbQueryParams]]]
call[name[goodlogging].Log.Info, parameter[constant[DB], call[constant[Deleted {0} row(s) from database table {0}:].format, parameter[name[rowCount], name[tableName]]]]]
call[name[self]._PrintDatabaseTable, parameter[name[tableName]]]
|
keyword[def] identifier[_UpdateDatabaseFromResponse] ( identifier[self] , identifier[response] , identifier[mode] ):
literal[string]
keyword[try] :
identifier[tableName] , identifier[tableColumns] = identifier[response] . identifier[split] ( literal[string] , literal[int] )
keyword[except] identifier[ValueError] :
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] )
keyword[return] keyword[None]
keyword[if] identifier[tableName] keyword[not] keyword[in] identifier[self] . identifier[_tableDict] . identifier[keys] ():
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[tableName] ))
keyword[return] keyword[None]
identifier[rowSelect] =[]
keyword[for] identifier[column] keyword[in] identifier[self] . identifier[_tableDict] [ identifier[tableName] ]:
identifier[colPatternList] =[ literal[string] . identifier[format] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[self] . identifier[_tableDict] [ identifier[tableName] ] keyword[if] identifier[i] != identifier[column] ]
identifier[colPatternList] . identifier[append] ( literal[string] )
identifier[colPatternMatch] = literal[string] . identifier[join] ( identifier[colPatternList] )
identifier[matchPattern] = literal[string] . identifier[format] ( identifier[tableName] , identifier[column] , identifier[colPatternMatch] )
identifier[match] = identifier[re] . identifier[findall] ( identifier[matchPattern] , identifier[response] )
keyword[if] identifier[len] ( identifier[match] )== literal[int] :
identifier[rowSelect] . identifier[append] (( identifier[column] , identifier[match] [ literal[int] ]))
keyword[elif] identifier[len] ( identifier[match] )> literal[int] :
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[tableName] , identifier[column] ))
keyword[return] keyword[None]
keyword[if] identifier[len] ( identifier[rowSelect] )== literal[int] :
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] )
keyword[return] keyword[None]
identifier[rowCount] = identifier[self] . identifier[_PrintDatabaseTable] ( identifier[tableName] , identifier[rowSelect] )
keyword[if] identifier[mode] . identifier[upper] ()== literal[string] :
keyword[if] identifier[rowCount] == literal[int] :
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[response] ))
keyword[return] keyword[None]
identifier[deleteConfirmation] = identifier[goodlogging] . identifier[Log] . identifier[Input] ( literal[string] , literal[string] . identifier[format] ( identifier[tableName] ))
identifier[deleteConfirmation] = identifier[util] . identifier[ValidUserResponse] ( identifier[deleteConfirmation] ,( literal[string] , literal[string] ))
keyword[if] identifier[deleteConfirmation] . identifier[lower] ()== literal[string] :
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] )
keyword[return] keyword[None]
identifier[dbQuery] = literal[string] . identifier[format] ( identifier[tableName] )+ literal[string] + literal[string] . identifier[join] ([ literal[string] . identifier[format] ( identifier[i] ) keyword[for] identifier[i] , identifier[j] keyword[in] identifier[rowSelect] ])
identifier[dbQueryParams] =[ identifier[j] keyword[for] identifier[i] , identifier[j] keyword[in] identifier[rowSelect] ]
identifier[self] . identifier[_ActionDatabase] ( identifier[dbQuery] , identifier[dbQueryParams] )
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[rowCount] , identifier[tableName] ))
keyword[elif] identifier[mode] . identifier[upper] ()== literal[string] :
keyword[if] identifier[rowCount] != literal[int] :
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[response] ))
keyword[return] keyword[None]
identifier[dbQuery] = literal[string] . identifier[format] ( identifier[tableName] )+ literal[string] . identifier[join] ([ literal[string] . identifier[format] ( identifier[i] ) keyword[for] identifier[i] , identifier[j] keyword[in] identifier[rowSelect] ])+ literal[string] + literal[string] . identifier[join] ([ literal[string] ]* identifier[len] ( identifier[rowSelect] ))+ literal[string]
identifier[dbQueryParams] =[ identifier[j] keyword[for] identifier[i] , identifier[j] keyword[in] identifier[rowSelect] ]
identifier[self] . identifier[_ActionDatabase] ( identifier[dbQuery] , identifier[dbQueryParams] )
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[tableName] ))
identifier[self] . identifier[_PrintDatabaseTable] ( identifier[tableName] )
|
def _UpdateDatabaseFromResponse(self, response, mode):
"""
Update database table given a user input in the form
"TABLENAME COL1=VAL1 COL2=VAL2".
Either ADD or DELETE from table depending on mode argument.
If the change succeeds the updated table is printed to stdout.
Parameters
----------
response : string
User input.
mode : string
Valid values are 'ADD' or 'DEL'.
Returns
----------
None
Will always return None. There are numerous early returns in the cases
where the database update cannot proceed for any reason.
"""
# Get tableName from user input (form TABLENAME COL1=VAL1 COL2=VAL2 etc)
try:
(tableName, tableColumns) = response.split(' ', 1) # depends on [control=['try'], data=[]]
except ValueError:
goodlogging.Log.Info('DB', 'Database update failed - failed to extract table name from response')
return None # depends on [control=['except'], data=[]]
# Check user input against known table list
if tableName not in self._tableDict.keys():
goodlogging.Log.Info('DB', 'Database update failed - unkown table name: {0}'.format(tableName))
return None # depends on [control=['if'], data=['tableName']]
# Build re pattern to extract column from user input (form TABLENAME COL1=VAL1 COL2=VAL2 etc)
rowSelect = []
for column in self._tableDict[tableName]:
colPatternList = ['(?:{0})'.format(i) for i in self._tableDict[tableName] if i != column]
colPatternList.append('(?:$)')
colPatternMatch = '|'.join(colPatternList)
matchPattern = '{0}.*?{1}=(.+?)\\s*(?:{2})'.format(tableName, column, colPatternMatch)
match = re.findall(matchPattern, response)
# Match should be in form [(VAL1, VAL2, VAL3, etc.)]
if len(match) == 1:
rowSelect.append((column, match[0])) # depends on [control=['if'], data=[]]
elif len(match) > 1:
goodlogging.Log.Info('DB', 'Database update failed - multiple matches found for table {0} column {1}'.format(tableName, column))
return None # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['column']]
if len(rowSelect) == 0:
goodlogging.Log.Info('DB', 'Database update failed - no row selection critera found in response')
return None # depends on [control=['if'], data=[]]
# Print selected rows
rowCount = self._PrintDatabaseTable(tableName, rowSelect)
# Do DELETE flow
if mode.upper() == 'DEL':
if rowCount == 0:
goodlogging.Log.Info('DB', 'Database update failed - no rows found for given search critera: {0}'.format(response))
return None # depends on [control=['if'], data=[]]
deleteConfirmation = goodlogging.Log.Input('DB', '***WARNING*** DELETE THESE ROWS FROM {0} TABLE? [y/n]: '.format(tableName))
deleteConfirmation = util.ValidUserResponse(deleteConfirmation, ('y', 'n'))
if deleteConfirmation.lower() == 'n':
goodlogging.Log.Info('DB', 'Database table row delete cancelled')
return None # depends on [control=['if'], data=[]]
# Build delete database query (form DELETE FROM TableName WHERE COL1=?, COL2=?)
dbQuery = 'DELETE FROM {0}'.format(tableName) + ' WHERE ' + ' AND '.join(['{0}=?'.format(i) for (i, j) in rowSelect])
dbQueryParams = [j for (i, j) in rowSelect]
self._ActionDatabase(dbQuery, dbQueryParams)
goodlogging.Log.Info('DB', 'Deleted {0} row(s) from database table {0}:'.format(rowCount, tableName)) # depends on [control=['if'], data=[]]
# Do ADD flow
elif mode.upper() == 'ADD':
if rowCount != 0:
goodlogging.Log.Info('DB', 'Database update failed - a row already exists for the given critera: {0}'.format(response))
return None # depends on [control=['if'], data=[]]
# Build insert database query (form INSERT INTO TableName (COL1, COL2) VALUES (?,?))
dbQuery = 'INSERT INTO {0} ('.format(tableName) + ', '.join(['{0}'.format(i) for (i, j) in rowSelect]) + ') VALUES (' + ', '.join(['?'] * len(rowSelect)) + ')'
dbQueryParams = [j for (i, j) in rowSelect]
self._ActionDatabase(dbQuery, dbQueryParams)
goodlogging.Log.Info('DB', 'Added row to database table {0}:'.format(tableName)) # depends on [control=['if'], data=[]]
# Print resulting database table
self._PrintDatabaseTable(tableName)
|
def interjoint_paths(self):
"""
Returns paths between the adjacent critical points
in the skeleton, where a critical point is the set of
terminal and branch points.
"""
paths = []
for tree in self.components():
subpaths = self._single_tree_interjoint_paths(tree)
paths.extend(subpaths)
return paths
|
def function[interjoint_paths, parameter[self]]:
constant[
Returns paths between the adjacent critical points
in the skeleton, where a critical point is the set of
terminal and branch points.
]
variable[paths] assign[=] list[[]]
for taget[name[tree]] in starred[call[name[self].components, parameter[]]] begin[:]
variable[subpaths] assign[=] call[name[self]._single_tree_interjoint_paths, parameter[name[tree]]]
call[name[paths].extend, parameter[name[subpaths]]]
return[name[paths]]
|
keyword[def] identifier[interjoint_paths] ( identifier[self] ):
literal[string]
identifier[paths] =[]
keyword[for] identifier[tree] keyword[in] identifier[self] . identifier[components] ():
identifier[subpaths] = identifier[self] . identifier[_single_tree_interjoint_paths] ( identifier[tree] )
identifier[paths] . identifier[extend] ( identifier[subpaths] )
keyword[return] identifier[paths]
|
def interjoint_paths(self):
"""
Returns paths between the adjacent critical points
in the skeleton, where a critical point is the set of
terminal and branch points.
"""
paths = []
for tree in self.components():
subpaths = self._single_tree_interjoint_paths(tree)
paths.extend(subpaths) # depends on [control=['for'], data=['tree']]
return paths
|
def path(self):
"""
Serve gzip file if client accept it.
Generate or update the gzip file if needed.
"""
path = self._path()
statobj = os.stat(path)
ae = self.request.META.get('HTTP_ACCEPT_ENCODING', '')
if re_accepts_gzip.search(ae) and getattr(settings, 'UMAP_GZIP', True):
gzip_path = "{path}{ext}".format(path=path, ext=self.EXT)
up_to_date = True
if not os.path.exists(gzip_path):
up_to_date = False
else:
gzip_statobj = os.stat(gzip_path)
if statobj.st_mtime > gzip_statobj.st_mtime:
up_to_date = False
if not up_to_date:
gzip_file(path, gzip_path)
path = gzip_path
return path
|
def function[path, parameter[self]]:
constant[
Serve gzip file if client accept it.
Generate or update the gzip file if needed.
]
variable[path] assign[=] call[name[self]._path, parameter[]]
variable[statobj] assign[=] call[name[os].stat, parameter[name[path]]]
variable[ae] assign[=] call[name[self].request.META.get, parameter[constant[HTTP_ACCEPT_ENCODING], constant[]]]
if <ast.BoolOp object at 0x7da1b208bcd0> begin[:]
variable[gzip_path] assign[=] call[constant[{path}{ext}].format, parameter[]]
variable[up_to_date] assign[=] constant[True]
if <ast.UnaryOp object at 0x7da1b208bfd0> begin[:]
variable[up_to_date] assign[=] constant[False]
if <ast.UnaryOp object at 0x7da1b20895d0> begin[:]
call[name[gzip_file], parameter[name[path], name[gzip_path]]]
variable[path] assign[=] name[gzip_path]
return[name[path]]
|
keyword[def] identifier[path] ( identifier[self] ):
literal[string]
identifier[path] = identifier[self] . identifier[_path] ()
identifier[statobj] = identifier[os] . identifier[stat] ( identifier[path] )
identifier[ae] = identifier[self] . identifier[request] . identifier[META] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[re_accepts_gzip] . identifier[search] ( identifier[ae] ) keyword[and] identifier[getattr] ( identifier[settings] , literal[string] , keyword[True] ):
identifier[gzip_path] = literal[string] . identifier[format] ( identifier[path] = identifier[path] , identifier[ext] = identifier[self] . identifier[EXT] )
identifier[up_to_date] = keyword[True]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[gzip_path] ):
identifier[up_to_date] = keyword[False]
keyword[else] :
identifier[gzip_statobj] = identifier[os] . identifier[stat] ( identifier[gzip_path] )
keyword[if] identifier[statobj] . identifier[st_mtime] > identifier[gzip_statobj] . identifier[st_mtime] :
identifier[up_to_date] = keyword[False]
keyword[if] keyword[not] identifier[up_to_date] :
identifier[gzip_file] ( identifier[path] , identifier[gzip_path] )
identifier[path] = identifier[gzip_path]
keyword[return] identifier[path]
|
def path(self):
"""
Serve gzip file if client accept it.
Generate or update the gzip file if needed.
"""
path = self._path()
statobj = os.stat(path)
ae = self.request.META.get('HTTP_ACCEPT_ENCODING', '')
if re_accepts_gzip.search(ae) and getattr(settings, 'UMAP_GZIP', True):
gzip_path = '{path}{ext}'.format(path=path, ext=self.EXT)
up_to_date = True
if not os.path.exists(gzip_path):
up_to_date = False # depends on [control=['if'], data=[]]
else:
gzip_statobj = os.stat(gzip_path)
if statobj.st_mtime > gzip_statobj.st_mtime:
up_to_date = False # depends on [control=['if'], data=[]]
if not up_to_date:
gzip_file(path, gzip_path) # depends on [control=['if'], data=[]]
path = gzip_path # depends on [control=['if'], data=[]]
return path
|
def END(self):
"""END state."""
logger.debug('In state: END')
self.current_state = STATE_END
if self.script is not None:
self.script.script_init(self.client.lease, self.current_state)
self.script.script_go()
else:
set_net(self.client.lease)
return
|
def function[END, parameter[self]]:
constant[END state.]
call[name[logger].debug, parameter[constant[In state: END]]]
name[self].current_state assign[=] name[STATE_END]
if compare[name[self].script is_not constant[None]] begin[:]
call[name[self].script.script_init, parameter[name[self].client.lease, name[self].current_state]]
call[name[self].script.script_go, parameter[]]
return[None]
|
keyword[def] identifier[END] ( identifier[self] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[current_state] = identifier[STATE_END]
keyword[if] identifier[self] . identifier[script] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[script] . identifier[script_init] ( identifier[self] . identifier[client] . identifier[lease] , identifier[self] . identifier[current_state] )
identifier[self] . identifier[script] . identifier[script_go] ()
keyword[else] :
identifier[set_net] ( identifier[self] . identifier[client] . identifier[lease] )
keyword[return]
|
def END(self):
"""END state."""
logger.debug('In state: END')
self.current_state = STATE_END
if self.script is not None:
self.script.script_init(self.client.lease, self.current_state)
self.script.script_go() # depends on [control=['if'], data=[]]
else:
set_net(self.client.lease)
return
|
def match_any_learning_objective(self, match):
"""Matches an item with any objective.
arg: match (boolean): ``true`` to match items with any
learning objective, ``false`` to match items with no
learning objectives
*compliance: mandatory -- This method must be implemented.*
"""
match_key = 'learningObjectiveIds'
param = '$exists'
if match:
flag = 'true'
else:
flag = 'false'
if match_key in self._my_osid_query._query_terms:
self._my_osid_query._query_terms[match_key][param] = flag
else:
self._my_osid_query._query_terms[match_key] = {param: flag}
self._my_osid_query._query_terms[match_key]['$nin'] = [[], ['']]
|
def function[match_any_learning_objective, parameter[self, match]]:
constant[Matches an item with any objective.
arg: match (boolean): ``true`` to match items with any
learning objective, ``false`` to match items with no
learning objectives
*compliance: mandatory -- This method must be implemented.*
]
variable[match_key] assign[=] constant[learningObjectiveIds]
variable[param] assign[=] constant[$exists]
if name[match] begin[:]
variable[flag] assign[=] constant[true]
if compare[name[match_key] in name[self]._my_osid_query._query_terms] begin[:]
call[call[name[self]._my_osid_query._query_terms][name[match_key]]][name[param]] assign[=] name[flag]
call[call[name[self]._my_osid_query._query_terms][name[match_key]]][constant[$nin]] assign[=] list[[<ast.List object at 0x7da18f58e740>, <ast.List object at 0x7da18f58e4a0>]]
|
keyword[def] identifier[match_any_learning_objective] ( identifier[self] , identifier[match] ):
literal[string]
identifier[match_key] = literal[string]
identifier[param] = literal[string]
keyword[if] identifier[match] :
identifier[flag] = literal[string]
keyword[else] :
identifier[flag] = literal[string]
keyword[if] identifier[match_key] keyword[in] identifier[self] . identifier[_my_osid_query] . identifier[_query_terms] :
identifier[self] . identifier[_my_osid_query] . identifier[_query_terms] [ identifier[match_key] ][ identifier[param] ]= identifier[flag]
keyword[else] :
identifier[self] . identifier[_my_osid_query] . identifier[_query_terms] [ identifier[match_key] ]={ identifier[param] : identifier[flag] }
identifier[self] . identifier[_my_osid_query] . identifier[_query_terms] [ identifier[match_key] ][ literal[string] ]=[[],[ literal[string] ]]
|
def match_any_learning_objective(self, match):
"""Matches an item with any objective.
arg: match (boolean): ``true`` to match items with any
learning objective, ``false`` to match items with no
learning objectives
*compliance: mandatory -- This method must be implemented.*
"""
match_key = 'learningObjectiveIds'
param = '$exists'
if match:
flag = 'true' # depends on [control=['if'], data=[]]
else:
flag = 'false'
if match_key in self._my_osid_query._query_terms:
self._my_osid_query._query_terms[match_key][param] = flag # depends on [control=['if'], data=['match_key']]
else:
self._my_osid_query._query_terms[match_key] = {param: flag}
self._my_osid_query._query_terms[match_key]['$nin'] = [[], ['']]
|
def validate_units(self, value):
"""Validate units, assuming that it was called by _validate_type_*."""
self.validate_quantity(value)
self.units_type = inspect.stack()[1][3].split('_')[-1]
assert self.units_type, ("`validate_units` should not be called "
"directly. It should be called by a "
"_validate_type_* methods that sets "
"`units_type`")
units = getattr(pq, self.units_map[self.units_type])
if not value.simplified.units == units:
self._error('%s' % value,
"Must have dimensions of %s." % self.units_type)
return True
|
def function[validate_units, parameter[self, value]]:
constant[Validate units, assuming that it was called by _validate_type_*.]
call[name[self].validate_quantity, parameter[name[value]]]
name[self].units_type assign[=] call[call[call[call[call[name[inspect].stack, parameter[]]][constant[1]]][constant[3]].split, parameter[constant[_]]]][<ast.UnaryOp object at 0x7da1b0e17460>]
assert[name[self].units_type]
variable[units] assign[=] call[name[getattr], parameter[name[pq], call[name[self].units_map][name[self].units_type]]]
if <ast.UnaryOp object at 0x7da1b0e14310> begin[:]
call[name[self]._error, parameter[binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> name[value]], binary_operation[constant[Must have dimensions of %s.] <ast.Mod object at 0x7da2590d6920> name[self].units_type]]]
return[constant[True]]
|
keyword[def] identifier[validate_units] ( identifier[self] , identifier[value] ):
literal[string]
identifier[self] . identifier[validate_quantity] ( identifier[value] )
identifier[self] . identifier[units_type] = identifier[inspect] . identifier[stack] ()[ literal[int] ][ literal[int] ]. identifier[split] ( literal[string] )[- literal[int] ]
keyword[assert] identifier[self] . identifier[units_type] ,( literal[string]
literal[string]
literal[string]
literal[string] )
identifier[units] = identifier[getattr] ( identifier[pq] , identifier[self] . identifier[units_map] [ identifier[self] . identifier[units_type] ])
keyword[if] keyword[not] identifier[value] . identifier[simplified] . identifier[units] == identifier[units] :
identifier[self] . identifier[_error] ( literal[string] % identifier[value] ,
literal[string] % identifier[self] . identifier[units_type] )
keyword[return] keyword[True]
|
def validate_units(self, value):
"""Validate units, assuming that it was called by _validate_type_*."""
self.validate_quantity(value)
self.units_type = inspect.stack()[1][3].split('_')[-1]
assert self.units_type, '`validate_units` should not be called directly. It should be called by a _validate_type_* methods that sets `units_type`'
units = getattr(pq, self.units_map[self.units_type])
if not value.simplified.units == units:
self._error('%s' % value, 'Must have dimensions of %s.' % self.units_type) # depends on [control=['if'], data=[]]
return True
|
def import_submodules(package, recursive=True):
""" Import all submodules of a module, recursively, including subpackages
:param package: package (name or actual module)
:type package: str | module
:rtype: dict[str, types.ModuleType]
"""
if isinstance(package, str):
package = importlib.import_module(package)
results = {}
for loader, name, is_pkg in pkgutil.walk_packages(package.__path__):
full_name = package.__name__ + "." + name
results[name] = importlib.import_module(full_name)
if recursive and is_pkg:
results.update(import_submodules(full_name))
return results
|
def function[import_submodules, parameter[package, recursive]]:
constant[ Import all submodules of a module, recursively, including subpackages
:param package: package (name or actual module)
:type package: str | module
:rtype: dict[str, types.ModuleType]
]
if call[name[isinstance], parameter[name[package], name[str]]] begin[:]
variable[package] assign[=] call[name[importlib].import_module, parameter[name[package]]]
variable[results] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da18eb55d50>, <ast.Name object at 0x7da18eb55ea0>, <ast.Name object at 0x7da18eb54d90>]]] in starred[call[name[pkgutil].walk_packages, parameter[name[package].__path__]]] begin[:]
variable[full_name] assign[=] binary_operation[binary_operation[name[package].__name__ + constant[.]] + name[name]]
call[name[results]][name[name]] assign[=] call[name[importlib].import_module, parameter[name[full_name]]]
if <ast.BoolOp object at 0x7da18eb545e0> begin[:]
call[name[results].update, parameter[call[name[import_submodules], parameter[name[full_name]]]]]
return[name[results]]
|
keyword[def] identifier[import_submodules] ( identifier[package] , identifier[recursive] = keyword[True] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[package] , identifier[str] ):
identifier[package] = identifier[importlib] . identifier[import_module] ( identifier[package] )
identifier[results] ={}
keyword[for] identifier[loader] , identifier[name] , identifier[is_pkg] keyword[in] identifier[pkgutil] . identifier[walk_packages] ( identifier[package] . identifier[__path__] ):
identifier[full_name] = identifier[package] . identifier[__name__] + literal[string] + identifier[name]
identifier[results] [ identifier[name] ]= identifier[importlib] . identifier[import_module] ( identifier[full_name] )
keyword[if] identifier[recursive] keyword[and] identifier[is_pkg] :
identifier[results] . identifier[update] ( identifier[import_submodules] ( identifier[full_name] ))
keyword[return] identifier[results]
|
def import_submodules(package, recursive=True):
""" Import all submodules of a module, recursively, including subpackages
:param package: package (name or actual module)
:type package: str | module
:rtype: dict[str, types.ModuleType]
"""
if isinstance(package, str):
package = importlib.import_module(package) # depends on [control=['if'], data=[]]
results = {}
for (loader, name, is_pkg) in pkgutil.walk_packages(package.__path__):
full_name = package.__name__ + '.' + name
results[name] = importlib.import_module(full_name)
if recursive and is_pkg:
results.update(import_submodules(full_name)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return results
|
def _assemble_gap(self, stmt):
"""Example: act(p(HGNC:RASA1), ma(gap)) =| act(p(HGNC:KRAS), ma(gtp))"""
gap = deepcopy(stmt.gap)
gap.activity = ActivityCondition('gap', True)
ras = deepcopy(stmt.ras)
ras.activity = ActivityCondition('gtpbound', True)
self._add_nodes_edges(gap, ras, pc.DIRECTLY_DECREASES, stmt.evidence)
|
def function[_assemble_gap, parameter[self, stmt]]:
constant[Example: act(p(HGNC:RASA1), ma(gap)) =| act(p(HGNC:KRAS), ma(gtp))]
variable[gap] assign[=] call[name[deepcopy], parameter[name[stmt].gap]]
name[gap].activity assign[=] call[name[ActivityCondition], parameter[constant[gap], constant[True]]]
variable[ras] assign[=] call[name[deepcopy], parameter[name[stmt].ras]]
name[ras].activity assign[=] call[name[ActivityCondition], parameter[constant[gtpbound], constant[True]]]
call[name[self]._add_nodes_edges, parameter[name[gap], name[ras], name[pc].DIRECTLY_DECREASES, name[stmt].evidence]]
|
keyword[def] identifier[_assemble_gap] ( identifier[self] , identifier[stmt] ):
literal[string]
identifier[gap] = identifier[deepcopy] ( identifier[stmt] . identifier[gap] )
identifier[gap] . identifier[activity] = identifier[ActivityCondition] ( literal[string] , keyword[True] )
identifier[ras] = identifier[deepcopy] ( identifier[stmt] . identifier[ras] )
identifier[ras] . identifier[activity] = identifier[ActivityCondition] ( literal[string] , keyword[True] )
identifier[self] . identifier[_add_nodes_edges] ( identifier[gap] , identifier[ras] , identifier[pc] . identifier[DIRECTLY_DECREASES] , identifier[stmt] . identifier[evidence] )
|
def _assemble_gap(self, stmt):
"""Example: act(p(HGNC:RASA1), ma(gap)) =| act(p(HGNC:KRAS), ma(gtp))"""
gap = deepcopy(stmt.gap)
gap.activity = ActivityCondition('gap', True)
ras = deepcopy(stmt.ras)
ras.activity = ActivityCondition('gtpbound', True)
self._add_nodes_edges(gap, ras, pc.DIRECTLY_DECREASES, stmt.evidence)
|
def map_version(self, requirement, local_version):
"""
Maps a local version name to one recognised by the Requirement class
Parameters
----------
requirement : str
Name of the requirement
version : str
version string
"""
if isinstance(self._versions_map, dict):
version = self._versions_map.get(requirement, {}).get(
local_version, local_version)
else:
version = self._versions_map(requirement, local_version)
return version
|
def function[map_version, parameter[self, requirement, local_version]]:
constant[
Maps a local version name to one recognised by the Requirement class
Parameters
----------
requirement : str
Name of the requirement
version : str
version string
]
if call[name[isinstance], parameter[name[self]._versions_map, name[dict]]] begin[:]
variable[version] assign[=] call[call[name[self]._versions_map.get, parameter[name[requirement], dictionary[[], []]]].get, parameter[name[local_version], name[local_version]]]
return[name[version]]
|
keyword[def] identifier[map_version] ( identifier[self] , identifier[requirement] , identifier[local_version] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[self] . identifier[_versions_map] , identifier[dict] ):
identifier[version] = identifier[self] . identifier[_versions_map] . identifier[get] ( identifier[requirement] ,{}). identifier[get] (
identifier[local_version] , identifier[local_version] )
keyword[else] :
identifier[version] = identifier[self] . identifier[_versions_map] ( identifier[requirement] , identifier[local_version] )
keyword[return] identifier[version]
|
def map_version(self, requirement, local_version):
"""
Maps a local version name to one recognised by the Requirement class
Parameters
----------
requirement : str
Name of the requirement
version : str
version string
"""
if isinstance(self._versions_map, dict):
version = self._versions_map.get(requirement, {}).get(local_version, local_version) # depends on [control=['if'], data=[]]
else:
version = self._versions_map(requirement, local_version)
return version
|
def rweibull(alpha, beta, size=None):
"""
Weibull random variates.
"""
tmp = -np.log(runiform(0, 1, size))
return beta * (tmp ** (1. / alpha))
|
def function[rweibull, parameter[alpha, beta, size]]:
constant[
Weibull random variates.
]
variable[tmp] assign[=] <ast.UnaryOp object at 0x7da2041da2c0>
return[binary_operation[name[beta] * binary_operation[name[tmp] ** binary_operation[constant[1.0] / name[alpha]]]]]
|
keyword[def] identifier[rweibull] ( identifier[alpha] , identifier[beta] , identifier[size] = keyword[None] ):
literal[string]
identifier[tmp] =- identifier[np] . identifier[log] ( identifier[runiform] ( literal[int] , literal[int] , identifier[size] ))
keyword[return] identifier[beta] *( identifier[tmp] **( literal[int] / identifier[alpha] ))
|
def rweibull(alpha, beta, size=None):
"""
Weibull random variates.
"""
tmp = -np.log(runiform(0, 1, size))
return beta * tmp ** (1.0 / alpha)
|
def v_unique_name_children(ctx, stmt):
"""Make sure that each child of stmt has a unique name"""
def sort_pos(p1, p2):
if p1.line < p2.line:
return (p1,p2)
else:
return (p2,p1)
dict = {}
chs = stmt.i_children
def check(c):
key = (c.i_module.i_modulename, c.arg)
if key in dict:
dup = dict[key]
(minpos, maxpos) = sort_pos(c.pos, dup.pos)
pos = chk_uses_pos(c, maxpos)
err_add(ctx.errors, pos,
'DUPLICATE_CHILD_NAME', (stmt.arg, stmt.pos, c.arg, minpos))
else:
dict[key] = c
# also check all data nodes in the cases
if c.keyword == 'choice':
for case in c.i_children:
for cc in case.i_children:
check(cc)
for c in chs:
check(c)
|
def function[v_unique_name_children, parameter[ctx, stmt]]:
constant[Make sure that each child of stmt has a unique name]
def function[sort_pos, parameter[p1, p2]]:
if compare[name[p1].line less[<] name[p2].line] begin[:]
return[tuple[[<ast.Name object at 0x7da20e9b0bb0>, <ast.Name object at 0x7da20e9b1a20>]]]
variable[dict] assign[=] dictionary[[], []]
variable[chs] assign[=] name[stmt].i_children
def function[check, parameter[c]]:
variable[key] assign[=] tuple[[<ast.Attribute object at 0x7da20e9b39a0>, <ast.Attribute object at 0x7da20e9b01c0>]]
if compare[name[key] in name[dict]] begin[:]
variable[dup] assign[=] call[name[dict]][name[key]]
<ast.Tuple object at 0x7da20e9b3d00> assign[=] call[name[sort_pos], parameter[name[c].pos, name[dup].pos]]
variable[pos] assign[=] call[name[chk_uses_pos], parameter[name[c], name[maxpos]]]
call[name[err_add], parameter[name[ctx].errors, name[pos], constant[DUPLICATE_CHILD_NAME], tuple[[<ast.Attribute object at 0x7da20e9b39d0>, <ast.Attribute object at 0x7da20e9b0280>, <ast.Attribute object at 0x7da20e9b0dc0>, <ast.Name object at 0x7da20e9b1810>]]]]
if compare[name[c].keyword equal[==] constant[choice]] begin[:]
for taget[name[case]] in starred[name[c].i_children] begin[:]
for taget[name[cc]] in starred[name[case].i_children] begin[:]
call[name[check], parameter[name[cc]]]
for taget[name[c]] in starred[name[chs]] begin[:]
call[name[check], parameter[name[c]]]
|
keyword[def] identifier[v_unique_name_children] ( identifier[ctx] , identifier[stmt] ):
literal[string]
keyword[def] identifier[sort_pos] ( identifier[p1] , identifier[p2] ):
keyword[if] identifier[p1] . identifier[line] < identifier[p2] . identifier[line] :
keyword[return] ( identifier[p1] , identifier[p2] )
keyword[else] :
keyword[return] ( identifier[p2] , identifier[p1] )
identifier[dict] ={}
identifier[chs] = identifier[stmt] . identifier[i_children]
keyword[def] identifier[check] ( identifier[c] ):
identifier[key] =( identifier[c] . identifier[i_module] . identifier[i_modulename] , identifier[c] . identifier[arg] )
keyword[if] identifier[key] keyword[in] identifier[dict] :
identifier[dup] = identifier[dict] [ identifier[key] ]
( identifier[minpos] , identifier[maxpos] )= identifier[sort_pos] ( identifier[c] . identifier[pos] , identifier[dup] . identifier[pos] )
identifier[pos] = identifier[chk_uses_pos] ( identifier[c] , identifier[maxpos] )
identifier[err_add] ( identifier[ctx] . identifier[errors] , identifier[pos] ,
literal[string] ,( identifier[stmt] . identifier[arg] , identifier[stmt] . identifier[pos] , identifier[c] . identifier[arg] , identifier[minpos] ))
keyword[else] :
identifier[dict] [ identifier[key] ]= identifier[c]
keyword[if] identifier[c] . identifier[keyword] == literal[string] :
keyword[for] identifier[case] keyword[in] identifier[c] . identifier[i_children] :
keyword[for] identifier[cc] keyword[in] identifier[case] . identifier[i_children] :
identifier[check] ( identifier[cc] )
keyword[for] identifier[c] keyword[in] identifier[chs] :
identifier[check] ( identifier[c] )
|
def v_unique_name_children(ctx, stmt):
"""Make sure that each child of stmt has a unique name"""
def sort_pos(p1, p2):
if p1.line < p2.line:
return (p1, p2) # depends on [control=['if'], data=[]]
else:
return (p2, p1)
dict = {}
chs = stmt.i_children
def check(c):
key = (c.i_module.i_modulename, c.arg)
if key in dict:
dup = dict[key]
(minpos, maxpos) = sort_pos(c.pos, dup.pos)
pos = chk_uses_pos(c, maxpos)
err_add(ctx.errors, pos, 'DUPLICATE_CHILD_NAME', (stmt.arg, stmt.pos, c.arg, minpos)) # depends on [control=['if'], data=['key', 'dict']]
else:
dict[key] = c
# also check all data nodes in the cases
if c.keyword == 'choice':
for case in c.i_children:
for cc in case.i_children:
check(cc) # depends on [control=['for'], data=['cc']] # depends on [control=['for'], data=['case']] # depends on [control=['if'], data=[]]
for c in chs:
check(c) # depends on [control=['for'], data=['c']]
|
def r_annotations(self):
""" Route to retrieve annotations by target
:param target_urn: The CTS URN for which to retrieve annotations
:type target_urn: str
:return: a JSON string containing count and list of resources
:rtype: {str: Any}
"""
target = request.args.get("target", None)
wildcard = request.args.get("wildcard", ".", type=str)
include = request.args.get("include")
exclude = request.args.get("exclude")
limit = request.args.get("limit", None, type=int)
start = request.args.get("start", 1, type=int)
expand = request.args.get("expand", False, type=bool)
if target:
try:
urn = MyCapytain.common.reference.URN(target)
except ValueError:
return "invalid urn", 400
count, annotations = self.__queryinterface__.getAnnotations(urn, wildcard=wildcard, include=include,
exclude=exclude, limit=limit, start=start,
expand=expand)
else:
# Note that this implementation is not done for too much annotations
# because we do not implement pagination here
count, annotations = self.__queryinterface__.getAnnotations(None, limit=limit, start=start, expand=expand)
mapped = []
response = {
"@context": type(self).JSONLD_CONTEXT,
"id": url_for(".r_annotations", start=start, limit=limit),
"type": "AnnotationCollection",
"startIndex": start,
"items": [
],
"total": count
}
for a in annotations:
mapped.append({
"id": url_for(".r_annotation", sha=a.sha),
"body": url_for(".r_annotation_body", sha=a.sha),
"type": "Annotation",
"target": a.target.to_json(),
"dc:type": a.type_uri,
"owl:sameAs": [a.uri],
"nemo:slug": a.slug
})
response["items"] = mapped
response = jsonify(response)
return response
|
def function[r_annotations, parameter[self]]:
constant[ Route to retrieve annotations by target
:param target_urn: The CTS URN for which to retrieve annotations
:type target_urn: str
:return: a JSON string containing count and list of resources
:rtype: {str: Any}
]
variable[target] assign[=] call[name[request].args.get, parameter[constant[target], constant[None]]]
variable[wildcard] assign[=] call[name[request].args.get, parameter[constant[wildcard], constant[.]]]
variable[include] assign[=] call[name[request].args.get, parameter[constant[include]]]
variable[exclude] assign[=] call[name[request].args.get, parameter[constant[exclude]]]
variable[limit] assign[=] call[name[request].args.get, parameter[constant[limit], constant[None]]]
variable[start] assign[=] call[name[request].args.get, parameter[constant[start], constant[1]]]
variable[expand] assign[=] call[name[request].args.get, parameter[constant[expand], constant[False]]]
if name[target] begin[:]
<ast.Try object at 0x7da1b0023dc0>
<ast.Tuple object at 0x7da1b0022560> assign[=] call[name[self].__queryinterface__.getAnnotations, parameter[name[urn]]]
variable[mapped] assign[=] list[[]]
variable[response] assign[=] dictionary[[<ast.Constant object at 0x7da1b00232b0>, <ast.Constant object at 0x7da1b0021cc0>, <ast.Constant object at 0x7da1b0021900>, <ast.Constant object at 0x7da1b0023a00>, <ast.Constant object at 0x7da1b0020e50>, <ast.Constant object at 0x7da1b0022530>], [<ast.Attribute object at 0x7da1b0021ae0>, <ast.Call object at 0x7da1b00922c0>, <ast.Constant object at 0x7da1b0091960>, <ast.Name object at 0x7da1b0091900>, <ast.List object at 0x7da1b0091810>, <ast.Name object at 0x7da1b00918a0>]]
for taget[name[a]] in starred[name[annotations]] begin[:]
call[name[mapped].append, parameter[dictionary[[<ast.Constant object at 0x7da1b0091570>, <ast.Constant object at 0x7da1b0091540>, <ast.Constant object at 0x7da1b0090460>, <ast.Constant object at 0x7da1b0090490>, <ast.Constant object at 0x7da1b00904c0>, <ast.Constant object at 0x7da1b0090400>, <ast.Constant object at 0x7da1b0090430>], [<ast.Call object at 0x7da1b00904f0>, <ast.Call object at 0x7da1b0090520>, <ast.Constant object at 0x7da1b0093eb0>, <ast.Call object at 0x7da1b0093ee0>, <ast.Attribute object at 0x7da1b0093f70>, <ast.List object at 0x7da1b0092cb0>, <ast.Attribute object at 0x7da1b0092d40>]]]]
call[name[response]][constant[items]] assign[=] name[mapped]
variable[response] assign[=] call[name[jsonify], parameter[name[response]]]
return[name[response]]
|
keyword[def] identifier[r_annotations] ( identifier[self] ):
literal[string]
identifier[target] = identifier[request] . identifier[args] . identifier[get] ( literal[string] , keyword[None] )
identifier[wildcard] = identifier[request] . identifier[args] . identifier[get] ( literal[string] , literal[string] , identifier[type] = identifier[str] )
identifier[include] = identifier[request] . identifier[args] . identifier[get] ( literal[string] )
identifier[exclude] = identifier[request] . identifier[args] . identifier[get] ( literal[string] )
identifier[limit] = identifier[request] . identifier[args] . identifier[get] ( literal[string] , keyword[None] , identifier[type] = identifier[int] )
identifier[start] = identifier[request] . identifier[args] . identifier[get] ( literal[string] , literal[int] , identifier[type] = identifier[int] )
identifier[expand] = identifier[request] . identifier[args] . identifier[get] ( literal[string] , keyword[False] , identifier[type] = identifier[bool] )
keyword[if] identifier[target] :
keyword[try] :
identifier[urn] = identifier[MyCapytain] . identifier[common] . identifier[reference] . identifier[URN] ( identifier[target] )
keyword[except] identifier[ValueError] :
keyword[return] literal[string] , literal[int]
identifier[count] , identifier[annotations] = identifier[self] . identifier[__queryinterface__] . identifier[getAnnotations] ( identifier[urn] , identifier[wildcard] = identifier[wildcard] , identifier[include] = identifier[include] ,
identifier[exclude] = identifier[exclude] , identifier[limit] = identifier[limit] , identifier[start] = identifier[start] ,
identifier[expand] = identifier[expand] )
keyword[else] :
identifier[count] , identifier[annotations] = identifier[self] . identifier[__queryinterface__] . identifier[getAnnotations] ( keyword[None] , identifier[limit] = identifier[limit] , identifier[start] = identifier[start] , identifier[expand] = identifier[expand] )
identifier[mapped] =[]
identifier[response] ={
literal[string] : identifier[type] ( identifier[self] ). identifier[JSONLD_CONTEXT] ,
literal[string] : identifier[url_for] ( literal[string] , identifier[start] = identifier[start] , identifier[limit] = identifier[limit] ),
literal[string] : literal[string] ,
literal[string] : identifier[start] ,
literal[string] :[
],
literal[string] : identifier[count]
}
keyword[for] identifier[a] keyword[in] identifier[annotations] :
identifier[mapped] . identifier[append] ({
literal[string] : identifier[url_for] ( literal[string] , identifier[sha] = identifier[a] . identifier[sha] ),
literal[string] : identifier[url_for] ( literal[string] , identifier[sha] = identifier[a] . identifier[sha] ),
literal[string] : literal[string] ,
literal[string] : identifier[a] . identifier[target] . identifier[to_json] (),
literal[string] : identifier[a] . identifier[type_uri] ,
literal[string] :[ identifier[a] . identifier[uri] ],
literal[string] : identifier[a] . identifier[slug]
})
identifier[response] [ literal[string] ]= identifier[mapped]
identifier[response] = identifier[jsonify] ( identifier[response] )
keyword[return] identifier[response]
|
def r_annotations(self):
""" Route to retrieve annotations by target
:param target_urn: The CTS URN for which to retrieve annotations
:type target_urn: str
:return: a JSON string containing count and list of resources
:rtype: {str: Any}
"""
target = request.args.get('target', None)
wildcard = request.args.get('wildcard', '.', type=str)
include = request.args.get('include')
exclude = request.args.get('exclude')
limit = request.args.get('limit', None, type=int)
start = request.args.get('start', 1, type=int)
expand = request.args.get('expand', False, type=bool)
if target:
try:
urn = MyCapytain.common.reference.URN(target) # depends on [control=['try'], data=[]]
except ValueError:
return ('invalid urn', 400) # depends on [control=['except'], data=[]]
(count, annotations) = self.__queryinterface__.getAnnotations(urn, wildcard=wildcard, include=include, exclude=exclude, limit=limit, start=start, expand=expand) # depends on [control=['if'], data=[]]
else:
# Note that this implementation is not done for too much annotations
# because we do not implement pagination here
(count, annotations) = self.__queryinterface__.getAnnotations(None, limit=limit, start=start, expand=expand)
mapped = []
response = {'@context': type(self).JSONLD_CONTEXT, 'id': url_for('.r_annotations', start=start, limit=limit), 'type': 'AnnotationCollection', 'startIndex': start, 'items': [], 'total': count}
for a in annotations:
mapped.append({'id': url_for('.r_annotation', sha=a.sha), 'body': url_for('.r_annotation_body', sha=a.sha), 'type': 'Annotation', 'target': a.target.to_json(), 'dc:type': a.type_uri, 'owl:sameAs': [a.uri], 'nemo:slug': a.slug}) # depends on [control=['for'], data=['a']]
response['items'] = mapped
response = jsonify(response)
return response
|
def credentials_required(method_func):
"""
Decorator for methods that checks that the client has credentials.
Throws a CredentialsMissingError when they are absent.
"""
def _checkcredentials(self, *args, **kwargs):
if self.username and self.password:
return method_func(self, *args, **kwargs)
else:
raise CredentialsMissingError("This is a private method. \
You must provide a username and password when you initialize the \
DocumentCloud client to attempt this type of request.")
return wraps(method_func)(_checkcredentials)
|
def function[credentials_required, parameter[method_func]]:
constant[
Decorator for methods that checks that the client has credentials.
Throws a CredentialsMissingError when they are absent.
]
def function[_checkcredentials, parameter[self]]:
if <ast.BoolOp object at 0x7da204567100> begin[:]
return[call[name[method_func], parameter[name[self], <ast.Starred object at 0x7da2043463b0>]]]
return[call[call[name[wraps], parameter[name[method_func]]], parameter[name[_checkcredentials]]]]
|
keyword[def] identifier[credentials_required] ( identifier[method_func] ):
literal[string]
keyword[def] identifier[_checkcredentials] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
keyword[if] identifier[self] . identifier[username] keyword[and] identifier[self] . identifier[password] :
keyword[return] identifier[method_func] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] )
keyword[else] :
keyword[raise] identifier[CredentialsMissingError] ( literal[string] )
keyword[return] identifier[wraps] ( identifier[method_func] )( identifier[_checkcredentials] )
|
def credentials_required(method_func):
"""
Decorator for methods that checks that the client has credentials.
Throws a CredentialsMissingError when they are absent.
"""
def _checkcredentials(self, *args, **kwargs):
if self.username and self.password:
return method_func(self, *args, **kwargs) # depends on [control=['if'], data=[]]
else:
raise CredentialsMissingError('This is a private method. You must provide a username and password when you initialize the DocumentCloud client to attempt this type of request.')
return wraps(method_func)(_checkcredentials)
|
def check_number_available(self, id_environment, num_vlan, id_vlan):
"""Checking if environment has a number vlan available
:param id_environment: Identifier of environment
:param num_vlan: Vlan number
:param id_vlan: Vlan indentifier (False if inserting a vlan)
:return: True is has number available, False if hasn't
:raise AmbienteNaoExisteError: Ambiente não cadastrado.
:raise InvalidParameterError: Invalid ID for VLAN.
:raise VlanNaoExisteError: VLAN not found.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
"""
url = 'vlan/check_number_available/' + \
str(id_environment) + '/' + str(num_vlan) + '/' + str(id_vlan)
code, xml = self.submit(None, 'GET', url)
return self.response(code, xml)
|
def function[check_number_available, parameter[self, id_environment, num_vlan, id_vlan]]:
constant[Checking if environment has a number vlan available
:param id_environment: Identifier of environment
:param num_vlan: Vlan number
:param id_vlan: Vlan indentifier (False if inserting a vlan)
:return: True is has number available, False if hasn't
:raise AmbienteNaoExisteError: Ambiente não cadastrado.
:raise InvalidParameterError: Invalid ID for VLAN.
:raise VlanNaoExisteError: VLAN not found.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
]
variable[url] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[vlan/check_number_available/] + call[name[str], parameter[name[id_environment]]]] + constant[/]] + call[name[str], parameter[name[num_vlan]]]] + constant[/]] + call[name[str], parameter[name[id_vlan]]]]
<ast.Tuple object at 0x7da2047eb1c0> assign[=] call[name[self].submit, parameter[constant[None], constant[GET], name[url]]]
return[call[name[self].response, parameter[name[code], name[xml]]]]
|
keyword[def] identifier[check_number_available] ( identifier[self] , identifier[id_environment] , identifier[num_vlan] , identifier[id_vlan] ):
literal[string]
identifier[url] = literal[string] + identifier[str] ( identifier[id_environment] )+ literal[string] + identifier[str] ( identifier[num_vlan] )+ literal[string] + identifier[str] ( identifier[id_vlan] )
identifier[code] , identifier[xml] = identifier[self] . identifier[submit] ( keyword[None] , literal[string] , identifier[url] )
keyword[return] identifier[self] . identifier[response] ( identifier[code] , identifier[xml] )
|
def check_number_available(self, id_environment, num_vlan, id_vlan):
"""Checking if environment has a number vlan available
:param id_environment: Identifier of environment
:param num_vlan: Vlan number
:param id_vlan: Vlan indentifier (False if inserting a vlan)
:return: True is has number available, False if hasn't
:raise AmbienteNaoExisteError: Ambiente não cadastrado.
:raise InvalidParameterError: Invalid ID for VLAN.
:raise VlanNaoExisteError: VLAN not found.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
"""
url = 'vlan/check_number_available/' + str(id_environment) + '/' + str(num_vlan) + '/' + str(id_vlan)
(code, xml) = self.submit(None, 'GET', url)
return self.response(code, xml)
|
def copy(self):
"""
Makes a safe copy of the model.
"""
copied_model = GPModel(kernel = self.model.kern.copy(),
noise_var=self.noise_var,
exact_feval=self.exact_feval,
optimizer=self.optimizer,
max_iters=self.max_iters,
optimize_restarts=self.optimize_restarts,
verbose=self.verbose,
ARD=self.ARD)
copied_model._create_model(self.model.X,self.model.Y)
copied_model.updateModel(self.model.X,self.model.Y, None, None)
return copied_model
|
def function[copy, parameter[self]]:
constant[
Makes a safe copy of the model.
]
variable[copied_model] assign[=] call[name[GPModel], parameter[]]
call[name[copied_model]._create_model, parameter[name[self].model.X, name[self].model.Y]]
call[name[copied_model].updateModel, parameter[name[self].model.X, name[self].model.Y, constant[None], constant[None]]]
return[name[copied_model]]
|
keyword[def] identifier[copy] ( identifier[self] ):
literal[string]
identifier[copied_model] = identifier[GPModel] ( identifier[kernel] = identifier[self] . identifier[model] . identifier[kern] . identifier[copy] (),
identifier[noise_var] = identifier[self] . identifier[noise_var] ,
identifier[exact_feval] = identifier[self] . identifier[exact_feval] ,
identifier[optimizer] = identifier[self] . identifier[optimizer] ,
identifier[max_iters] = identifier[self] . identifier[max_iters] ,
identifier[optimize_restarts] = identifier[self] . identifier[optimize_restarts] ,
identifier[verbose] = identifier[self] . identifier[verbose] ,
identifier[ARD] = identifier[self] . identifier[ARD] )
identifier[copied_model] . identifier[_create_model] ( identifier[self] . identifier[model] . identifier[X] , identifier[self] . identifier[model] . identifier[Y] )
identifier[copied_model] . identifier[updateModel] ( identifier[self] . identifier[model] . identifier[X] , identifier[self] . identifier[model] . identifier[Y] , keyword[None] , keyword[None] )
keyword[return] identifier[copied_model]
|
def copy(self):
"""
Makes a safe copy of the model.
"""
copied_model = GPModel(kernel=self.model.kern.copy(), noise_var=self.noise_var, exact_feval=self.exact_feval, optimizer=self.optimizer, max_iters=self.max_iters, optimize_restarts=self.optimize_restarts, verbose=self.verbose, ARD=self.ARD)
copied_model._create_model(self.model.X, self.model.Y)
copied_model.updateModel(self.model.X, self.model.Y, None, None)
return copied_model
|
def financials(symbol, token='', version=''):
'''Pulls income statement, balance sheet, and cash flow data from the four most recent reported quarters.
https://iexcloud.io/docs/api/#financials
Updates at 8am, 9am UTC daily
Args:
symbol (string); Ticker to request
token (string); Access token
version (string); API version
Returns:
dict: result
'''
_raiseIfNotStr(symbol)
return _getJson('stock/' + symbol + '/financials', token, version)
|
def function[financials, parameter[symbol, token, version]]:
constant[Pulls income statement, balance sheet, and cash flow data from the four most recent reported quarters.
https://iexcloud.io/docs/api/#financials
Updates at 8am, 9am UTC daily
Args:
symbol (string); Ticker to request
token (string); Access token
version (string); API version
Returns:
dict: result
]
call[name[_raiseIfNotStr], parameter[name[symbol]]]
return[call[name[_getJson], parameter[binary_operation[binary_operation[constant[stock/] + name[symbol]] + constant[/financials]], name[token], name[version]]]]
|
keyword[def] identifier[financials] ( identifier[symbol] , identifier[token] = literal[string] , identifier[version] = literal[string] ):
literal[string]
identifier[_raiseIfNotStr] ( identifier[symbol] )
keyword[return] identifier[_getJson] ( literal[string] + identifier[symbol] + literal[string] , identifier[token] , identifier[version] )
|
def financials(symbol, token='', version=''):
"""Pulls income statement, balance sheet, and cash flow data from the four most recent reported quarters.
https://iexcloud.io/docs/api/#financials
Updates at 8am, 9am UTC daily
Args:
symbol (string); Ticker to request
token (string); Access token
version (string); API version
Returns:
dict: result
"""
_raiseIfNotStr(symbol)
return _getJson('stock/' + symbol + '/financials', token, version)
|
def get_last_modified_datetime(self):
"""Return datetime object of modified time of machine file. Return now if not a file."""
if self._path:
statbuf = os.stat(self._path)
return datetime.utcfromtimestamp(statbuf.st_mtime)
else:
return datetime.now()
|
def function[get_last_modified_datetime, parameter[self]]:
constant[Return datetime object of modified time of machine file. Return now if not a file.]
if name[self]._path begin[:]
variable[statbuf] assign[=] call[name[os].stat, parameter[name[self]._path]]
return[call[name[datetime].utcfromtimestamp, parameter[name[statbuf].st_mtime]]]
|
keyword[def] identifier[get_last_modified_datetime] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_path] :
identifier[statbuf] = identifier[os] . identifier[stat] ( identifier[self] . identifier[_path] )
keyword[return] identifier[datetime] . identifier[utcfromtimestamp] ( identifier[statbuf] . identifier[st_mtime] )
keyword[else] :
keyword[return] identifier[datetime] . identifier[now] ()
|
def get_last_modified_datetime(self):
"""Return datetime object of modified time of machine file. Return now if not a file."""
if self._path:
statbuf = os.stat(self._path)
return datetime.utcfromtimestamp(statbuf.st_mtime) # depends on [control=['if'], data=[]]
else:
return datetime.now()
|
def clear(self):
"Remove all items and reset internal structures"
dict.clear(self)
self._key = 0
if hasattr(self._tree_view, "wx_obj"):
self._tree_view.wx_obj.DeleteAllItems()
|
def function[clear, parameter[self]]:
constant[Remove all items and reset internal structures]
call[name[dict].clear, parameter[name[self]]]
name[self]._key assign[=] constant[0]
if call[name[hasattr], parameter[name[self]._tree_view, constant[wx_obj]]] begin[:]
call[name[self]._tree_view.wx_obj.DeleteAllItems, parameter[]]
|
keyword[def] identifier[clear] ( identifier[self] ):
literal[string]
identifier[dict] . identifier[clear] ( identifier[self] )
identifier[self] . identifier[_key] = literal[int]
keyword[if] identifier[hasattr] ( identifier[self] . identifier[_tree_view] , literal[string] ):
identifier[self] . identifier[_tree_view] . identifier[wx_obj] . identifier[DeleteAllItems] ()
|
def clear(self):
"""Remove all items and reset internal structures"""
dict.clear(self)
self._key = 0
if hasattr(self._tree_view, 'wx_obj'):
self._tree_view.wx_obj.DeleteAllItems() # depends on [control=['if'], data=[]]
|
def attach_data(self, node):
'''Generic method called for visit_XXXX() with XXXX in
GatherOMPData.statements list
'''
if self.current:
for curr in self.current:
md = OMPDirective(curr)
metadata.add(node, md)
self.current = list()
# add a Pass to hold some directives
for field_name, field in ast.iter_fields(node):
if field_name in GatherOMPData.statement_lists:
if(field and
isinstance(field[-1], ast.Expr) and
self.isompdirective(field[-1].value)):
field.append(ast.Pass())
self.generic_visit(node)
# add an If to hold scoping OpenMP directives
directives = metadata.get(node, OMPDirective)
field_names = {n for n, _ in ast.iter_fields(node)}
has_no_scope = field_names.isdisjoint(GatherOMPData.statement_lists)
if directives and has_no_scope:
# some directives create a scope, but the holding stmt may not
# artificially create one here if needed
sdirective = ''.join(d.s for d in directives)
scoping = ('parallel', 'task', 'section')
if any(s in sdirective for s in scoping):
metadata.clear(node, OMPDirective)
node = ast.If(ast.Num(1), [node], [])
for directive in directives:
metadata.add(node, directive)
return node
|
def function[attach_data, parameter[self, node]]:
constant[Generic method called for visit_XXXX() with XXXX in
GatherOMPData.statements list
]
if name[self].current begin[:]
for taget[name[curr]] in starred[name[self].current] begin[:]
variable[md] assign[=] call[name[OMPDirective], parameter[name[curr]]]
call[name[metadata].add, parameter[name[node], name[md]]]
name[self].current assign[=] call[name[list], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18dc994e0>, <ast.Name object at 0x7da18dc9b0a0>]]] in starred[call[name[ast].iter_fields, parameter[name[node]]]] begin[:]
if compare[name[field_name] in name[GatherOMPData].statement_lists] begin[:]
if <ast.BoolOp object at 0x7da18dc9b9d0> begin[:]
call[name[field].append, parameter[call[name[ast].Pass, parameter[]]]]
call[name[self].generic_visit, parameter[name[node]]]
variable[directives] assign[=] call[name[metadata].get, parameter[name[node], name[OMPDirective]]]
variable[field_names] assign[=] <ast.SetComp object at 0x7da18dc9bb20>
variable[has_no_scope] assign[=] call[name[field_names].isdisjoint, parameter[name[GatherOMPData].statement_lists]]
if <ast.BoolOp object at 0x7da18dc9be50> begin[:]
variable[sdirective] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da18dc98cd0>]]
variable[scoping] assign[=] tuple[[<ast.Constant object at 0x7da18dc986d0>, <ast.Constant object at 0x7da18dc9a2f0>, <ast.Constant object at 0x7da18dc9a710>]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da18dc9bb50>]] begin[:]
call[name[metadata].clear, parameter[name[node], name[OMPDirective]]]
variable[node] assign[=] call[name[ast].If, parameter[call[name[ast].Num, parameter[constant[1]]], list[[<ast.Name object at 0x7da18dc98130>]], list[[]]]]
for taget[name[directive]] in starred[name[directives]] begin[:]
call[name[metadata].add, parameter[name[node], name[directive]]]
return[name[node]]
|
keyword[def] identifier[attach_data] ( identifier[self] , identifier[node] ):
literal[string]
keyword[if] identifier[self] . identifier[current] :
keyword[for] identifier[curr] keyword[in] identifier[self] . identifier[current] :
identifier[md] = identifier[OMPDirective] ( identifier[curr] )
identifier[metadata] . identifier[add] ( identifier[node] , identifier[md] )
identifier[self] . identifier[current] = identifier[list] ()
keyword[for] identifier[field_name] , identifier[field] keyword[in] identifier[ast] . identifier[iter_fields] ( identifier[node] ):
keyword[if] identifier[field_name] keyword[in] identifier[GatherOMPData] . identifier[statement_lists] :
keyword[if] ( identifier[field] keyword[and]
identifier[isinstance] ( identifier[field] [- literal[int] ], identifier[ast] . identifier[Expr] ) keyword[and]
identifier[self] . identifier[isompdirective] ( identifier[field] [- literal[int] ]. identifier[value] )):
identifier[field] . identifier[append] ( identifier[ast] . identifier[Pass] ())
identifier[self] . identifier[generic_visit] ( identifier[node] )
identifier[directives] = identifier[metadata] . identifier[get] ( identifier[node] , identifier[OMPDirective] )
identifier[field_names] ={ identifier[n] keyword[for] identifier[n] , identifier[_] keyword[in] identifier[ast] . identifier[iter_fields] ( identifier[node] )}
identifier[has_no_scope] = identifier[field_names] . identifier[isdisjoint] ( identifier[GatherOMPData] . identifier[statement_lists] )
keyword[if] identifier[directives] keyword[and] identifier[has_no_scope] :
identifier[sdirective] = literal[string] . identifier[join] ( identifier[d] . identifier[s] keyword[for] identifier[d] keyword[in] identifier[directives] )
identifier[scoping] =( literal[string] , literal[string] , literal[string] )
keyword[if] identifier[any] ( identifier[s] keyword[in] identifier[sdirective] keyword[for] identifier[s] keyword[in] identifier[scoping] ):
identifier[metadata] . identifier[clear] ( identifier[node] , identifier[OMPDirective] )
identifier[node] = identifier[ast] . identifier[If] ( identifier[ast] . identifier[Num] ( literal[int] ),[ identifier[node] ],[])
keyword[for] identifier[directive] keyword[in] identifier[directives] :
identifier[metadata] . identifier[add] ( identifier[node] , identifier[directive] )
keyword[return] identifier[node]
|
def attach_data(self, node):
"""Generic method called for visit_XXXX() with XXXX in
GatherOMPData.statements list
"""
if self.current:
for curr in self.current:
md = OMPDirective(curr)
metadata.add(node, md) # depends on [control=['for'], data=['curr']]
self.current = list() # depends on [control=['if'], data=[]]
# add a Pass to hold some directives
for (field_name, field) in ast.iter_fields(node):
if field_name in GatherOMPData.statement_lists:
if field and isinstance(field[-1], ast.Expr) and self.isompdirective(field[-1].value):
field.append(ast.Pass()) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
self.generic_visit(node)
# add an If to hold scoping OpenMP directives
directives = metadata.get(node, OMPDirective)
field_names = {n for (n, _) in ast.iter_fields(node)}
has_no_scope = field_names.isdisjoint(GatherOMPData.statement_lists)
if directives and has_no_scope:
# some directives create a scope, but the holding stmt may not
# artificially create one here if needed
sdirective = ''.join((d.s for d in directives))
scoping = ('parallel', 'task', 'section')
if any((s in sdirective for s in scoping)):
metadata.clear(node, OMPDirective)
node = ast.If(ast.Num(1), [node], [])
for directive in directives:
metadata.add(node, directive) # depends on [control=['for'], data=['directive']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return node
|
def evaluate(self, instance, step, extra):
"""Evaluate the current definition and fill its attributes.
Uses attributes definition in the following order:
- values defined when defining the ParameteredAttribute
- additional values defined when instantiating the containing factory
Args:
instance (builder.Resolver): The object holding currently computed
attributes
step: a factory.builder.BuildStep
extra (dict): additional, call-time added kwargs
for the step.
"""
defaults = dict(self.defaults)
if extra:
defaults.update(extra)
return self.generate(step, defaults)
|
def function[evaluate, parameter[self, instance, step, extra]]:
constant[Evaluate the current definition and fill its attributes.
Uses attributes definition in the following order:
- values defined when defining the ParameteredAttribute
- additional values defined when instantiating the containing factory
Args:
instance (builder.Resolver): The object holding currently computed
attributes
step: a factory.builder.BuildStep
extra (dict): additional, call-time added kwargs
for the step.
]
variable[defaults] assign[=] call[name[dict], parameter[name[self].defaults]]
if name[extra] begin[:]
call[name[defaults].update, parameter[name[extra]]]
return[call[name[self].generate, parameter[name[step], name[defaults]]]]
|
keyword[def] identifier[evaluate] ( identifier[self] , identifier[instance] , identifier[step] , identifier[extra] ):
literal[string]
identifier[defaults] = identifier[dict] ( identifier[self] . identifier[defaults] )
keyword[if] identifier[extra] :
identifier[defaults] . identifier[update] ( identifier[extra] )
keyword[return] identifier[self] . identifier[generate] ( identifier[step] , identifier[defaults] )
|
def evaluate(self, instance, step, extra):
"""Evaluate the current definition and fill its attributes.
Uses attributes definition in the following order:
- values defined when defining the ParameteredAttribute
- additional values defined when instantiating the containing factory
Args:
instance (builder.Resolver): The object holding currently computed
attributes
step: a factory.builder.BuildStep
extra (dict): additional, call-time added kwargs
for the step.
"""
defaults = dict(self.defaults)
if extra:
defaults.update(extra) # depends on [control=['if'], data=[]]
return self.generate(step, defaults)
|
def a2enconf(conf):
'''
.. versionadded:: 2016.3.0
Runs a2enconf for the given conf.
This will only be functional on Debian-based operating systems (Ubuntu,
Mint, etc).
CLI Examples:
.. code-block:: bash
salt '*' apache.a2enconf security
'''
ret = {}
command = ['a2enconf', conf]
try:
status = __salt__['cmd.retcode'](command, python_shell=False)
except Exception as e:
return e
ret['Name'] = 'Apache2 Enable Conf'
ret['Conf'] = conf
if status == 1:
ret['Status'] = 'Conf {0} Not found'.format(conf)
elif status == 0:
ret['Status'] = 'Conf {0} enabled'.format(conf)
else:
ret['Status'] = status
return ret
|
def function[a2enconf, parameter[conf]]:
constant[
.. versionadded:: 2016.3.0
Runs a2enconf for the given conf.
This will only be functional on Debian-based operating systems (Ubuntu,
Mint, etc).
CLI Examples:
.. code-block:: bash
salt '*' apache.a2enconf security
]
variable[ret] assign[=] dictionary[[], []]
variable[command] assign[=] list[[<ast.Constant object at 0x7da1b1c31960>, <ast.Name object at 0x7da1b1c31cf0>]]
<ast.Try object at 0x7da1b1c330a0>
call[name[ret]][constant[Name]] assign[=] constant[Apache2 Enable Conf]
call[name[ret]][constant[Conf]] assign[=] name[conf]
if compare[name[status] equal[==] constant[1]] begin[:]
call[name[ret]][constant[Status]] assign[=] call[constant[Conf {0} Not found].format, parameter[name[conf]]]
return[name[ret]]
|
keyword[def] identifier[a2enconf] ( identifier[conf] ):
literal[string]
identifier[ret] ={}
identifier[command] =[ literal[string] , identifier[conf] ]
keyword[try] :
identifier[status] = identifier[__salt__] [ literal[string] ]( identifier[command] , identifier[python_shell] = keyword[False] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[return] identifier[e]
identifier[ret] [ literal[string] ]= literal[string]
identifier[ret] [ literal[string] ]= identifier[conf]
keyword[if] identifier[status] == literal[int] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[conf] )
keyword[elif] identifier[status] == literal[int] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[conf] )
keyword[else] :
identifier[ret] [ literal[string] ]= identifier[status]
keyword[return] identifier[ret]
|
def a2enconf(conf):
"""
.. versionadded:: 2016.3.0
Runs a2enconf for the given conf.
This will only be functional on Debian-based operating systems (Ubuntu,
Mint, etc).
CLI Examples:
.. code-block:: bash
salt '*' apache.a2enconf security
"""
ret = {}
command = ['a2enconf', conf]
try:
status = __salt__['cmd.retcode'](command, python_shell=False) # depends on [control=['try'], data=[]]
except Exception as e:
return e # depends on [control=['except'], data=['e']]
ret['Name'] = 'Apache2 Enable Conf'
ret['Conf'] = conf
if status == 1:
ret['Status'] = 'Conf {0} Not found'.format(conf) # depends on [control=['if'], data=[]]
elif status == 0:
ret['Status'] = 'Conf {0} enabled'.format(conf) # depends on [control=['if'], data=[]]
else:
ret['Status'] = status
return ret
|
def reset(self):
""" Resets the readonly variables.
"""
self.p_lmbda = 0.0
self.q_lmbda = 0.0
self.mu_vmin = 0.0
self.mu_vmax = 0.0
|
def function[reset, parameter[self]]:
constant[ Resets the readonly variables.
]
name[self].p_lmbda assign[=] constant[0.0]
name[self].q_lmbda assign[=] constant[0.0]
name[self].mu_vmin assign[=] constant[0.0]
name[self].mu_vmax assign[=] constant[0.0]
|
keyword[def] identifier[reset] ( identifier[self] ):
literal[string]
identifier[self] . identifier[p_lmbda] = literal[int]
identifier[self] . identifier[q_lmbda] = literal[int]
identifier[self] . identifier[mu_vmin] = literal[int]
identifier[self] . identifier[mu_vmax] = literal[int]
|
def reset(self):
""" Resets the readonly variables.
"""
self.p_lmbda = 0.0
self.q_lmbda = 0.0
self.mu_vmin = 0.0
self.mu_vmax = 0.0
|
def add_color_stop_rgba(self, offset, red, green, blue, alpha=1):
"""Adds a translucent color stop to a gradient pattern.
The offset specifies the location along the gradient's control vector.
For example,
a linear gradient's control vector is from (x0,y0) to (x1,y1)
while a radial gradient's control vector is
from any point on the start circle
to the corresponding point on the end circle.
If two (or more) stops are specified with identical offset values,
they will be sorted
according to the order in which the stops are added
(stops added earlier before stops added later).
This can be useful for reliably making sharp color transitions
instead of the typical blend.
The color components and offset are in the range 0 to 1.
If the values passed in are outside that range, they will be clamped.
:param offset: Location along the gradient's control vector
:param red: Red component of the color.
:param green: Green component of the color.
:param blue: Blue component of the color.
:param alpha:
Alpha component of the color.
1 (the default) is opaque, 0 fully transparent.
:type offset: float
:type red: float
:type green: float
:type blue: float
:type alpha: float
"""
cairo.cairo_pattern_add_color_stop_rgba(
self._pointer, offset, red, green, blue, alpha)
self._check_status()
|
def function[add_color_stop_rgba, parameter[self, offset, red, green, blue, alpha]]:
constant[Adds a translucent color stop to a gradient pattern.
The offset specifies the location along the gradient's control vector.
For example,
a linear gradient's control vector is from (x0,y0) to (x1,y1)
while a radial gradient's control vector is
from any point on the start circle
to the corresponding point on the end circle.
If two (or more) stops are specified with identical offset values,
they will be sorted
according to the order in which the stops are added
(stops added earlier before stops added later).
This can be useful for reliably making sharp color transitions
instead of the typical blend.
The color components and offset are in the range 0 to 1.
If the values passed in are outside that range, they will be clamped.
:param offset: Location along the gradient's control vector
:param red: Red component of the color.
:param green: Green component of the color.
:param blue: Blue component of the color.
:param alpha:
Alpha component of the color.
1 (the default) is opaque, 0 fully transparent.
:type offset: float
:type red: float
:type green: float
:type blue: float
:type alpha: float
]
call[name[cairo].cairo_pattern_add_color_stop_rgba, parameter[name[self]._pointer, name[offset], name[red], name[green], name[blue], name[alpha]]]
call[name[self]._check_status, parameter[]]
|
keyword[def] identifier[add_color_stop_rgba] ( identifier[self] , identifier[offset] , identifier[red] , identifier[green] , identifier[blue] , identifier[alpha] = literal[int] ):
literal[string]
identifier[cairo] . identifier[cairo_pattern_add_color_stop_rgba] (
identifier[self] . identifier[_pointer] , identifier[offset] , identifier[red] , identifier[green] , identifier[blue] , identifier[alpha] )
identifier[self] . identifier[_check_status] ()
|
def add_color_stop_rgba(self, offset, red, green, blue, alpha=1):
"""Adds a translucent color stop to a gradient pattern.
The offset specifies the location along the gradient's control vector.
For example,
a linear gradient's control vector is from (x0,y0) to (x1,y1)
while a radial gradient's control vector is
from any point on the start circle
to the corresponding point on the end circle.
If two (or more) stops are specified with identical offset values,
they will be sorted
according to the order in which the stops are added
(stops added earlier before stops added later).
This can be useful for reliably making sharp color transitions
instead of the typical blend.
The color components and offset are in the range 0 to 1.
If the values passed in are outside that range, they will be clamped.
:param offset: Location along the gradient's control vector
:param red: Red component of the color.
:param green: Green component of the color.
:param blue: Blue component of the color.
:param alpha:
Alpha component of the color.
1 (the default) is opaque, 0 fully transparent.
:type offset: float
:type red: float
:type green: float
:type blue: float
:type alpha: float
"""
cairo.cairo_pattern_add_color_stop_rgba(self._pointer, offset, red, green, blue, alpha)
self._check_status()
|
def TEST(fname):
"""
Test function to step through all functions in
order to try and identify all features on a map
This test function should be placed in a main
section later
"""
#fname = os.path.join(os.getcwd(), '..','..', # os.path.join(os.path.getcwd(), '
m = MapObject(fname, os.path.join(os.getcwd(), 'img_prog_results'))
m.add_layer(ImagePathFollow('border'))
m.add_layer(ImagePathFollow('river'))
m.add_layer(ImagePathFollow('road'))
m.add_layer(ImageArea('sea', col='Blue', density='light'))
m.add_layer(ImageArea('desert', col='Yellow', density='med'))
m.add_layer(ImageArea('forest', col='Drak Green', density='light'))
m.add_layer(ImageArea('fields', col='Green', density='light'))
m.add_layer(ImageObject('mountains'))
m.add_layer(ImageObject('trees'))
m.add_layer(ImageObject('towns'))
|
def function[TEST, parameter[fname]]:
constant[
Test function to step through all functions in
order to try and identify all features on a map
This test function should be placed in a main
section later
]
variable[m] assign[=] call[name[MapObject], parameter[name[fname], call[name[os].path.join, parameter[call[name[os].getcwd, parameter[]], constant[img_prog_results]]]]]
call[name[m].add_layer, parameter[call[name[ImagePathFollow], parameter[constant[border]]]]]
call[name[m].add_layer, parameter[call[name[ImagePathFollow], parameter[constant[river]]]]]
call[name[m].add_layer, parameter[call[name[ImagePathFollow], parameter[constant[road]]]]]
call[name[m].add_layer, parameter[call[name[ImageArea], parameter[constant[sea]]]]]
call[name[m].add_layer, parameter[call[name[ImageArea], parameter[constant[desert]]]]]
call[name[m].add_layer, parameter[call[name[ImageArea], parameter[constant[forest]]]]]
call[name[m].add_layer, parameter[call[name[ImageArea], parameter[constant[fields]]]]]
call[name[m].add_layer, parameter[call[name[ImageObject], parameter[constant[mountains]]]]]
call[name[m].add_layer, parameter[call[name[ImageObject], parameter[constant[trees]]]]]
call[name[m].add_layer, parameter[call[name[ImageObject], parameter[constant[towns]]]]]
|
keyword[def] identifier[TEST] ( identifier[fname] ):
literal[string]
identifier[m] = identifier[MapObject] ( identifier[fname] , identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[getcwd] (), literal[string] ))
identifier[m] . identifier[add_layer] ( identifier[ImagePathFollow] ( literal[string] ))
identifier[m] . identifier[add_layer] ( identifier[ImagePathFollow] ( literal[string] ))
identifier[m] . identifier[add_layer] ( identifier[ImagePathFollow] ( literal[string] ))
identifier[m] . identifier[add_layer] ( identifier[ImageArea] ( literal[string] , identifier[col] = literal[string] , identifier[density] = literal[string] ))
identifier[m] . identifier[add_layer] ( identifier[ImageArea] ( literal[string] , identifier[col] = literal[string] , identifier[density] = literal[string] ))
identifier[m] . identifier[add_layer] ( identifier[ImageArea] ( literal[string] , identifier[col] = literal[string] , identifier[density] = literal[string] ))
identifier[m] . identifier[add_layer] ( identifier[ImageArea] ( literal[string] , identifier[col] = literal[string] , identifier[density] = literal[string] ))
identifier[m] . identifier[add_layer] ( identifier[ImageObject] ( literal[string] ))
identifier[m] . identifier[add_layer] ( identifier[ImageObject] ( literal[string] ))
identifier[m] . identifier[add_layer] ( identifier[ImageObject] ( literal[string] ))
|
def TEST(fname):
"""
Test function to step through all functions in
order to try and identify all features on a map
This test function should be placed in a main
section later
"""
#fname = os.path.join(os.getcwd(), '..','..', # os.path.join(os.path.getcwd(), '
m = MapObject(fname, os.path.join(os.getcwd(), 'img_prog_results'))
m.add_layer(ImagePathFollow('border'))
m.add_layer(ImagePathFollow('river'))
m.add_layer(ImagePathFollow('road'))
m.add_layer(ImageArea('sea', col='Blue', density='light'))
m.add_layer(ImageArea('desert', col='Yellow', density='med'))
m.add_layer(ImageArea('forest', col='Drak Green', density='light'))
m.add_layer(ImageArea('fields', col='Green', density='light'))
m.add_layer(ImageObject('mountains'))
m.add_layer(ImageObject('trees'))
m.add_layer(ImageObject('towns'))
|
def update_webhook(self, webhook_url, webhook_id, events=None):
"""Register webhook (if it doesn't exit)."""
hooks = self._request(MINUT_WEBHOOKS_URL, request_type='GET')['hooks']
try:
self._webhook = next(
hook for hook in hooks if hook['url'] == webhook_url)
_LOGGER.debug("Webhook: %s", self._webhook)
except StopIteration: # Not found
if events is None:
events = [e for v in EVENTS.values() for e in v if e]
self._webhook = self._register_webhook(webhook_url, events)
_LOGGER.debug("Registered hook: %s", self._webhook)
return self._webhook
|
def function[update_webhook, parameter[self, webhook_url, webhook_id, events]]:
constant[Register webhook (if it doesn't exit).]
variable[hooks] assign[=] call[call[name[self]._request, parameter[name[MINUT_WEBHOOKS_URL]]]][constant[hooks]]
<ast.Try object at 0x7da1b1121a50>
|
keyword[def] identifier[update_webhook] ( identifier[self] , identifier[webhook_url] , identifier[webhook_id] , identifier[events] = keyword[None] ):
literal[string]
identifier[hooks] = identifier[self] . identifier[_request] ( identifier[MINUT_WEBHOOKS_URL] , identifier[request_type] = literal[string] )[ literal[string] ]
keyword[try] :
identifier[self] . identifier[_webhook] = identifier[next] (
identifier[hook] keyword[for] identifier[hook] keyword[in] identifier[hooks] keyword[if] identifier[hook] [ literal[string] ]== identifier[webhook_url] )
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[self] . identifier[_webhook] )
keyword[except] identifier[StopIteration] :
keyword[if] identifier[events] keyword[is] keyword[None] :
identifier[events] =[ identifier[e] keyword[for] identifier[v] keyword[in] identifier[EVENTS] . identifier[values] () keyword[for] identifier[e] keyword[in] identifier[v] keyword[if] identifier[e] ]
identifier[self] . identifier[_webhook] = identifier[self] . identifier[_register_webhook] ( identifier[webhook_url] , identifier[events] )
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[self] . identifier[_webhook] )
keyword[return] identifier[self] . identifier[_webhook]
|
def update_webhook(self, webhook_url, webhook_id, events=None):
"""Register webhook (if it doesn't exit)."""
hooks = self._request(MINUT_WEBHOOKS_URL, request_type='GET')['hooks']
try:
self._webhook = next((hook for hook in hooks if hook['url'] == webhook_url))
_LOGGER.debug('Webhook: %s', self._webhook) # depends on [control=['try'], data=[]]
except StopIteration: # Not found
if events is None:
events = [e for v in EVENTS.values() for e in v if e] # depends on [control=['if'], data=['events']]
self._webhook = self._register_webhook(webhook_url, events)
_LOGGER.debug('Registered hook: %s', self._webhook)
return self._webhook # depends on [control=['except'], data=[]]
|
def call(self, method_path, **kwargs):
"""
Make an API call for specific method
:param method_path: format ``Interface.Method`` (e.g. ``ISteamWebAPIUtil.GetServerInfo``)
:type method_path: :class:`str`
:param kwargs: keyword arguments for the specific method
:return: response
:rtype: :class:`dict`, :class:`lxml.etree.Element` or :class:`str`
"""
interface, method = method_path.split('.', 1)
return getattr(getattr(self, interface), method)(**kwargs)
|
def function[call, parameter[self, method_path]]:
constant[
Make an API call for specific method
:param method_path: format ``Interface.Method`` (e.g. ``ISteamWebAPIUtil.GetServerInfo``)
:type method_path: :class:`str`
:param kwargs: keyword arguments for the specific method
:return: response
:rtype: :class:`dict`, :class:`lxml.etree.Element` or :class:`str`
]
<ast.Tuple object at 0x7da1b1d66b60> assign[=] call[name[method_path].split, parameter[constant[.], constant[1]]]
return[call[call[name[getattr], parameter[call[name[getattr], parameter[name[self], name[interface]]], name[method]]], parameter[]]]
|
keyword[def] identifier[call] ( identifier[self] , identifier[method_path] ,** identifier[kwargs] ):
literal[string]
identifier[interface] , identifier[method] = identifier[method_path] . identifier[split] ( literal[string] , literal[int] )
keyword[return] identifier[getattr] ( identifier[getattr] ( identifier[self] , identifier[interface] ), identifier[method] )(** identifier[kwargs] )
|
def call(self, method_path, **kwargs):
"""
Make an API call for specific method
:param method_path: format ``Interface.Method`` (e.g. ``ISteamWebAPIUtil.GetServerInfo``)
:type method_path: :class:`str`
:param kwargs: keyword arguments for the specific method
:return: response
:rtype: :class:`dict`, :class:`lxml.etree.Element` or :class:`str`
"""
(interface, method) = method_path.split('.', 1)
return getattr(getattr(self, interface), method)(**kwargs)
|
def add_fast_step(self, fastsim):
""" Add the fastsim context to the trace. """
for wire_name in self.trace:
self.trace[wire_name].append(fastsim.context[wire_name])
|
def function[add_fast_step, parameter[self, fastsim]]:
constant[ Add the fastsim context to the trace. ]
for taget[name[wire_name]] in starred[name[self].trace] begin[:]
call[call[name[self].trace][name[wire_name]].append, parameter[call[name[fastsim].context][name[wire_name]]]]
|
keyword[def] identifier[add_fast_step] ( identifier[self] , identifier[fastsim] ):
literal[string]
keyword[for] identifier[wire_name] keyword[in] identifier[self] . identifier[trace] :
identifier[self] . identifier[trace] [ identifier[wire_name] ]. identifier[append] ( identifier[fastsim] . identifier[context] [ identifier[wire_name] ])
|
def add_fast_step(self, fastsim):
""" Add the fastsim context to the trace. """
for wire_name in self.trace:
self.trace[wire_name].append(fastsim.context[wire_name]) # depends on [control=['for'], data=['wire_name']]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.