code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def get_predictions_under_minimal_repair(instance, repair_options, optimum):
'''
Computes the set of signs on edges/vertices that can be cautiously
derived from [instance], minus those that are a direct consequence
of obs_[ev]label predicates
'''
# Serialize the instance and the repair options to temporary files for the solver.
inst = instance.to_file()
repops = repair_options.to_file()
prg = [ inst, repops, prediction_core_prg, repair_cardinality_prg ]
# Cautious enumeration restricted to optimal models, bounded by the known optimum.
options = '--project --enum-mode cautious --opt-mode=optN --opt-bound='+str(optimum)
solver = GringoClasp(clasp_options=options)
models = solver.run(prg, collapseTerms=True, collapseAtoms=False)
# Remove the temporary files before returning.
os.unlink(inst)
os.unlink(repops)
return whatsnew(instance,models[0]) | def function[get_predictions_under_minimal_repair, parameter[instance, repair_options, optimum]]:
constant[
Computes the set of signs on edges/vertices that can be cautiously
derived from [instance], minus those that are a direct consequence
of obs_[ev]label predicates
]
variable[inst] assign[=] call[name[instance].to_file, parameter[]]
variable[repops] assign[=] call[name[repair_options].to_file, parameter[]]
variable[prg] assign[=] list[[<ast.Name object at 0x7da20c6a9480>, <ast.Name object at 0x7da20c6aa800>, <ast.Name object at 0x7da20c6aa080>, <ast.Name object at 0x7da20c6abf40>]]
variable[options] assign[=] binary_operation[constant[--project --enum-mode cautious --opt-mode=optN --opt-bound=] + call[name[str], parameter[name[optimum]]]]
variable[solver] assign[=] call[name[GringoClasp], parameter[]]
variable[models] assign[=] call[name[solver].run, parameter[name[prg]]]
call[name[os].unlink, parameter[name[inst]]]
call[name[os].unlink, parameter[name[repops]]]
return[call[name[whatsnew], parameter[name[instance], call[name[models]][constant[0]]]]] | keyword[def] identifier[get_predictions_under_minimal_repair] ( identifier[instance] , identifier[repair_options] , identifier[optimum] ):
literal[string]
identifier[inst] = identifier[instance] . identifier[to_file] ()
identifier[repops] = identifier[repair_options] . identifier[to_file] ()
identifier[prg] =[ identifier[inst] , identifier[repops] , identifier[prediction_core_prg] , identifier[repair_cardinality_prg] ]
identifier[options] = literal[string] + identifier[str] ( identifier[optimum] )
identifier[solver] = identifier[GringoClasp] ( identifier[clasp_options] = identifier[options] )
identifier[models] = identifier[solver] . identifier[run] ( identifier[prg] , identifier[collapseTerms] = keyword[True] , identifier[collapseAtoms] = keyword[False] )
identifier[os] . identifier[unlink] ( identifier[inst] )
identifier[os] . identifier[unlink] ( identifier[repops] )
keyword[return] identifier[whatsnew] ( identifier[instance] , identifier[models] [ literal[int] ]) | def get_predictions_under_minimal_repair(instance, repair_options, optimum):
"""
Computes the set of signs on edges/vertices that can be cautiously
derived from [instance], minus those that are a direct consequence
of obs_[ev]label predicates
"""
inst = instance.to_file()
repops = repair_options.to_file()
prg = [inst, repops, prediction_core_prg, repair_cardinality_prg]
options = '--project --enum-mode cautious --opt-mode=optN --opt-bound=' + str(optimum)
solver = GringoClasp(clasp_options=options)
models = solver.run(prg, collapseTerms=True, collapseAtoms=False)
os.unlink(inst)
os.unlink(repops)
return whatsnew(instance, models[0]) |
def load_remote_molecule(url, format=None):
'''Load a molecule from the remote location specified by *url*.
**Example**
::
load_remote_molecule('https://raw.github.com/chemlab/chemlab-testdata/master/benzene.mol')
'''
# Download to a local temporary file, then delegate to the local loader.
filename, headers = urlretrieve(url)
return load_molecule(filename, format=format) | def function[load_remote_molecule, parameter[url, format]]:
constant[Load a molecule from the remote location specified by *url*.
**Example**
::
load_remote_molecule('https://raw.github.com/chemlab/chemlab-testdata/master/benzene.mol')
]
<ast.Tuple object at 0x7da2054a7df0> assign[=] call[name[urlretrieve], parameter[name[url]]]
return[call[name[load_molecule], parameter[name[filename]]]] | keyword[def] identifier[load_remote_molecule] ( identifier[url] , identifier[format] = keyword[None] ):
literal[string]
identifier[filename] , identifier[headers] = identifier[urlretrieve] ( identifier[url] )
keyword[return] identifier[load_molecule] ( identifier[filename] , identifier[format] = identifier[format] ) | def load_remote_molecule(url, format=None):
"""Load a molecule from the remote location specified by *url*.
**Example**
::
load_remote_molecule('https://raw.github.com/chemlab/chemlab-testdata/master/benzene.mol')
"""
(filename, headers) = urlretrieve(url)
return load_molecule(filename, format=format) |
def update(self, old_line, new_line, once=False):
"""Replace all lines matching `old_line` with `new_line`.
If ``once`` is set to True, replace only the first matching line.
Returns the number of lines replaced.
"""
nb = 0
for i, line in enumerate(self.lines):
if line.match(old_line):
self.lines[i] = new_line
nb += 1
# Stop after the first replacement when ``once`` is requested.
if once:
return nb
return nb | def function[update, parameter[self, old_line, new_line, once]]:
constant[Replace all lines matching `old_line` with `new_line`.
If ``once`` is set to True, remove only the first instance.
]
variable[nb] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b2344a30>, <ast.Name object at 0x7da1b2345330>]]] in starred[call[name[enumerate], parameter[name[self].lines]]] begin[:]
if call[name[line].match, parameter[name[old_line]]] begin[:]
call[name[self].lines][name[i]] assign[=] name[new_line]
<ast.AugAssign object at 0x7da20c6a9420>
if name[once] begin[:]
return[name[nb]]
return[name[nb]] | keyword[def] identifier[update] ( identifier[self] , identifier[old_line] , identifier[new_line] , identifier[once] = keyword[False] ):
literal[string]
identifier[nb] = literal[int]
keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[self] . identifier[lines] ):
keyword[if] identifier[line] . identifier[match] ( identifier[old_line] ):
identifier[self] . identifier[lines] [ identifier[i] ]= identifier[new_line]
identifier[nb] += literal[int]
keyword[if] identifier[once] :
keyword[return] identifier[nb]
keyword[return] identifier[nb] | def update(self, old_line, new_line, once=False):
"""Replace all lines matching `old_line` with `new_line`.
If ``once`` is set to True, remove only the first instance.
"""
nb = 0
for (i, line) in enumerate(self.lines):
if line.match(old_line):
self.lines[i] = new_line
nb += 1
if once:
return nb # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return nb |
def list_order_by(l,firstItems):
"""given a list and a list of items to be first, return the list in the
same order except that it begins with each of the first items."""
# Work on a copy so the caller's sequence is never mutated.
l=list(l)
# Iterate backwards so the final order matches firstItems front-to-back.
for item in firstItems[::-1]: #backwards
if item in l:
l.remove(item)
l.insert(0,item)
return l | def function[list_order_by, parameter[l, firstItems]]:
constant[given a list and a list of items to be first, return the list in the
same order except that it begins with each of the first items.]
variable[l] assign[=] call[name[list], parameter[name[l]]]
for taget[name[item]] in starred[call[name[firstItems]][<ast.Slice object at 0x7da1afe8f1f0>]] begin[:]
if compare[name[item] in name[l]] begin[:]
call[name[l].remove, parameter[name[item]]]
call[name[l].insert, parameter[constant[0], name[item]]]
return[name[l]] | keyword[def] identifier[list_order_by] ( identifier[l] , identifier[firstItems] ):
literal[string]
identifier[l] = identifier[list] ( identifier[l] )
keyword[for] identifier[item] keyword[in] identifier[firstItems] [::- literal[int] ]:
keyword[if] identifier[item] keyword[in] identifier[l] :
identifier[l] . identifier[remove] ( identifier[item] )
identifier[l] . identifier[insert] ( literal[int] , identifier[item] )
keyword[return] identifier[l] | def list_order_by(l, firstItems):
"""given a list and a list of items to be first, return the list in the
same order except that it begins with each of the first items."""
l = list(l)
for item in firstItems[::-1]: #backwards
if item in l:
l.remove(item)
l.insert(0, item) # depends on [control=['if'], data=['item', 'l']] # depends on [control=['for'], data=['item']]
return l |
def _fill_temporary_buffer(self, cursor, text, html=False):
"""fill the area below the active editing zone with text"""
# Remember where the caller's cursor was so it can be restored afterwards.
current_pos = self._control.textCursor().position()
# Group the appends into a single undo block.
cursor.beginEditBlock()
self._append_plain_text('\n')
self._page(text, html=html)
cursor.endEditBlock()
# Restore the editing position while scrolling the view to the end.
cursor.setPosition(current_pos)
self._control.moveCursor(QtGui.QTextCursor.End)
self._control.setTextCursor(cursor)
self._temp_buffer_filled = True | def function[_fill_temporary_buffer, parameter[self, cursor, text, html]]:
constant[fill the area below the active editting zone with text]
variable[current_pos] assign[=] call[call[name[self]._control.textCursor, parameter[]].position, parameter[]]
call[name[cursor].beginEditBlock, parameter[]]
call[name[self]._append_plain_text, parameter[constant[
]]]
call[name[self]._page, parameter[name[text]]]
call[name[cursor].endEditBlock, parameter[]]
call[name[cursor].setPosition, parameter[name[current_pos]]]
call[name[self]._control.moveCursor, parameter[name[QtGui].QTextCursor.End]]
call[name[self]._control.setTextCursor, parameter[name[cursor]]]
name[self]._temp_buffer_filled assign[=] constant[True] | keyword[def] identifier[_fill_temporary_buffer] ( identifier[self] , identifier[cursor] , identifier[text] , identifier[html] = keyword[False] ):
literal[string]
identifier[current_pos] = identifier[self] . identifier[_control] . identifier[textCursor] (). identifier[position] ()
identifier[cursor] . identifier[beginEditBlock] ()
identifier[self] . identifier[_append_plain_text] ( literal[string] )
identifier[self] . identifier[_page] ( identifier[text] , identifier[html] = identifier[html] )
identifier[cursor] . identifier[endEditBlock] ()
identifier[cursor] . identifier[setPosition] ( identifier[current_pos] )
identifier[self] . identifier[_control] . identifier[moveCursor] ( identifier[QtGui] . identifier[QTextCursor] . identifier[End] )
identifier[self] . identifier[_control] . identifier[setTextCursor] ( identifier[cursor] )
identifier[self] . identifier[_temp_buffer_filled] = keyword[True] | def _fill_temporary_buffer(self, cursor, text, html=False):
"""fill the area below the active editting zone with text"""
current_pos = self._control.textCursor().position()
cursor.beginEditBlock()
self._append_plain_text('\n')
self._page(text, html=html)
cursor.endEditBlock()
cursor.setPosition(current_pos)
self._control.moveCursor(QtGui.QTextCursor.End)
self._control.setTextCursor(cursor)
self._temp_buffer_filled = True |
def data(self, index, role = Qt.DisplayRole):
""" Returns the data at an index for a certain role
"""
# NOTE: Qt calls this slot from the view; exceptions must not propagate,
# hence the broad try/except at the bottom.
try:
if role == Qt.DisplayRole:
# Format the (possibly masked) cell value as text using the configured formats.
return to_string(self._cellValue(index), masked=self._cellMask(index),
decode_bytes=self.encoding, maskFormat=self.maskFormat,
strFormat=self.strFormat, intFormat=self.intFormat,
numFormat=self.numFormat, otherFormat=self.otherFormat)
elif role == Qt.FontRole:
#assert self._font, "Font undefined"
return self._font
elif role == Qt.TextColorRole:
# A scalar truthy mask marks a missing value; arrays fall through to the data color.
masked = self._cellMask(index)
if not is_an_array(masked) and masked:
return self.missingColor
else:
return self.dataColor
elif role == Qt.TextAlignmentRole:
if self.horAlignment == ALIGN_SMART:
# Smart alignment: right-align numbers, left-align everything else.
cellContainsNumber = isinstance(self._cellValue(index), numbers.Number)
horAlign = Qt.AlignRight if cellContainsNumber else Qt.AlignLeft
return horAlign | self.verAlignment
else:
return self.horAlignment | self.verAlignment
else:
# Unsupported role.
return None
except Exception as ex:
logger.error("Slot is not exception-safe.")
logger.exception(ex)
if DEBUGGING:
raise | def function[data, parameter[self, index, role]]:
constant[ Returns the data at an index for a certain role
]
<ast.Try object at 0x7da1b0414fa0> | keyword[def] identifier[data] ( identifier[self] , identifier[index] , identifier[role] = identifier[Qt] . identifier[DisplayRole] ):
literal[string]
keyword[try] :
keyword[if] identifier[role] == identifier[Qt] . identifier[DisplayRole] :
keyword[return] identifier[to_string] ( identifier[self] . identifier[_cellValue] ( identifier[index] ), identifier[masked] = identifier[self] . identifier[_cellMask] ( identifier[index] ),
identifier[decode_bytes] = identifier[self] . identifier[encoding] , identifier[maskFormat] = identifier[self] . identifier[maskFormat] ,
identifier[strFormat] = identifier[self] . identifier[strFormat] , identifier[intFormat] = identifier[self] . identifier[intFormat] ,
identifier[numFormat] = identifier[self] . identifier[numFormat] , identifier[otherFormat] = identifier[self] . identifier[otherFormat] )
keyword[elif] identifier[role] == identifier[Qt] . identifier[FontRole] :
keyword[return] identifier[self] . identifier[_font]
keyword[elif] identifier[role] == identifier[Qt] . identifier[TextColorRole] :
identifier[masked] = identifier[self] . identifier[_cellMask] ( identifier[index] )
keyword[if] keyword[not] identifier[is_an_array] ( identifier[masked] ) keyword[and] identifier[masked] :
keyword[return] identifier[self] . identifier[missingColor]
keyword[else] :
keyword[return] identifier[self] . identifier[dataColor]
keyword[elif] identifier[role] == identifier[Qt] . identifier[TextAlignmentRole] :
keyword[if] identifier[self] . identifier[horAlignment] == identifier[ALIGN_SMART] :
identifier[cellContainsNumber] = identifier[isinstance] ( identifier[self] . identifier[_cellValue] ( identifier[index] ), identifier[numbers] . identifier[Number] )
identifier[horAlign] = identifier[Qt] . identifier[AlignRight] keyword[if] identifier[cellContainsNumber] keyword[else] identifier[Qt] . identifier[AlignLeft]
keyword[return] identifier[horAlign] | identifier[self] . identifier[verAlignment]
keyword[else] :
keyword[return] identifier[self] . identifier[horAlignment] | identifier[self] . identifier[verAlignment]
keyword[else] :
keyword[return] keyword[None]
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[logger] . identifier[error] ( literal[string] )
identifier[logger] . identifier[exception] ( identifier[ex] )
keyword[if] identifier[DEBUGGING] :
keyword[raise] | def data(self, index, role=Qt.DisplayRole):
""" Returns the data at an index for a certain role
"""
try:
if role == Qt.DisplayRole:
return to_string(self._cellValue(index), masked=self._cellMask(index), decode_bytes=self.encoding, maskFormat=self.maskFormat, strFormat=self.strFormat, intFormat=self.intFormat, numFormat=self.numFormat, otherFormat=self.otherFormat) # depends on [control=['if'], data=[]]
elif role == Qt.FontRole:
#assert self._font, "Font undefined"
return self._font # depends on [control=['if'], data=[]]
elif role == Qt.TextColorRole:
masked = self._cellMask(index)
if not is_an_array(masked) and masked:
return self.missingColor # depends on [control=['if'], data=[]]
else:
return self.dataColor # depends on [control=['if'], data=[]]
elif role == Qt.TextAlignmentRole:
if self.horAlignment == ALIGN_SMART:
cellContainsNumber = isinstance(self._cellValue(index), numbers.Number)
horAlign = Qt.AlignRight if cellContainsNumber else Qt.AlignLeft
return horAlign | self.verAlignment # depends on [control=['if'], data=[]]
else:
return self.horAlignment | self.verAlignment # depends on [control=['if'], data=[]]
else:
return None # depends on [control=['try'], data=[]]
except Exception as ex:
logger.error('Slot is not exception-safe.')
logger.exception(ex)
if DEBUGGING:
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['ex']] |
def add(lhs, rhs):
"""Returns element-wise sum of the input arrays with broadcasting.
Equivalent to ``lhs + rhs``, ``mx.nd.broadcast_add(lhs, rhs)`` and
``mx.nd.broadcast_plus(lhs, rhs)``.
.. note::
If the corresponding dimensions of two arrays have the same size or one of them has size 1,
then the arrays are broadcastable to a common shape
Parameters
----------
lhs : scalar or mxnet.ndarray.array
First array to be added.
rhs : scalar or mxnet.ndarray.array
Second array to be added.
If ``lhs.shape != rhs.shape``, they must be
broadcastable to a common shape.
Returns
-------
NDArray
The element-wise sum of the input arrays.
Examples
--------
>>> x = mx.nd.ones((2,3))
>>> y = mx.nd.arange(2).reshape((2,1))
>>> z = mx.nd.arange(2).reshape((1,2))
>>> x.asnumpy()
array([[ 1., 1., 1.],
[ 1., 1., 1.]], dtype=float32)
>>> y.asnumpy()
array([[ 0.],
[ 1.]], dtype=float32)
>>> z.asnumpy()
array([[ 0., 1.]], dtype=float32)
>>> (x+2).asnumpy()
array([[ 3., 3., 3.],
[ 3., 3., 3.]], dtype=float32)
>>> (x+y).asnumpy()
array([[ 1., 1., 1.],
[ 2., 2., 2.]], dtype=float32)
>>> mx.nd.add(x,y).asnumpy()
array([[ 1., 1., 1.],
[ 2., 2., 2.]], dtype=float32)
>>> (z + y).asnumpy()
array([[ 0., 1.],
[ 1., 2.]], dtype=float32)
"""
# pylint: disable= no-member, protected-access
# _ufunc_helper dispatches among the supplied ops (broadcast, Python-level
# add, or scalar add) based on the operand types.
return _ufunc_helper(
lhs,
rhs,
op.broadcast_add,
operator.add,
_internal._plus_scalar,
None) | def function[add, parameter[lhs, rhs]]:
constant[Returns element-wise sum of the input arrays with broadcasting.
Equivalent to ``lhs + rhs``, ``mx.nd.broadcast_add(lhs, rhs)`` and
``mx.nd.broadcast_plus(lhs, rhs)``.
.. note::
If the corresponding dimensions of two arrays have the same size or one of them has size 1,
then the arrays are broadcastable to a common shape
Parameters
----------
lhs : scalar or mxnet.ndarray.array
First array to be added.
rhs : scalar or mxnet.ndarray.array
Second array to be added.
If ``lhs.shape != rhs.shape``, they must be
broadcastable to a common shape.
Returns
-------
NDArray
The element-wise sum of the input arrays.
Examples
--------
>>> x = mx.nd.ones((2,3))
>>> y = mx.nd.arange(2).reshape((2,1))
>>> z = mx.nd.arange(2).reshape((1,2))
>>> x.asnumpy()
array([[ 1., 1., 1.],
[ 1., 1., 1.]], dtype=float32)
>>> y.asnumpy()
array([[ 0.],
[ 1.]], dtype=float32)
>>> z.asnumpy()
array([[ 0., 1.]], dtype=float32)
>>> (x+2).asnumpy()
array([[ 3., 3., 3.],
[ 3., 3., 3.]], dtype=float32)
>>> (x+y).asnumpy()
array([[ 1., 1., 1.],
[ 2., 2., 2.]], dtype=float32)
>>> mx.nd.add(x,y).asnumpy()
array([[ 1., 1., 1.],
[ 2., 2., 2.]], dtype=float32)
>>> (z + y).asnumpy()
array([[ 0., 1.],
[ 1., 2.]], dtype=float32)
]
return[call[name[_ufunc_helper], parameter[name[lhs], name[rhs], name[op].broadcast_add, name[operator].add, name[_internal]._plus_scalar, constant[None]]]] | keyword[def] identifier[add] ( identifier[lhs] , identifier[rhs] ):
literal[string]
keyword[return] identifier[_ufunc_helper] (
identifier[lhs] ,
identifier[rhs] ,
identifier[op] . identifier[broadcast_add] ,
identifier[operator] . identifier[add] ,
identifier[_internal] . identifier[_plus_scalar] ,
keyword[None] ) | def add(lhs, rhs):
"""Returns element-wise sum of the input arrays with broadcasting.
Equivalent to ``lhs + rhs``, ``mx.nd.broadcast_add(lhs, rhs)`` and
``mx.nd.broadcast_plus(lhs, rhs)``.
.. note::
If the corresponding dimensions of two arrays have the same size or one of them has size 1,
then the arrays are broadcastable to a common shape
Parameters
----------
lhs : scalar or mxnet.ndarray.array
First array to be added.
rhs : scalar or mxnet.ndarray.array
Second array to be added.
If ``lhs.shape != rhs.shape``, they must be
broadcastable to a common shape.
Returns
-------
NDArray
The element-wise sum of the input arrays.
Examples
--------
>>> x = mx.nd.ones((2,3))
>>> y = mx.nd.arange(2).reshape((2,1))
>>> z = mx.nd.arange(2).reshape((1,2))
>>> x.asnumpy()
array([[ 1., 1., 1.],
[ 1., 1., 1.]], dtype=float32)
>>> y.asnumpy()
array([[ 0.],
[ 1.]], dtype=float32)
>>> z.asnumpy()
array([[ 0., 1.]], dtype=float32)
>>> (x+2).asnumpy()
array([[ 3., 3., 3.],
[ 3., 3., 3.]], dtype=float32)
>>> (x+y).asnumpy()
array([[ 1., 1., 1.],
[ 2., 2., 2.]], dtype=float32)
>>> mx.nd.add(x,y).asnumpy()
array([[ 1., 1., 1.],
[ 2., 2., 2.]], dtype=float32)
>>> (z + y).asnumpy()
array([[ 0., 1.],
[ 1., 2.]], dtype=float32)
"""
# pylint: disable= no-member, protected-access
return _ufunc_helper(lhs, rhs, op.broadcast_add, operator.add, _internal._plus_scalar, None) |
def usesTime(self, fmt=None):
'''
Check if the format uses the creation time of the record.
'''
# Default to this formatter's own format string.
if fmt is None:
fmt = self._fmt
# When fmt is not a string, use its first element as the format string.
if not isinstance(fmt, basestring):
fmt = fmt[0]
return fmt.find('%(asctime)') >= 0 | def function[usesTime, parameter[self, fmt]]:
constant[
Check if the format uses the creation time of the record.
]
if compare[name[fmt] is constant[None]] begin[:]
variable[fmt] assign[=] name[self]._fmt
if <ast.UnaryOp object at 0x7da2054a78e0> begin[:]
variable[fmt] assign[=] call[name[fmt]][constant[0]]
return[compare[call[name[fmt].find, parameter[constant[%(asctime)]]] greater_or_equal[>=] constant[0]]] | keyword[def] identifier[usesTime] ( identifier[self] , identifier[fmt] = keyword[None] ):
literal[string]
keyword[if] identifier[fmt] keyword[is] keyword[None] :
identifier[fmt] = identifier[self] . identifier[_fmt]
keyword[if] keyword[not] identifier[isinstance] ( identifier[fmt] , identifier[basestring] ):
identifier[fmt] = identifier[fmt] [ literal[int] ]
keyword[return] identifier[fmt] . identifier[find] ( literal[string] )>= literal[int] | def usesTime(self, fmt=None):
"""
Check if the format uses the creation time of the record.
"""
if fmt is None:
fmt = self._fmt # depends on [control=['if'], data=['fmt']]
if not isinstance(fmt, basestring):
fmt = fmt[0] # depends on [control=['if'], data=[]]
return fmt.find('%(asctime)') >= 0 |
def _read_bytes(self, b):
"""Reads the requested number of bytes from a streaming message body.
:param int b: Number of bytes to read
:raises NotSupportedError: if content type is not supported
"""
_LOGGER.debug("%d bytes requested from stream with content type: %s", b, self.content_type)
# Skip reading when the buffer already covers the request (or b is negative)
# or the source message has been fully consumed.
if 0 <= b <= len(self.output_buffer) or self.__message_complete:
_LOGGER.debug("No need to read from source stream or source stream closed")
return
if self.content_type == ContentType.FRAMED_DATA:
_LOGGER.debug("Reading to framed body")
self.output_buffer += self._read_bytes_to_framed_body(b)
elif self.content_type == ContentType.NO_FRAMING:
_LOGGER.debug("Reading to non-framed body")
self.output_buffer += self._read_bytes_to_non_framed_body(b)
else:
raise NotSupportedError("Unsupported content type")
# To maintain backwards compatibility, only enforce this if a CMM is provided by the caller.
if self.config.key_provider is None and self.config.source_length is not None:
# Enforce that if the caller provided a source length value, the total bytes encrypted
# must not exceed that value.
if self._bytes_encrypted > self.config.source_length:
raise CustomMaximumValueExceeded(
"Bytes encrypted has exceeded stated source length estimate:\n{actual:d} > {estimated:d}".format(
actual=self._bytes_encrypted, estimated=self.config.source_length
)
) | def function[_read_bytes, parameter[self, b]]:
constant[Reads the requested number of bytes from a streaming message body.
:param int b: Number of bytes to read
:raises NotSupportedError: if content type is not supported
]
call[name[_LOGGER].debug, parameter[constant[%d bytes requested from stream with content type: %s], name[b], name[self].content_type]]
if <ast.BoolOp object at 0x7da2047e9150> begin[:]
call[name[_LOGGER].debug, parameter[constant[No need to read from source stream or source stream closed]]]
return[None]
if compare[name[self].content_type equal[==] name[ContentType].FRAMED_DATA] begin[:]
call[name[_LOGGER].debug, parameter[constant[Reading to framed body]]]
<ast.AugAssign object at 0x7da2047e8280>
if <ast.BoolOp object at 0x7da18fe92830> begin[:]
if compare[name[self]._bytes_encrypted greater[>] name[self].config.source_length] begin[:]
<ast.Raise object at 0x7da18fe92c80> | keyword[def] identifier[_read_bytes] ( identifier[self] , identifier[b] ):
literal[string]
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[b] , identifier[self] . identifier[content_type] )
keyword[if] literal[int] <= identifier[b] <= identifier[len] ( identifier[self] . identifier[output_buffer] ) keyword[or] identifier[self] . identifier[__message_complete] :
identifier[_LOGGER] . identifier[debug] ( literal[string] )
keyword[return]
keyword[if] identifier[self] . identifier[content_type] == identifier[ContentType] . identifier[FRAMED_DATA] :
identifier[_LOGGER] . identifier[debug] ( literal[string] )
identifier[self] . identifier[output_buffer] += identifier[self] . identifier[_read_bytes_to_framed_body] ( identifier[b] )
keyword[elif] identifier[self] . identifier[content_type] == identifier[ContentType] . identifier[NO_FRAMING] :
identifier[_LOGGER] . identifier[debug] ( literal[string] )
identifier[self] . identifier[output_buffer] += identifier[self] . identifier[_read_bytes_to_non_framed_body] ( identifier[b] )
keyword[else] :
keyword[raise] identifier[NotSupportedError] ( literal[string] )
keyword[if] identifier[self] . identifier[config] . identifier[key_provider] keyword[is] keyword[None] keyword[and] identifier[self] . identifier[config] . identifier[source_length] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[_bytes_encrypted] > identifier[self] . identifier[config] . identifier[source_length] :
keyword[raise] identifier[CustomMaximumValueExceeded] (
literal[string] . identifier[format] (
identifier[actual] = identifier[self] . identifier[_bytes_encrypted] , identifier[estimated] = identifier[self] . identifier[config] . identifier[source_length]
)
) | def _read_bytes(self, b):
"""Reads the requested number of bytes from a streaming message body.
:param int b: Number of bytes to read
:raises NotSupportedError: if content type is not supported
"""
_LOGGER.debug('%d bytes requested from stream with content type: %s', b, self.content_type)
if 0 <= b <= len(self.output_buffer) or self.__message_complete:
_LOGGER.debug('No need to read from source stream or source stream closed')
return # depends on [control=['if'], data=[]]
if self.content_type == ContentType.FRAMED_DATA:
_LOGGER.debug('Reading to framed body')
self.output_buffer += self._read_bytes_to_framed_body(b) # depends on [control=['if'], data=[]]
elif self.content_type == ContentType.NO_FRAMING:
_LOGGER.debug('Reading to non-framed body')
self.output_buffer += self._read_bytes_to_non_framed_body(b) # depends on [control=['if'], data=[]]
else:
raise NotSupportedError('Unsupported content type')
# To maintain backwards compatibility, only enforce this if a CMM is provided by the caller.
if self.config.key_provider is None and self.config.source_length is not None:
# Enforce that if the caller provided a source length value, the total bytes encrypted
# must not exceed that value.
if self._bytes_encrypted > self.config.source_length:
raise CustomMaximumValueExceeded('Bytes encrypted has exceeded stated source length estimate:\n{actual:d} > {estimated:d}'.format(actual=self._bytes_encrypted, estimated=self.config.source_length)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def get_device_activity(self, type_p):
"""Gets the current activity type of given devices or device groups.
in type_p of type :class:`DeviceType`
return activity of type :class:`DeviceActivity`
raises :class:`OleErrorInvalidarg`
Invalid device type.
"""
if not isinstance(type_p, list):
raise TypeError("type_p can only be an instance of type list")
# NOTE(review): only the first 10 entries are type-checked — confirm this
# limit is intentional and not an oversight.
for a in type_p[:10]:
if not isinstance(a, DeviceType):
raise TypeError(
"array can only contain objects of type DeviceType")
activity = self._call("getDeviceActivity",
in_p=[type_p])
# Wrap raw results in DeviceActivity enum-like objects.
activity = [DeviceActivity(a) for a in activity]
return activity | def function[get_device_activity, parameter[self, type_p]]:
constant[Gets the current activity type of given devices or device groups.
in type_p of type :class:`DeviceType`
return activity of type :class:`DeviceActivity`
raises :class:`OleErrorInvalidarg`
Invalid device type.
]
if <ast.UnaryOp object at 0x7da20e9b23b0> begin[:]
<ast.Raise object at 0x7da20e9b1ea0>
for taget[name[a]] in starred[call[name[type_p]][<ast.Slice object at 0x7da20e9b3880>]] begin[:]
if <ast.UnaryOp object at 0x7da20e9b3b20> begin[:]
<ast.Raise object at 0x7da20e9b12a0>
variable[activity] assign[=] call[name[self]._call, parameter[constant[getDeviceActivity]]]
variable[activity] assign[=] <ast.ListComp object at 0x7da20e9b1d20>
return[name[activity]] | keyword[def] identifier[get_device_activity] ( identifier[self] , identifier[type_p] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[type_p] , identifier[list] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[for] identifier[a] keyword[in] identifier[type_p] [: literal[int] ]:
keyword[if] keyword[not] identifier[isinstance] ( identifier[a] , identifier[DeviceType] ):
keyword[raise] identifier[TypeError] (
literal[string] )
identifier[activity] = identifier[self] . identifier[_call] ( literal[string] ,
identifier[in_p] =[ identifier[type_p] ])
identifier[activity] =[ identifier[DeviceActivity] ( identifier[a] ) keyword[for] identifier[a] keyword[in] identifier[activity] ]
keyword[return] identifier[activity] | def get_device_activity(self, type_p):
"""Gets the current activity type of given devices or device groups.
in type_p of type :class:`DeviceType`
return activity of type :class:`DeviceActivity`
raises :class:`OleErrorInvalidarg`
Invalid device type.
"""
if not isinstance(type_p, list):
raise TypeError('type_p can only be an instance of type list') # depends on [control=['if'], data=[]]
for a in type_p[:10]:
if not isinstance(a, DeviceType):
raise TypeError('array can only contain objects of type DeviceType') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']]
activity = self._call('getDeviceActivity', in_p=[type_p])
activity = [DeviceActivity(a) for a in activity]
return activity |
def marathon_on_marathon(name='marathon-user'):
""" Context manager for altering the marathon client for MoM
:param name: service name of MoM to use
:type name: str
"""
# Snapshot the current config so it can be restored on exit.
toml_config_o = config.get_config()
dcos_url = config.get_config_val('core.dcos_url', toml_config_o)
service_name = 'service/{}/'.format(name)
marathon_url = urllib.parse.urljoin(dcos_url, service_name)
# Point the marathon client at the Marathon-on-Marathon service endpoint.
config.set_val('marathon.url', marathon_url)
try:
yield
finally:
# return config to previous state
config.save(toml_config_o) | def function[marathon_on_marathon, parameter[name]]:
constant[ Context manager for altering the marathon client for MoM
:param name: service name of MoM to use
:type name: str
]
variable[toml_config_o] assign[=] call[name[config].get_config, parameter[]]
variable[dcos_url] assign[=] call[name[config].get_config_val, parameter[constant[core.dcos_url], name[toml_config_o]]]
variable[service_name] assign[=] call[constant[service/{}/].format, parameter[name[name]]]
variable[marathon_url] assign[=] call[name[urllib].parse.urljoin, parameter[name[dcos_url], name[service_name]]]
call[name[config].set_val, parameter[constant[marathon.url], name[marathon_url]]]
<ast.Try object at 0x7da18f720fa0> | keyword[def] identifier[marathon_on_marathon] ( identifier[name] = literal[string] ):
literal[string]
identifier[toml_config_o] = identifier[config] . identifier[get_config] ()
identifier[dcos_url] = identifier[config] . identifier[get_config_val] ( literal[string] , identifier[toml_config_o] )
identifier[service_name] = literal[string] . identifier[format] ( identifier[name] )
identifier[marathon_url] = identifier[urllib] . identifier[parse] . identifier[urljoin] ( identifier[dcos_url] , identifier[service_name] )
identifier[config] . identifier[set_val] ( literal[string] , identifier[marathon_url] )
keyword[try] :
keyword[yield]
keyword[finally] :
identifier[config] . identifier[save] ( identifier[toml_config_o] ) | def marathon_on_marathon(name='marathon-user'):
""" Context manager for altering the marathon client for MoM
:param name: service name of MoM to use
:type name: str
"""
toml_config_o = config.get_config()
dcos_url = config.get_config_val('core.dcos_url', toml_config_o)
service_name = 'service/{}/'.format(name)
marathon_url = urllib.parse.urljoin(dcos_url, service_name)
config.set_val('marathon.url', marathon_url)
try:
yield # depends on [control=['try'], data=[]]
finally:
# return config to previous state
config.save(toml_config_o) |
def delete_location(self):
    """Remove every `geo:lat` and `geo:long` metadata property from this Thing."""
    # Each predicate normally matches at most one triple, but remove all just in case.
    for predicate in (GEO_NS.lat, GEO_NS.long):
        for triple in self._graph.triples((None, predicate, None)):
            self._graph.remove(triple)
constant[Deletes all the `geo:lat` and `geo:long` metadata properties on your Thing
]
for taget[tuple[[<ast.Name object at 0x7da2041d85e0>, <ast.Name object at 0x7da2041db6d0>, <ast.Name object at 0x7da2041d9db0>]]] in starred[call[name[self]._graph.triples, parameter[tuple[[<ast.Constant object at 0x7da2041dad10>, <ast.Attribute object at 0x7da2041dbdc0>, <ast.Constant object at 0x7da2041d9810>]]]]] begin[:]
call[name[self]._graph.remove, parameter[tuple[[<ast.Name object at 0x7da2041d96c0>, <ast.Name object at 0x7da2041da770>, <ast.Name object at 0x7da2041d8be0>]]]]
for taget[tuple[[<ast.Name object at 0x7da2041dbf40>, <ast.Name object at 0x7da2041da710>, <ast.Name object at 0x7da2041dbc40>]]] in starred[call[name[self]._graph.triples, parameter[tuple[[<ast.Constant object at 0x7da2041d82e0>, <ast.Attribute object at 0x7da2041d8430>, <ast.Constant object at 0x7da2041db2b0>]]]]] begin[:]
call[name[self]._graph.remove, parameter[tuple[[<ast.Name object at 0x7da2041da020>, <ast.Name object at 0x7da2041d8340>, <ast.Name object at 0x7da2041da0e0>]]]] | keyword[def] identifier[delete_location] ( identifier[self] ):
literal[string]
keyword[for] identifier[s] , identifier[p] , identifier[o] keyword[in] identifier[self] . identifier[_graph] . identifier[triples] (( keyword[None] , identifier[GEO_NS] . identifier[lat] , keyword[None] )):
identifier[self] . identifier[_graph] . identifier[remove] (( identifier[s] , identifier[p] , identifier[o] ))
keyword[for] identifier[s] , identifier[p] , identifier[o] keyword[in] identifier[self] . identifier[_graph] . identifier[triples] (( keyword[None] , identifier[GEO_NS] . identifier[long] , keyword[None] )):
identifier[self] . identifier[_graph] . identifier[remove] (( identifier[s] , identifier[p] , identifier[o] )) | def delete_location(self):
"""Deletes all the `geo:lat` and `geo:long` metadata properties on your Thing
"""
# normally this should only remove one triple each
for (s, p, o) in self._graph.triples((None, GEO_NS.lat, None)):
self._graph.remove((s, p, o)) # depends on [control=['for'], data=[]]
for (s, p, o) in self._graph.triples((None, GEO_NS.long, None)):
self._graph.remove((s, p, o)) # depends on [control=['for'], data=[]] |
def write_two_phases(filename, data, io):
    """
    Writes a file in two phases to the filesystem.

    First writes the data to a temporary file (in the same directory) and then renames the
    temporary file onto the target. If the file already exists and its content is equal to the
    data that must be written, no action is taken. This has the following advantages:
    * In case of some write error (e.g. disk full) the original file is kept intact and no file
      with partial data is written.
    * Renaming a file is atomic. So, running processes will never read partially written data.

    :param str filename: The name of the file where the data must be stored.
    :param str data: The data that must be written.
    :param pystratum.style.PyStratumStyle.PyStratumStyle io: The output decorator.
    """
    # EAFP: read the current content if the file exists. A missing file simply means the data
    # must be written; this also avoids the exists()/open() race of the LBYL form.
    try:
        with open(filename, 'r') as file:
            up_to_date = file.read() == data
    except FileNotFoundError:
        up_to_date = False

    if up_to_date:
        io.text('File <fso>{0}</fso> is up to date'.format(filename))
        return

    tmp_filename = filename + '.tmp'
    # Plain 'w' suffices: the temporary file is only written, never read back.
    with open(tmp_filename, 'w') as file:
        file.write(data)
    # os.replace is atomic on both POSIX and Windows: readers see old or new content, never a mix.
    os.replace(tmp_filename, filename)
    io.text('Wrote: <fso>{0}</fso>'.format(filename))
constant[
Writes a file in two phase to the filesystem.
First write the data to a temporary file (in the same directory) and than renames the temporary file. If the
file already exists and its content is equal to the data that must be written no action is taken. This has the
following advantages:
* In case of some write error (e.g. disk full) the original file is kep in tact and no file with partially data
is written.
* Renaming a file is atomic. So, running processes will never read a partially written data.
:param str filename: The name of the file were the data must be stored.
:param str data: The data that must be written.
:param pystratum.style.PyStratumStyle.PyStratumStyle io: The output decorator.
]
variable[write_flag] assign[=] constant[True]
if call[name[os].path.exists, parameter[name[filename]]] begin[:]
with call[name[open], parameter[name[filename], constant[r]]] begin[:]
variable[old_data] assign[=] call[name[file].read, parameter[]]
if compare[name[data] equal[==] name[old_data]] begin[:]
variable[write_flag] assign[=] constant[False]
if name[write_flag] begin[:]
variable[tmp_filename] assign[=] binary_operation[name[filename] + constant[.tmp]]
with call[name[open], parameter[name[tmp_filename], constant[w+]]] begin[:]
call[name[file].write, parameter[name[data]]]
call[name[os].replace, parameter[name[tmp_filename], name[filename]]]
call[name[io].text, parameter[call[constant[Wrote: <fso>{0}</fso>].format, parameter[name[filename]]]]] | keyword[def] identifier[write_two_phases] ( identifier[filename] , identifier[data] , identifier[io] ):
literal[string]
identifier[write_flag] = keyword[True]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[filename] ):
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[file] :
identifier[old_data] = identifier[file] . identifier[read] ()
keyword[if] identifier[data] == identifier[old_data] :
identifier[write_flag] = keyword[False]
keyword[if] identifier[write_flag] :
identifier[tmp_filename] = identifier[filename] + literal[string]
keyword[with] identifier[open] ( identifier[tmp_filename] , literal[string] ) keyword[as] identifier[file] :
identifier[file] . identifier[write] ( identifier[data] )
identifier[os] . identifier[replace] ( identifier[tmp_filename] , identifier[filename] )
identifier[io] . identifier[text] ( literal[string] . identifier[format] ( identifier[filename] ))
keyword[else] :
identifier[io] . identifier[text] ( literal[string] . identifier[format] ( identifier[filename] )) | def write_two_phases(filename, data, io):
"""
Writes a file in two phase to the filesystem.
First write the data to a temporary file (in the same directory) and than renames the temporary file. If the
file already exists and its content is equal to the data that must be written no action is taken. This has the
following advantages:
* In case of some write error (e.g. disk full) the original file is kep in tact and no file with partially data
is written.
* Renaming a file is atomic. So, running processes will never read a partially written data.
:param str filename: The name of the file were the data must be stored.
:param str data: The data that must be written.
:param pystratum.style.PyStratumStyle.PyStratumStyle io: The output decorator.
"""
write_flag = True
if os.path.exists(filename):
with open(filename, 'r') as file:
old_data = file.read()
if data == old_data:
write_flag = False # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['file']] # depends on [control=['if'], data=[]]
if write_flag:
tmp_filename = filename + '.tmp'
with open(tmp_filename, 'w+') as file:
file.write(data) # depends on [control=['with'], data=['file']]
os.replace(tmp_filename, filename)
io.text('Wrote: <fso>{0}</fso>'.format(filename)) # depends on [control=['if'], data=[]]
else:
io.text('File <fso>{0}</fso> is up to date'.format(filename)) |
def _remove_from_index_operations(self, which, transforms):
    """
    Helper preventing copy code.
    Remove the given transforms (prior, constraint, ...) from `which`'s
    parameter index operations and return the union of the freed indices.
    """
    if len(transforms) == 0:
        transforms = which.properties()
    freed_total = np.empty((0,), dtype=int)
    for transform in list(transforms):
        freed = which.remove(transform, self._raveled_index())
        freed_total = np.union1d(freed_total, freed)
        # Unfixing must be propagated up to the highest parent.
        if transform is __fixed__:
            self._highest_parent_._set_unfixed(self, freed)
    return freed_total
constant[
Helper preventing copy code.
Remove given what (transform prior etc) from which param index ops.
]
if compare[call[name[len], parameter[name[transforms]]] equal[==] constant[0]] begin[:]
variable[transforms] assign[=] call[name[which].properties, parameter[]]
variable[removed] assign[=] call[name[np].empty, parameter[tuple[[<ast.Constant object at 0x7da1b0ec3310>]]]]
for taget[name[t]] in starred[call[name[list], parameter[name[transforms]]]] begin[:]
variable[unconstrained] assign[=] call[name[which].remove, parameter[name[t], call[name[self]._raveled_index, parameter[]]]]
variable[removed] assign[=] call[name[np].union1d, parameter[name[removed], name[unconstrained]]]
if compare[name[t] is name[__fixed__]] begin[:]
call[name[self]._highest_parent_._set_unfixed, parameter[name[self], name[unconstrained]]]
return[name[removed]] | keyword[def] identifier[_remove_from_index_operations] ( identifier[self] , identifier[which] , identifier[transforms] ):
literal[string]
keyword[if] identifier[len] ( identifier[transforms] )== literal[int] :
identifier[transforms] = identifier[which] . identifier[properties] ()
identifier[removed] = identifier[np] . identifier[empty] (( literal[int] ,), identifier[dtype] = identifier[int] )
keyword[for] identifier[t] keyword[in] identifier[list] ( identifier[transforms] ):
identifier[unconstrained] = identifier[which] . identifier[remove] ( identifier[t] , identifier[self] . identifier[_raveled_index] ())
identifier[removed] = identifier[np] . identifier[union1d] ( identifier[removed] , identifier[unconstrained] )
keyword[if] identifier[t] keyword[is] identifier[__fixed__] :
identifier[self] . identifier[_highest_parent_] . identifier[_set_unfixed] ( identifier[self] , identifier[unconstrained] )
keyword[return] identifier[removed] | def _remove_from_index_operations(self, which, transforms):
"""
Helper preventing copy code.
Remove given what (transform prior etc) from which param index ops.
"""
if len(transforms) == 0:
transforms = which.properties() # depends on [control=['if'], data=[]]
removed = np.empty((0,), dtype=int)
for t in list(transforms):
unconstrained = which.remove(t, self._raveled_index())
removed = np.union1d(removed, unconstrained)
if t is __fixed__:
self._highest_parent_._set_unfixed(self, unconstrained) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['t']]
return removed |
def accept(self, origin, protocol):
    """
    Create a new route attached to an L{IBoxReceiver} produced by the
    L{IBoxReceiverFactory} registered for the named protocol.

    @type origin: C{unicode}
    @param origin: The identifier of a route on the peer which will be
        associated with this connection.  Boxes sent back by the protocol
        created in this call are routed there.

    @type protocol: C{unicode}
    @param protocol: The name of the protocol to which to establish a
        connection.

    @raise ProtocolUnknown: If no factory can be found for the named
        protocol.

    @return: A C{dict} with a C{'route'} key mapping to the newly created
        C{unicode} route identifier for this connection.
    """
    for factory in self.store.powerupsFor(IBoxReceiverFactory):
        # XXX What if there's a duplicate somewhere?
        if factory.protocol != protocol:
            continue
        route = self.router.bindRoute(factory.getBoxReceiver())
        # Defer the connection instead of doing it inline; a hook on the
        # box would be nicer — see Twisted ticket #3479.
        self.reactor.callLater(0, route.connectTo, origin)
        return {'route': route.localRouteName}
    raise ProtocolUnknown()
constant[
Create a new route attached to a L{IBoxReceiver} created by the
L{IBoxReceiverFactory} with the indicated protocol.
@type origin: C{unicode}
@param origin: The identifier of a route on the peer which will be
associated with this connection. Boxes sent back by the protocol
which is created in this call will be sent back to this route.
@type protocol: C{unicode}
@param protocol: The name of the protocol to which to establish a
connection.
@raise ProtocolUnknown: If no factory can be found for the named
protocol.
@return: A newly created C{unicode} route identifier for this
connection (as the value of a C{dict} with a C{'route'} key).
]
for taget[name[factory]] in starred[call[name[self].store.powerupsFor, parameter[name[IBoxReceiverFactory]]]] begin[:]
if compare[name[factory].protocol equal[==] name[protocol]] begin[:]
variable[receiver] assign[=] call[name[factory].getBoxReceiver, parameter[]]
variable[route] assign[=] call[name[self].router.bindRoute, parameter[name[receiver]]]
call[name[self].reactor.callLater, parameter[constant[0], name[route].connectTo, name[origin]]]
return[dictionary[[<ast.Constant object at 0x7da1b0a344c0>], [<ast.Attribute object at 0x7da1b0a34c70>]]]
<ast.Raise object at 0x7da1b0b829b0> | keyword[def] identifier[accept] ( identifier[self] , identifier[origin] , identifier[protocol] ):
literal[string]
keyword[for] identifier[factory] keyword[in] identifier[self] . identifier[store] . identifier[powerupsFor] ( identifier[IBoxReceiverFactory] ):
keyword[if] identifier[factory] . identifier[protocol] == identifier[protocol] :
identifier[receiver] = identifier[factory] . identifier[getBoxReceiver] ()
identifier[route] = identifier[self] . identifier[router] . identifier[bindRoute] ( identifier[receiver] )
identifier[self] . identifier[reactor] . identifier[callLater] ( literal[int] , identifier[route] . identifier[connectTo] , identifier[origin] )
keyword[return] { literal[string] : identifier[route] . identifier[localRouteName] }
keyword[raise] identifier[ProtocolUnknown] () | def accept(self, origin, protocol):
"""
Create a new route attached to a L{IBoxReceiver} created by the
L{IBoxReceiverFactory} with the indicated protocol.
@type origin: C{unicode}
@param origin: The identifier of a route on the peer which will be
associated with this connection. Boxes sent back by the protocol
which is created in this call will be sent back to this route.
@type protocol: C{unicode}
@param protocol: The name of the protocol to which to establish a
connection.
@raise ProtocolUnknown: If no factory can be found for the named
protocol.
@return: A newly created C{unicode} route identifier for this
connection (as the value of a C{dict} with a C{'route'} key).
"""
for factory in self.store.powerupsFor(IBoxReceiverFactory):
# XXX What if there's a duplicate somewhere?
if factory.protocol == protocol:
receiver = factory.getBoxReceiver()
route = self.router.bindRoute(receiver)
# This might be better implemented using a hook on the box.
# See Twisted ticket #3479.
self.reactor.callLater(0, route.connectTo, origin)
return {'route': route.localRouteName} # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['factory']]
raise ProtocolUnknown() |
def pop(self):
    """
    Remove and return an arbitrary element from the set.

    Raises :exc:`KeyError` if the set is empty.
    """
    popped = self.redis.spop(self.key)
    if popped is not None:
        return self._unpickle(popped)
    # spop returns None only when the backing set is empty.
    raise KeyError
constant[
Remove and return an arbitrary element from the set.
Raises :exc:`KeyError` if the set is empty.
]
variable[result] assign[=] call[name[self].redis.spop, parameter[name[self].key]]
if compare[name[result] is constant[None]] begin[:]
<ast.Raise object at 0x7da18eb572e0>
return[call[name[self]._unpickle, parameter[name[result]]]] | keyword[def] identifier[pop] ( identifier[self] ):
literal[string]
identifier[result] = identifier[self] . identifier[redis] . identifier[spop] ( identifier[self] . identifier[key] )
keyword[if] identifier[result] keyword[is] keyword[None] :
keyword[raise] identifier[KeyError]
keyword[return] identifier[self] . identifier[_unpickle] ( identifier[result] ) | def pop(self):
"""
Remove and return an arbitrary element from the set.
Raises :exc:`KeyError` if the set is empty.
"""
result = self.redis.spop(self.key)
if result is None:
raise KeyError # depends on [control=['if'], data=[]]
return self._unpickle(result) |
def active_brokers(self):
    """Return set of brokers that are not inactive or decommissioned."""
    result = set()
    for candidate in self._brokers:
        if candidate.inactive or candidate.decommissioned:
            continue
        result.add(candidate)
    return result
constant[Return set of brokers that are not inactive or decommissioned.]
return[<ast.SetComp object at 0x7da1b077a800>] | keyword[def] identifier[active_brokers] ( identifier[self] ):
literal[string]
keyword[return] {
identifier[broker]
keyword[for] identifier[broker] keyword[in] identifier[self] . identifier[_brokers]
keyword[if] keyword[not] identifier[broker] . identifier[inactive] keyword[and] keyword[not] identifier[broker] . identifier[decommissioned]
} | def active_brokers(self):
"""Return set of brokers that are not inactive or decommissioned."""
return {broker for broker in self._brokers if not broker.inactive and (not broker.decommissioned)} |
def add_task_hook(self, task_hook):
    """Register a given task hook with this context.

    :type task_hook: heron.instance.src.python.utils.topology.ITaskHook
    :param task_hook: Implementation of ITaskHook
    """
    if isinstance(task_hook, ITaskHook):
        self.task_hooks.append(task_hook)
    else:
        raise TypeError("In add_task_hook(): attempt to add non ITaskHook instance, given: %s"
                        % str(type(task_hook)))
constant[Registers a specified task hook to this context
:type task_hook: heron.instance.src.python.utils.topology.ITaskHook
:param task_hook: Implementation of ITaskHook
]
if <ast.UnaryOp object at 0x7da204347c70> begin[:]
<ast.Raise object at 0x7da2043472e0>
call[name[self].task_hooks.append, parameter[name[task_hook]]] | keyword[def] identifier[add_task_hook] ( identifier[self] , identifier[task_hook] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[task_hook] , identifier[ITaskHook] ):
keyword[raise] identifier[TypeError] ( literal[string]
% identifier[str] ( identifier[type] ( identifier[task_hook] )))
identifier[self] . identifier[task_hooks] . identifier[append] ( identifier[task_hook] ) | def add_task_hook(self, task_hook):
"""Registers a specified task hook to this context
:type task_hook: heron.instance.src.python.utils.topology.ITaskHook
:param task_hook: Implementation of ITaskHook
"""
if not isinstance(task_hook, ITaskHook):
raise TypeError('In add_task_hook(): attempt to add non ITaskHook instance, given: %s' % str(type(task_hook))) # depends on [control=['if'], data=[]]
self.task_hooks.append(task_hook) |
def _ParseCmdItem(self, cmd_input, template_file=None):
    """Creates Texttable with output of command.

    Args:
      cmd_input: String, Device response.
      template_file: File object, template to parse with.

    Returns:
      TextTable containing command output.

    Raises:
      CliTableError: A template was not found for the given command.
    """
    # Build the FSM machine from the template.
    fsm = textfsm.TextFSM(template_file)
    if not self._keys:
        self._keys = set(fsm.GetValuesByAttrib('Key'))

    # Pass the raw device output through the FSM and collect each record.
    result_table = texttable.TextTable()
    result_table.header = fsm.header
    for row in fsm.ParseText(cmd_input):
        result_table.Append(row)
    return result_table
constant[Creates Texttable with output of command.
Args:
cmd_input: String, Device response.
template_file: File object, template to parse with.
Returns:
TextTable containing command output.
Raises:
CliTableError: A template was not found for the given command.
]
variable[fsm] assign[=] call[name[textfsm].TextFSM, parameter[name[template_file]]]
if <ast.UnaryOp object at 0x7da1b16316f0> begin[:]
name[self]._keys assign[=] call[name[set], parameter[call[name[fsm].GetValuesByAttrib, parameter[constant[Key]]]]]
variable[table] assign[=] call[name[texttable].TextTable, parameter[]]
name[table].header assign[=] name[fsm].header
for taget[name[record]] in starred[call[name[fsm].ParseText, parameter[name[cmd_input]]]] begin[:]
call[name[table].Append, parameter[name[record]]]
return[name[table]] | keyword[def] identifier[_ParseCmdItem] ( identifier[self] , identifier[cmd_input] , identifier[template_file] = keyword[None] ):
literal[string]
identifier[fsm] = identifier[textfsm] . identifier[TextFSM] ( identifier[template_file] )
keyword[if] keyword[not] identifier[self] . identifier[_keys] :
identifier[self] . identifier[_keys] = identifier[set] ( identifier[fsm] . identifier[GetValuesByAttrib] ( literal[string] ))
identifier[table] = identifier[texttable] . identifier[TextTable] ()
identifier[table] . identifier[header] = identifier[fsm] . identifier[header]
keyword[for] identifier[record] keyword[in] identifier[fsm] . identifier[ParseText] ( identifier[cmd_input] ):
identifier[table] . identifier[Append] ( identifier[record] )
keyword[return] identifier[table] | def _ParseCmdItem(self, cmd_input, template_file=None):
"""Creates Texttable with output of command.
Args:
cmd_input: String, Device response.
template_file: File object, template to parse with.
Returns:
TextTable containing command output.
Raises:
CliTableError: A template was not found for the given command.
"""
# Build FSM machine from the template.
fsm = textfsm.TextFSM(template_file)
if not self._keys:
self._keys = set(fsm.GetValuesByAttrib('Key')) # depends on [control=['if'], data=[]]
# Pass raw data through FSM.
table = texttable.TextTable()
table.header = fsm.header
# Fill TextTable from record entries.
for record in fsm.ParseText(cmd_input):
table.Append(record) # depends on [control=['for'], data=['record']]
return table |
def mapper_from_grid_stack_and_border(self, grid_stack, border):
    """Setup a rectangular mapper from a rectangular pixelization, as follows:

    1) If a border is supplied, relocate all of the grid-stack's regular and sub grid pixels
       beyond the border.
    2) Determine the rectangular pixelization's geometry, by laying the pixelization over the
       sub-grid.
    3) Setup the rectangular mapper from the relocated grid-stack and rectangular pixelization.

    Parameters
    ----------
    grid_stack : grids.GridStack
        A stack of grid describing the observed image's pixel coordinates (e.g. an image-grid,
        sub-grid, etc.).
    border : grids.RegularGridBorder | None
        The border of the grid-stack's regular-grid.
    """
    if border is None:
        relocated = grid_stack
    else:
        relocated = border.relocated_grid_stack_from_grid_stack(grid_stack)
    geometry = self.geometry_from_grid(grid=relocated.sub)
    return mappers.RectangularMapper(pixels=self.pixels, grid_stack=relocated,
                                     border=border, shape=self.shape, geometry=geometry)
constant[Setup a rectangular mapper from a rectangular pixelization, as follows:
1) If a border is supplied, relocate all of the grid-stack's regular and sub grid pixels beyond the border.
2) Determine the rectangular pixelization's geometry, by laying the pixelization over the sub-grid.
3) Setup the rectangular mapper from the relocated grid-stack and rectangular pixelization.
Parameters
----------
grid_stack : grids.GridStack
A stack of grid describing the observed image's pixel coordinates (e.g. an image-grid, sub-grid, etc.).
border : grids.RegularGridBorder | None
The border of the grid-stack's regular-grid.
]
if compare[name[border] is_not constant[None]] begin[:]
variable[relocated_grid_stack] assign[=] call[name[border].relocated_grid_stack_from_grid_stack, parameter[name[grid_stack]]]
variable[geometry] assign[=] call[name[self].geometry_from_grid, parameter[]]
return[call[name[mappers].RectangularMapper, parameter[]]] | keyword[def] identifier[mapper_from_grid_stack_and_border] ( identifier[self] , identifier[grid_stack] , identifier[border] ):
literal[string]
keyword[if] identifier[border] keyword[is] keyword[not] keyword[None] :
identifier[relocated_grid_stack] = identifier[border] . identifier[relocated_grid_stack_from_grid_stack] ( identifier[grid_stack] )
keyword[else] :
identifier[relocated_grid_stack] = identifier[grid_stack]
identifier[geometry] = identifier[self] . identifier[geometry_from_grid] ( identifier[grid] = identifier[relocated_grid_stack] . identifier[sub] )
keyword[return] identifier[mappers] . identifier[RectangularMapper] ( identifier[pixels] = identifier[self] . identifier[pixels] , identifier[grid_stack] = identifier[relocated_grid_stack] , identifier[border] = identifier[border] ,
identifier[shape] = identifier[self] . identifier[shape] , identifier[geometry] = identifier[geometry] ) | def mapper_from_grid_stack_and_border(self, grid_stack, border):
"""Setup a rectangular mapper from a rectangular pixelization, as follows:
1) If a border is supplied, relocate all of the grid-stack's regular and sub grid pixels beyond the border.
2) Determine the rectangular pixelization's geometry, by laying the pixelization over the sub-grid.
3) Setup the rectangular mapper from the relocated grid-stack and rectangular pixelization.
Parameters
----------
grid_stack : grids.GridStack
A stack of grid describing the observed image's pixel coordinates (e.g. an image-grid, sub-grid, etc.).
border : grids.RegularGridBorder | None
The border of the grid-stack's regular-grid.
"""
if border is not None:
relocated_grid_stack = border.relocated_grid_stack_from_grid_stack(grid_stack) # depends on [control=['if'], data=['border']]
else:
relocated_grid_stack = grid_stack
geometry = self.geometry_from_grid(grid=relocated_grid_stack.sub)
return mappers.RectangularMapper(pixels=self.pixels, grid_stack=relocated_grid_stack, border=border, shape=self.shape, geometry=geometry) |
def member_update(self, repl_id, member_id, params):
    """apply new params to replica set member

    Args:
       repl_id - replica set identity
       member_id - member index
       params - new member's params

    return True if operation success otherwise False
    """
    replica_set = self[repl_id]
    success = replica_set.member_update(member_id, params)
    # Store the (possibly mutated) replica set back under the same key.
    self[repl_id] = replica_set
    return success
constant[apply new params to replica set member
Args:
repl_id - replica set identity
member_id - member index
params - new member's params
return True if operation success otherwise False
]
variable[repl] assign[=] call[name[self]][name[repl_id]]
variable[result] assign[=] call[name[repl].member_update, parameter[name[member_id], name[params]]]
call[name[self]][name[repl_id]] assign[=] name[repl]
return[name[result]] | keyword[def] identifier[member_update] ( identifier[self] , identifier[repl_id] , identifier[member_id] , identifier[params] ):
literal[string]
identifier[repl] = identifier[self] [ identifier[repl_id] ]
identifier[result] = identifier[repl] . identifier[member_update] ( identifier[member_id] , identifier[params] )
identifier[self] [ identifier[repl_id] ]= identifier[repl]
keyword[return] identifier[result] | def member_update(self, repl_id, member_id, params):
"""apply new params to replica set member
Args:
repl_id - replica set identity
member_id - member index
params - new member's params
return True if operation success otherwise False
"""
repl = self[repl_id]
result = repl.member_update(member_id, params)
self[repl_id] = repl
return result |
def _update_cache(self, course, taskid):
    """
    Updates the cache

    :param course: a Course object
    :param taskid: a (valid) task id
    :raise InvalidNameException, TaskNotFoundException, TaskUnreadableException
    """
    if not id_checker(taskid):
        raise InvalidNameException("Task with invalid name: " + taskid)

    task_fs = self.get_task_fs(course.get_id(), taskid)
    last_modif, translation_fs, task_content = self._get_last_updates(course, taskid, task_fs, True)
    task = self._task_class(course, taskid, task_content, task_fs, translation_fs,
                            self._hook_manager, self._task_problem_types)
    self._cache[(course.get_id(), taskid)] = (task, last_modif)
constant[
Updates the cache
:param course: a Course object
:param taskid: a (valid) task id
:raise InvalidNameException, TaskNotFoundException, TaskUnreadableException
]
if <ast.UnaryOp object at 0x7da18bc73eb0> begin[:]
<ast.Raise object at 0x7da18bc73dc0>
variable[task_fs] assign[=] call[name[self].get_task_fs, parameter[call[name[course].get_id, parameter[]], name[taskid]]]
<ast.Tuple object at 0x7da18bc71990> assign[=] call[name[self]._get_last_updates, parameter[name[course], name[taskid], name[task_fs], constant[True]]]
call[name[self]._cache][tuple[[<ast.Call object at 0x7da18bc71c60>, <ast.Name object at 0x7da18bc70340>]]] assign[=] tuple[[<ast.Call object at 0x7da18bc70a30>, <ast.Name object at 0x7da18f723640>]] | keyword[def] identifier[_update_cache] ( identifier[self] , identifier[course] , identifier[taskid] ):
literal[string]
keyword[if] keyword[not] identifier[id_checker] ( identifier[taskid] ):
keyword[raise] identifier[InvalidNameException] ( literal[string] + identifier[taskid] )
identifier[task_fs] = identifier[self] . identifier[get_task_fs] ( identifier[course] . identifier[get_id] (), identifier[taskid] )
identifier[last_modif] , identifier[translation_fs] , identifier[task_content] = identifier[self] . identifier[_get_last_updates] ( identifier[course] , identifier[taskid] , identifier[task_fs] , keyword[True] )
identifier[self] . identifier[_cache] [( identifier[course] . identifier[get_id] (), identifier[taskid] )]=(
identifier[self] . identifier[_task_class] ( identifier[course] , identifier[taskid] , identifier[task_content] , identifier[task_fs] , identifier[translation_fs] , identifier[self] . identifier[_hook_manager] , identifier[self] . identifier[_task_problem_types] ),
identifier[last_modif]
) | def _update_cache(self, course, taskid):
"""
Updates the cache
:param course: a Course object
:param taskid: a (valid) task id
:raise InvalidNameException, TaskNotFoundException, TaskUnreadableException
"""
if not id_checker(taskid):
raise InvalidNameException('Task with invalid name: ' + taskid) # depends on [control=['if'], data=[]]
task_fs = self.get_task_fs(course.get_id(), taskid)
(last_modif, translation_fs, task_content) = self._get_last_updates(course, taskid, task_fs, True)
self._cache[course.get_id(), taskid] = (self._task_class(course, taskid, task_content, task_fs, translation_fs, self._hook_manager, self._task_problem_types), last_modif) |
def crypto_hash(message):
    """
    Hashes and returns the message ``message``.

    :param message: bytes
    :rtype: bytes
    """
    digest = ffi.new("unsigned char[]", crypto_hash_BYTES)
    return_code = lib.crypto_hash(digest, message, len(message))
    ensure(return_code == 0,
           'Unexpected library error',
           raising=exc.RuntimeError)
    # Copy the C buffer out into an independent bytes object.
    return ffi.buffer(digest, crypto_hash_BYTES)[:]
constant[
Hashes and returns the message ``message``.
:param message: bytes
:rtype: bytes
]
variable[digest] assign[=] call[name[ffi].new, parameter[constant[unsigned char[]], name[crypto_hash_BYTES]]]
variable[rc] assign[=] call[name[lib].crypto_hash, parameter[name[digest], name[message], call[name[len], parameter[name[message]]]]]
call[name[ensure], parameter[compare[name[rc] equal[==] constant[0]], constant[Unexpected library error]]]
return[call[call[name[ffi].buffer, parameter[name[digest], name[crypto_hash_BYTES]]]][<ast.Slice object at 0x7da18dc98f40>]] | keyword[def] identifier[crypto_hash] ( identifier[message] ):
literal[string]
identifier[digest] = identifier[ffi] . identifier[new] ( literal[string] , identifier[crypto_hash_BYTES] )
identifier[rc] = identifier[lib] . identifier[crypto_hash] ( identifier[digest] , identifier[message] , identifier[len] ( identifier[message] ))
identifier[ensure] ( identifier[rc] == literal[int] ,
literal[string] ,
identifier[raising] = identifier[exc] . identifier[RuntimeError] )
keyword[return] identifier[ffi] . identifier[buffer] ( identifier[digest] , identifier[crypto_hash_BYTES] )[:] | def crypto_hash(message):
"""
Hashes and returns the message ``message``.
:param message: bytes
:rtype: bytes
"""
digest = ffi.new('unsigned char[]', crypto_hash_BYTES)
rc = lib.crypto_hash(digest, message, len(message))
ensure(rc == 0, 'Unexpected library error', raising=exc.RuntimeError)
return ffi.buffer(digest, crypto_hash_BYTES)[:] |
def show_external_release_file(root, request):
    """
    Download a release from a download url from its package information.

    Must be used with :func:`pyshop.helpers.download.renderer_factory`
    to download the release file.

    :return: download informations
    :rtype: dict
    """
    session = DBSession()
    settings = request.registry.settings
    whlify = asbool(settings.get('pyshop.mirror.wheelify', '0'))
    release = Release.by_id(session, int(request.matchdict['release_id']))
    if whlify:
        filename = release.whlify_download_url_file
    else:
        filename = release.download_url_file
    rv = {
        'url': release.download_url,
        'filename': filename,
        'original': release.download_url_file,
        'whlify': whlify,
    }
    # Count the download on both the release and its owning package.
    release.downloads += 1
    release.package.downloads += 1
    session.add(release.package)
    session.add(release)
    request.response.date = datetime.datetime.utcnow()
    return rv
constant[
Download a release from a download url from its package information.
Must be used with :func:`pyshop.helpers.download.renderer_factory`
to download the release file.
:return: download informations
:rtype: dict
]
variable[session] assign[=] call[name[DBSession], parameter[]]
variable[settings] assign[=] name[request].registry.settings
variable[whlify] assign[=] call[name[asbool], parameter[call[name[settings].get, parameter[constant[pyshop.mirror.wheelify], constant[0]]]]]
variable[release] assign[=] call[name[Release].by_id, parameter[name[session], call[name[int], parameter[call[name[request].matchdict][constant[release_id]]]]]]
variable[filename] assign[=] <ast.IfExp object at 0x7da20c76e0b0>
variable[rv] assign[=] dictionary[[<ast.Constant object at 0x7da20c76e260>, <ast.Constant object at 0x7da20c76c340>, <ast.Constant object at 0x7da20c76e920>, <ast.Constant object at 0x7da20c76e020>], [<ast.Attribute object at 0x7da20c76d060>, <ast.Name object at 0x7da20c76d660>, <ast.Attribute object at 0x7da20c76fd90>, <ast.Name object at 0x7da20c76d090>]]
<ast.AugAssign object at 0x7da20c76c670>
<ast.AugAssign object at 0x7da20c76fa60>
call[name[session].add, parameter[name[release].package]]
call[name[session].add, parameter[name[release]]]
name[request].response.date assign[=] call[name[datetime].datetime.utcnow, parameter[]]
return[name[rv]] | keyword[def] identifier[show_external_release_file] ( identifier[root] , identifier[request] ):
literal[string]
identifier[session] = identifier[DBSession] ()
identifier[settings] = identifier[request] . identifier[registry] . identifier[settings]
identifier[whlify] = identifier[asbool] ( identifier[settings] . identifier[get] ( literal[string] , literal[string] ))
identifier[release] = identifier[Release] . identifier[by_id] ( identifier[session] , identifier[int] ( identifier[request] . identifier[matchdict] [ literal[string] ]))
identifier[filename] =( identifier[release] . identifier[whlify_download_url_file] keyword[if] identifier[whlify] keyword[else]
identifier[release] . identifier[download_url_file] )
identifier[rv] ={ literal[string] : identifier[release] . identifier[download_url] ,
literal[string] : identifier[filename] ,
literal[string] : identifier[release] . identifier[download_url_file] ,
literal[string] : identifier[whlify]
}
identifier[release] . identifier[downloads] += literal[int]
identifier[release] . identifier[package] . identifier[downloads] += literal[int]
identifier[session] . identifier[add] ( identifier[release] . identifier[package] )
identifier[session] . identifier[add] ( identifier[release] )
identifier[request] . identifier[response] . identifier[date] = identifier[datetime] . identifier[datetime] . identifier[utcnow] ()
keyword[return] identifier[rv] | def show_external_release_file(root, request):
"""
Download a release from a download url from its package information.
Must be used with :func:`pyshop.helpers.download.renderer_factory`
to download the release file.
:return: download informations
:rtype: dict
"""
session = DBSession()
settings = request.registry.settings
whlify = asbool(settings.get('pyshop.mirror.wheelify', '0'))
release = Release.by_id(session, int(request.matchdict['release_id']))
filename = release.whlify_download_url_file if whlify else release.download_url_file
rv = {'url': release.download_url, 'filename': filename, 'original': release.download_url_file, 'whlify': whlify}
release.downloads += 1
release.package.downloads += 1
session.add(release.package)
session.add(release)
request.response.date = datetime.datetime.utcnow()
return rv |
def special_format_field(self, obj, format_spec):
    """Know about any special formats"""
    if format_spec == "from_env":
        # Resolve the name against the current process environment.
        if obj not in os.environ:
            raise NoSuchEnvironmentVariable(wanted=obj)
        return os.environ[obj]
    elif format_spec == "env":
        # Emit a shell-style reference rather than resolving the value.
        return "${{{0}}}".format(obj)
constant[Know about any special formats]
if compare[name[format_spec] equal[==] constant[env]] begin[:]
return[call[constant[${{{0}}}].format, parameter[name[obj]]]] | keyword[def] identifier[special_format_field] ( identifier[self] , identifier[obj] , identifier[format_spec] ):
literal[string]
keyword[if] identifier[format_spec] == literal[string] :
keyword[return] literal[string] . identifier[format] ( identifier[obj] )
keyword[elif] identifier[format_spec] == literal[string] :
keyword[if] identifier[obj] keyword[not] keyword[in] identifier[os] . identifier[environ] :
keyword[raise] identifier[NoSuchEnvironmentVariable] ( identifier[wanted] = identifier[obj] )
keyword[return] identifier[os] . identifier[environ] [ identifier[obj] ] | def special_format_field(self, obj, format_spec):
"""Know about any special formats"""
if format_spec == 'env':
return '${{{0}}}'.format(obj) # depends on [control=['if'], data=[]]
elif format_spec == 'from_env':
if obj not in os.environ:
raise NoSuchEnvironmentVariable(wanted=obj) # depends on [control=['if'], data=['obj']]
return os.environ[obj] # depends on [control=['if'], data=[]] |
def _try_assign_utc_time(self, raw_time, time_base):
    """Try to assign a UTC time to this reading.

    Args:
        raw_time (int): The raw device timestamp.  If bit 31 is set, the
            remaining bits encode seconds since the year-2000 reference;
            otherwise the value is treated as seconds of uptime.
        time_base: UTC reference time that uptime-based timestamps are
            offset from, or None if no reference is known.

    Returns:
        The UTC time of this reading, or None if it could not be
        determined.
    """

    # Check if the raw time is encoded UTC since y2k or just uptime
    if raw_time != IOTileEvent.InvalidRawTime and (raw_time & (1 << 31)):
        # Clear the encoding flag on the *parameter*.  The previous code
        # read self.raw_time here even though all of the surrounding
        # checks test the raw_time argument, so a caller-supplied value
        # was silently ignored.
        y2k_offset = raw_time ^ (1 << 31)
        return self._Y2KReference + datetime.timedelta(seconds=y2k_offset)

    if time_base is not None:
        return time_base + datetime.timedelta(seconds=raw_time)

    return None
constant[Try to assign a UTC time to this reading.]
if <ast.BoolOp object at 0x7da2041dacb0> begin[:]
variable[y2k_offset] assign[=] binary_operation[name[self].raw_time <ast.BitXor object at 0x7da2590d6b00> binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> constant[31]]]
return[binary_operation[name[self]._Y2KReference + call[name[datetime].timedelta, parameter[]]]]
if compare[name[time_base] is_not constant[None]] begin[:]
return[binary_operation[name[time_base] + call[name[datetime].timedelta, parameter[]]]]
return[constant[None]] | keyword[def] identifier[_try_assign_utc_time] ( identifier[self] , identifier[raw_time] , identifier[time_base] ):
literal[string]
keyword[if] identifier[raw_time] != identifier[IOTileEvent] . identifier[InvalidRawTime] keyword[and] ( identifier[raw_time] &( literal[int] << literal[int] )):
identifier[y2k_offset] = identifier[self] . identifier[raw_time] ^( literal[int] << literal[int] )
keyword[return] identifier[self] . identifier[_Y2KReference] + identifier[datetime] . identifier[timedelta] ( identifier[seconds] = identifier[y2k_offset] )
keyword[if] identifier[time_base] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[time_base] + identifier[datetime] . identifier[timedelta] ( identifier[seconds] = identifier[raw_time] )
keyword[return] keyword[None] | def _try_assign_utc_time(self, raw_time, time_base):
"""Try to assign a UTC time to this reading."""
# Check if the raw time is encoded UTC since y2k or just uptime
if raw_time != IOTileEvent.InvalidRawTime and raw_time & 1 << 31:
y2k_offset = self.raw_time ^ 1 << 31
return self._Y2KReference + datetime.timedelta(seconds=y2k_offset) # depends on [control=['if'], data=[]]
if time_base is not None:
return time_base + datetime.timedelta(seconds=raw_time) # depends on [control=['if'], data=['time_base']]
return None |
def _evaluate(self, indices, norm_distances, out=None):
    """Evaluate linear interpolation.

    Modified for in-place evaluation and treatment of out-of-bounds
    points by implicitly assuming 0 at the next node.

    Parameters
    ----------
    indices : sequence of index arrays
        Per-axis indices of the "left" grid node for each evaluation
        point (one entry per interpolation axis).
    norm_distances : sequence of arrays
        Per-axis distances from the left node; presumably normalized to
        [0, 1] by the caller -- TODO confirm against the call site.
    out : array, optional
        Buffer for the result.  If given, it is zeroed and accumulated
        into in place; otherwise a new zero array is allocated.

    Returns
    -------
    array
        The interpolated values, guaranteed at least one-dimensional.
    """
    # slice for broadcasting over trailing dimensions in self.values
    vslice = (slice(None),) + (None,) * (self.values.ndim - len(indices))
    if out is None:
        out_shape = out_shape_from_meshgrid(norm_distances)
        out_dtype = self.values.dtype
        out = np.zeros(out_shape, dtype=out_dtype)
    else:
        # Caller-provided buffer is accumulated into below, so reset it.
        out[:] = 0.0
    # Weights and indices (per axis)
    low_weights, high_weights, edge_indices = _create_weight_edge_lists(
        indices, norm_distances, self.schemes, self.nn_variants)
    # Iterate over all possible combinations of [i, i+1] for each
    # axis, resulting in a loop of length 2**ndim.
    # 'l'/'h' select the low/high weight on each axis; `edge` is the
    # matching corner index tuple into self.values.
    for lo_hi, edge in zip(product(*([['l', 'h']] * len(indices))),
                           product(*edge_indices)):
        weight = 1.0
        # TODO: determine best summation order from array strides
        for lh, w_lo, w_hi in zip(lo_hi, low_weights, high_weights):
            # We don't multiply in-place to exploit the cheap operations
            # in the beginning: sizes grow gradually as following:
            # (n, 1, 1, ...) -> (n, m, 1, ...) -> ...
            # Hence, it is faster to build up the weight array instead
            # of doing full-size operations from the beginning.
            if lh == 'l':
                weight = weight * w_lo
            else:
                weight = weight * w_hi
        # Accumulate this corner's contribution, broadcasting the weight
        # over any trailing dimensions of self.values via vslice.
        out += np.asarray(self.values[edge]) * weight[vslice]
    return np.array(out, copy=False, ndmin=1)
constant[Evaluate linear interpolation.
Modified for in-place evaluation and treatment of out-of-bounds
points by implicitly assuming 0 at the next node.]
variable[vslice] assign[=] binary_operation[tuple[[<ast.Call object at 0x7da20c794d30>]] + binary_operation[tuple[[<ast.Constant object at 0x7da20c795b70>]] * binary_operation[name[self].values.ndim - call[name[len], parameter[name[indices]]]]]]
if compare[name[out] is constant[None]] begin[:]
variable[out_shape] assign[=] call[name[out_shape_from_meshgrid], parameter[name[norm_distances]]]
variable[out_dtype] assign[=] name[self].values.dtype
variable[out] assign[=] call[name[np].zeros, parameter[name[out_shape]]]
<ast.Tuple object at 0x7da1b1ec59f0> assign[=] call[name[_create_weight_edge_lists], parameter[name[indices], name[norm_distances], name[self].schemes, name[self].nn_variants]]
for taget[tuple[[<ast.Name object at 0x7da1b1ec7b20>, <ast.Name object at 0x7da1b1ec5f30>]]] in starred[call[name[zip], parameter[call[name[product], parameter[<ast.Starred object at 0x7da1b1ec7910>]], call[name[product], parameter[<ast.Starred object at 0x7da1b1e7ae90>]]]]] begin[:]
variable[weight] assign[=] constant[1.0]
for taget[tuple[[<ast.Name object at 0x7da1b1e7af50>, <ast.Name object at 0x7da1b1e78f40>, <ast.Name object at 0x7da1b1e7aef0>]]] in starred[call[name[zip], parameter[name[lo_hi], name[low_weights], name[high_weights]]]] begin[:]
if compare[name[lh] equal[==] constant[l]] begin[:]
variable[weight] assign[=] binary_operation[name[weight] * name[w_lo]]
<ast.AugAssign object at 0x7da1b1ec52d0>
return[call[name[np].array, parameter[name[out]]]] | keyword[def] identifier[_evaluate] ( identifier[self] , identifier[indices] , identifier[norm_distances] , identifier[out] = keyword[None] ):
literal[string]
identifier[vslice] =( identifier[slice] ( keyword[None] ),)+( keyword[None] ,)*( identifier[self] . identifier[values] . identifier[ndim] - identifier[len] ( identifier[indices] ))
keyword[if] identifier[out] keyword[is] keyword[None] :
identifier[out_shape] = identifier[out_shape_from_meshgrid] ( identifier[norm_distances] )
identifier[out_dtype] = identifier[self] . identifier[values] . identifier[dtype]
identifier[out] = identifier[np] . identifier[zeros] ( identifier[out_shape] , identifier[dtype] = identifier[out_dtype] )
keyword[else] :
identifier[out] [:]= literal[int]
identifier[low_weights] , identifier[high_weights] , identifier[edge_indices] = identifier[_create_weight_edge_lists] (
identifier[indices] , identifier[norm_distances] , identifier[self] . identifier[schemes] , identifier[self] . identifier[nn_variants] )
keyword[for] identifier[lo_hi] , identifier[edge] keyword[in] identifier[zip] ( identifier[product] (*([[ literal[string] , literal[string] ]]* identifier[len] ( identifier[indices] ))),
identifier[product] (* identifier[edge_indices] )):
identifier[weight] = literal[int]
keyword[for] identifier[lh] , identifier[w_lo] , identifier[w_hi] keyword[in] identifier[zip] ( identifier[lo_hi] , identifier[low_weights] , identifier[high_weights] ):
keyword[if] identifier[lh] == literal[string] :
identifier[weight] = identifier[weight] * identifier[w_lo]
keyword[else] :
identifier[weight] = identifier[weight] * identifier[w_hi]
identifier[out] += identifier[np] . identifier[asarray] ( identifier[self] . identifier[values] [ identifier[edge] ])* identifier[weight] [ identifier[vslice] ]
keyword[return] identifier[np] . identifier[array] ( identifier[out] , identifier[copy] = keyword[False] , identifier[ndmin] = literal[int] ) | def _evaluate(self, indices, norm_distances, out=None):
"""Evaluate linear interpolation.
Modified for in-place evaluation and treatment of out-of-bounds
points by implicitly assuming 0 at the next node."""
# slice for broadcasting over trailing dimensions in self.values
vslice = (slice(None),) + (None,) * (self.values.ndim - len(indices))
if out is None:
out_shape = out_shape_from_meshgrid(norm_distances)
out_dtype = self.values.dtype
out = np.zeros(out_shape, dtype=out_dtype) # depends on [control=['if'], data=['out']]
else:
out[:] = 0.0
# Weights and indices (per axis)
(low_weights, high_weights, edge_indices) = _create_weight_edge_lists(indices, norm_distances, self.schemes, self.nn_variants)
# Iterate over all possible combinations of [i, i+1] for each
# axis, resulting in a loop of length 2**ndim
for (lo_hi, edge) in zip(product(*[['l', 'h']] * len(indices)), product(*edge_indices)):
weight = 1.0
# TODO: determine best summation order from array strides
for (lh, w_lo, w_hi) in zip(lo_hi, low_weights, high_weights):
# We don't multiply in-place to exploit the cheap operations
# in the beginning: sizes grow gradually as following:
# (n, 1, 1, ...) -> (n, m, 1, ...) -> ...
# Hence, it is faster to build up the weight array instead
# of doing full-size operations from the beginning.
if lh == 'l':
weight = weight * w_lo # depends on [control=['if'], data=[]]
else:
weight = weight * w_hi # depends on [control=['for'], data=[]]
out += np.asarray(self.values[edge]) * weight[vslice] # depends on [control=['for'], data=[]]
return np.array(out, copy=False, ndmin=1) |
def from_connections(cls, caption, connections):
    """Create a new Data Source give a list of Connections."""
    root = ET.Element('datasource', caption=caption, version='10.0', inline='true')
    federated = ET.SubElement(root, 'connection')
    federated.set('class', 'federated')
    container = ET.SubElement(federated, 'named-connections')
    # Wrap each connection in a <named-connection> with a unique name.
    for connection in connections:
        named = ET.SubElement(
            container,
            'named-connection',
            name=_make_unique_name(connection.dbclass),
            caption=connection.server)
        named.append(connection._connectionXML)
    return cls(root)
constant[Create a new Data Source give a list of Connections.]
variable[root] assign[=] call[name[ET].Element, parameter[constant[datasource]]]
variable[outer_connection] assign[=] call[name[ET].SubElement, parameter[name[root], constant[connection]]]
call[name[outer_connection].set, parameter[constant[class], constant[federated]]]
variable[named_conns] assign[=] call[name[ET].SubElement, parameter[name[outer_connection], constant[named-connections]]]
for taget[name[conn]] in starred[name[connections]] begin[:]
variable[nc] assign[=] call[name[ET].SubElement, parameter[name[named_conns], constant[named-connection]]]
call[name[nc].append, parameter[name[conn]._connectionXML]]
return[call[name[cls], parameter[name[root]]]] | keyword[def] identifier[from_connections] ( identifier[cls] , identifier[caption] , identifier[connections] ):
literal[string]
identifier[root] = identifier[ET] . identifier[Element] ( literal[string] , identifier[caption] = identifier[caption] , identifier[version] = literal[string] , identifier[inline] = literal[string] )
identifier[outer_connection] = identifier[ET] . identifier[SubElement] ( identifier[root] , literal[string] )
identifier[outer_connection] . identifier[set] ( literal[string] , literal[string] )
identifier[named_conns] = identifier[ET] . identifier[SubElement] ( identifier[outer_connection] , literal[string] )
keyword[for] identifier[conn] keyword[in] identifier[connections] :
identifier[nc] = identifier[ET] . identifier[SubElement] ( identifier[named_conns] ,
literal[string] ,
identifier[name] = identifier[_make_unique_name] ( identifier[conn] . identifier[dbclass] ),
identifier[caption] = identifier[conn] . identifier[server] )
identifier[nc] . identifier[append] ( identifier[conn] . identifier[_connectionXML] )
keyword[return] identifier[cls] ( identifier[root] ) | def from_connections(cls, caption, connections):
"""Create a new Data Source give a list of Connections."""
root = ET.Element('datasource', caption=caption, version='10.0', inline='true')
outer_connection = ET.SubElement(root, 'connection')
outer_connection.set('class', 'federated')
named_conns = ET.SubElement(outer_connection, 'named-connections')
for conn in connections:
nc = ET.SubElement(named_conns, 'named-connection', name=_make_unique_name(conn.dbclass), caption=conn.server)
nc.append(conn._connectionXML) # depends on [control=['for'], data=['conn']]
return cls(root) |
def db_exec_with_cursor(self, cursor, sql: str, *args) -> int:
    """Executes SQL on a supplied cursor, with "?" placeholders,
    substituting in the arguments. Returns number of rows affected.

    Args:
        cursor: database cursor to execute the statement on
        sql: SQL statement using "?" placeholders
        args: values substituted for the placeholders

    Returns:
        number of rows affected (``cursor.rowcount``)
    """
    sql = self.localize_sql(sql)
    try:
        debug_sql(sql, args)
        cursor.execute(sql, args)
        return cursor.rowcount
    except Exception:
        # Was a bare "except:", which also swallows-and-logs
        # KeyboardInterrupt/SystemExit; Exception is sufficient here.
        # Lazy %-formatting avoids building the message unless logged.
        log.exception("db_exec_with_cursor: SQL was: %s", sql)
        raise
constant[Executes SQL on a supplied cursor, with "?" placeholders,
substituting in the arguments. Returns number of rows affected.]
variable[sql] assign[=] call[name[self].localize_sql, parameter[name[sql]]]
<ast.Try object at 0x7da1b1728f70> | keyword[def] identifier[db_exec_with_cursor] ( identifier[self] , identifier[cursor] , identifier[sql] : identifier[str] ,* identifier[args] )-> identifier[int] :
literal[string]
identifier[sql] = identifier[self] . identifier[localize_sql] ( identifier[sql] )
keyword[try] :
identifier[debug_sql] ( identifier[sql] , identifier[args] )
identifier[cursor] . identifier[execute] ( identifier[sql] , identifier[args] )
keyword[return] identifier[cursor] . identifier[rowcount]
keyword[except] :
identifier[log] . identifier[exception] ( literal[string] + identifier[sql] )
keyword[raise] | def db_exec_with_cursor(self, cursor, sql: str, *args) -> int:
"""Executes SQL on a supplied cursor, with "?" placeholders,
substituting in the arguments. Returns number of rows affected."""
sql = self.localize_sql(sql)
try:
debug_sql(sql, args)
cursor.execute(sql, args)
return cursor.rowcount # depends on [control=['try'], data=[]]
except: # nopep8
log.exception('db_exec_with_cursor: SQL was: ' + sql)
raise # depends on [control=['except'], data=[]] |
def shortenIDs(doc, prefix, unprotectedElements=None):
    """
    Shortens ID names used in the document. ID names referenced the most often are assigned the
    shortest ID names.

    If the list unprotectedElements is provided, only IDs from this list will be shortened.

    Returns the number of bytes saved by shortening ID names in the document.
    """
    num = 0

    identifiedElements = findElementsWithId(doc.documentElement)
    if unprotectedElements is None:
        unprotectedElements = identifiedElements
    referencedIDs = findReferencedElements(doc.documentElement)

    # Make idList (list of idnames) sorted by reference count
    # descending, so the highest reference count is first.
    # First check that there's actually a defining element for the current ID name.
    # (Cyn: I've seen documents with #id references but no element with that ID!)
    idList = [(len(referencedIDs[rid]), rid) for rid in referencedIDs
              if rid in unprotectedElements]
    idList.sort(reverse=True)
    idList = [rid for count, rid in idList]

    # Add unreferenced IDs to end of idList in arbitrary order.
    # Membership is tested against a set: "rid not in idList" on the
    # plain list made this step quadratic in the number of IDs.
    referencedSet = set(idList)
    idList.extend(rid for rid in unprotectedElements if rid not in referencedSet)

    curIdNum = 1

    for rid in idList:
        curId = intToID(curIdNum, prefix)

        # First make sure that *this* element isn't already using
        # the ID name we want to give it.
        if curId != rid:
            # Then, skip ahead if the new ID is already in identifiedElement.
            while curId in identifiedElements:
                curIdNum += 1
                curId = intToID(curIdNum, prefix)

            # Then go rename it.
            num += renameID(doc, rid, curId, identifiedElements, referencedIDs)

        curIdNum += 1

    return num
constant[
Shortens ID names used in the document. ID names referenced the most often are assigned the
shortest ID names.
If the list unprotectedElements is provided, only IDs from this list will be shortened.
Returns the number of bytes saved by shortening ID names in the document.
]
variable[num] assign[=] constant[0]
variable[identifiedElements] assign[=] call[name[findElementsWithId], parameter[name[doc].documentElement]]
if compare[name[unprotectedElements] is constant[None]] begin[:]
variable[unprotectedElements] assign[=] name[identifiedElements]
variable[referencedIDs] assign[=] call[name[findReferencedElements], parameter[name[doc].documentElement]]
variable[idList] assign[=] <ast.ListComp object at 0x7da18f58f970>
call[name[idList].sort, parameter[]]
variable[idList] assign[=] <ast.ListComp object at 0x7da18f58de40>
call[name[idList].extend, parameter[<ast.ListComp object at 0x7da18f58cb50>]]
variable[curIdNum] assign[=] constant[1]
for taget[name[rid]] in starred[name[idList]] begin[:]
variable[curId] assign[=] call[name[intToID], parameter[name[curIdNum], name[prefix]]]
if compare[name[curId] not_equal[!=] name[rid]] begin[:]
while compare[name[curId] in name[identifiedElements]] begin[:]
<ast.AugAssign object at 0x7da2043449d0>
variable[curId] assign[=] call[name[intToID], parameter[name[curIdNum], name[prefix]]]
<ast.AugAssign object at 0x7da2043463e0>
<ast.AugAssign object at 0x7da2043447f0>
return[name[num]] | keyword[def] identifier[shortenIDs] ( identifier[doc] , identifier[prefix] , identifier[unprotectedElements] = keyword[None] ):
literal[string]
identifier[num] = literal[int]
identifier[identifiedElements] = identifier[findElementsWithId] ( identifier[doc] . identifier[documentElement] )
keyword[if] identifier[unprotectedElements] keyword[is] keyword[None] :
identifier[unprotectedElements] = identifier[identifiedElements]
identifier[referencedIDs] = identifier[findReferencedElements] ( identifier[doc] . identifier[documentElement] )
identifier[idList] =[( identifier[len] ( identifier[referencedIDs] [ identifier[rid] ]), identifier[rid] ) keyword[for] identifier[rid] keyword[in] identifier[referencedIDs]
keyword[if] identifier[rid] keyword[in] identifier[unprotectedElements] ]
identifier[idList] . identifier[sort] ( identifier[reverse] = keyword[True] )
identifier[idList] =[ identifier[rid] keyword[for] identifier[count] , identifier[rid] keyword[in] identifier[idList] ]
identifier[idList] . identifier[extend] ([ identifier[rid] keyword[for] identifier[rid] keyword[in] identifier[unprotectedElements] keyword[if] identifier[rid] keyword[not] keyword[in] identifier[idList] ])
identifier[curIdNum] = literal[int]
keyword[for] identifier[rid] keyword[in] identifier[idList] :
identifier[curId] = identifier[intToID] ( identifier[curIdNum] , identifier[prefix] )
keyword[if] identifier[curId] != identifier[rid] :
keyword[while] identifier[curId] keyword[in] identifier[identifiedElements] :
identifier[curIdNum] += literal[int]
identifier[curId] = identifier[intToID] ( identifier[curIdNum] , identifier[prefix] )
identifier[num] += identifier[renameID] ( identifier[doc] , identifier[rid] , identifier[curId] , identifier[identifiedElements] , identifier[referencedIDs] )
identifier[curIdNum] += literal[int]
keyword[return] identifier[num] | def shortenIDs(doc, prefix, unprotectedElements=None):
"""
Shortens ID names used in the document. ID names referenced the most often are assigned the
shortest ID names.
If the list unprotectedElements is provided, only IDs from this list will be shortened.
Returns the number of bytes saved by shortening ID names in the document.
"""
num = 0
identifiedElements = findElementsWithId(doc.documentElement)
if unprotectedElements is None:
unprotectedElements = identifiedElements # depends on [control=['if'], data=['unprotectedElements']]
referencedIDs = findReferencedElements(doc.documentElement)
# Make idList (list of idnames) sorted by reference count
# descending, so the highest reference count is first.
# First check that there's actually a defining element for the current ID name.
# (Cyn: I've seen documents with #id references but no element with that ID!)
idList = [(len(referencedIDs[rid]), rid) for rid in referencedIDs if rid in unprotectedElements]
idList.sort(reverse=True)
idList = [rid for (count, rid) in idList]
# Add unreferenced IDs to end of idList in arbitrary order
idList.extend([rid for rid in unprotectedElements if rid not in idList])
curIdNum = 1
for rid in idList:
curId = intToID(curIdNum, prefix)
# First make sure that *this* element isn't already using
# the ID name we want to give it.
if curId != rid:
# Then, skip ahead if the new ID is already in identifiedElement.
while curId in identifiedElements:
curIdNum += 1
curId = intToID(curIdNum, prefix) # depends on [control=['while'], data=['curId']]
# Then go rename it.
num += renameID(doc, rid, curId, identifiedElements, referencedIDs) # depends on [control=['if'], data=['curId', 'rid']]
curIdNum += 1 # depends on [control=['for'], data=['rid']]
return num |
def light_travel_time_to_detector(self, det):
    """ Return the light travel time from this detector

    Parameters
    ----------
    det: Detector
        The other detector to determine the light travel time to.

    Returns
    -------
    time: float
        The light travel time in seconds
    """
    separation = self.location - det.location
    # Euclidean distance between the two locations, then divide by c.
    distance = separation.dot(separation) ** 0.5
    return float(distance / constants.c.value)
constant[ Return the light travel time from this detector
Parameters
----------
det: Detector
The other detector to determine the light travel time to.
Returns
-------
time: float
The light travel time in seconds
]
variable[d] assign[=] binary_operation[name[self].location - name[det].location]
return[call[name[float], parameter[binary_operation[binary_operation[call[name[d].dot, parameter[name[d]]] ** constant[0.5]] / name[constants].c.value]]]] | keyword[def] identifier[light_travel_time_to_detector] ( identifier[self] , identifier[det] ):
literal[string]
identifier[d] = identifier[self] . identifier[location] - identifier[det] . identifier[location]
keyword[return] identifier[float] ( identifier[d] . identifier[dot] ( identifier[d] )** literal[int] / identifier[constants] . identifier[c] . identifier[value] ) | def light_travel_time_to_detector(self, det):
""" Return the light travel time from this detector
Parameters
----------
det: Detector
The other detector to determine the light travel time to.
Returns
-------
time: float
The light travel time in seconds
"""
d = self.location - det.location
return float(d.dot(d) ** 0.5 / constants.c.value) |
def get_trees(self, data, showerrors=False):  # -> list:
    """Return a list of parse trees with valid guesses.

    Raises ValueError if ``data`` contains an element that is not in the
    production set's alphabet.
    """
    # Validate every input element against the alphabet first.
    if not all(check(self._productionset.alphabet, [x]) for x in data):
        # NOTE: this previously read self.productionset (no underscore),
        # unlike every other access in this method, which uses
        # self._productionset; kept consistent so the error path cannot
        # itself fail with AttributeError.
        raise ValueError("Unknown element in {}, alphabet:{}".format(
            str(data), self._productionset.alphabet))
    result = self.__recursive_parser(self._productionset.initialsymbol, data,
                                     self._productionset.main_production,
                                     showerrors)
    # Keep only parses that span the whole input, dropping duplicates.
    finalresult = []
    for eresult in result:
        if (eresult.left == 0 and eresult.right == len(data)
                and eresult not in finalresult):
            finalresult.append(eresult)
    return finalresult
constant[ returns a list of trees with valid guesses ]
if <ast.UnaryOp object at 0x7da20c795990> begin[:]
<ast.Raise object at 0x7da20c7950f0>
variable[result] assign[=] call[name[self].__recursive_parser, parameter[name[self]._productionset.initialsymbol, name[data], name[self]._productionset.main_production, name[showerrors]]]
variable[finalresult] assign[=] list[[]]
for taget[name[eresult]] in starred[name[result]] begin[:]
if <ast.BoolOp object at 0x7da20c795840> begin[:]
call[name[finalresult].append, parameter[name[eresult]]]
return[name[finalresult]] | keyword[def] identifier[get_trees] ( identifier[self] , identifier[data] , identifier[showerrors] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[all] ( identifier[check] ( identifier[self] . identifier[_productionset] . identifier[alphabet] ,[ identifier[x] ]) keyword[for] identifier[x] keyword[in] identifier[data] ):
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[str] ( identifier[data] ), identifier[self] . identifier[productionset] . identifier[alphabet] ))
identifier[result] = identifier[self] . identifier[__recursive_parser] ( identifier[self] . identifier[_productionset] . identifier[initialsymbol] , identifier[data] , identifier[self] . identifier[_productionset] . identifier[main_production] , identifier[showerrors] )
identifier[finalresult] =[]
keyword[for] identifier[eresult] keyword[in] identifier[result] :
keyword[if] identifier[eresult] . identifier[left] == literal[int] keyword[and] identifier[eresult] . identifier[right] == identifier[len] ( identifier[data] ) keyword[and] identifier[eresult] keyword[not] keyword[in] identifier[finalresult] :
identifier[finalresult] . identifier[append] ( identifier[eresult] )
keyword[return] identifier[finalresult] | def get_trees(self, data, showerrors=False): # -> list:
' returns a list of trees with valid guesses '
if not all((check(self._productionset.alphabet, [x]) for x in data)):
raise ValueError('Unknown element in {}, alphabet:{}'.format(str(data), self.productionset.alphabet)) # depends on [control=['if'], data=[]]
result = self.__recursive_parser(self._productionset.initialsymbol, data, self._productionset.main_production, showerrors)
finalresult = []
for eresult in result:
if eresult.left == 0 and eresult.right == len(data) and (eresult not in finalresult):
finalresult.append(eresult) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['eresult']]
return finalresult |
def scrub(zpool, stop=False, pause=False):
    '''
    Begin, pause, or cancel a scrub of a storage pool

    zpool : string
        Name of storage pool

    stop : boolean
        If ``True``, cancel ongoing scrub

    pause : boolean
        If ``True``, pause ongoing scrub

        .. versionadded:: 2018.3.0

        .. note::

            Pause is only available on recent versions of ZFS.

            If both ``pause`` and ``stop`` are ``True``, then ``stop`` will
            win.

    CLI Example:

    .. code-block:: bash

        salt '*' zpool.scrub myzpool

    '''
    # Translate keyword arguments into ``zpool scrub`` flags;
    # ``stop`` takes precedence over ``pause``.
    if stop:
        flags = ['-s']
    elif pause:
        flags = ['-p']
    else:
        flags = None

    # Issue the scrub command against the target pool.
    res = __salt__['cmd.run_all'](
        __utils__['zfs.zpool_command'](
            command='scrub',
            flags=flags,
            target=zpool,
        ),
        python_shell=False,
    )

    if res['retcode'] != 0:
        return __utils__['zfs.parse_command_result'](res, 'scrubbing')

    # Report whether a scrub is now running (starting one) or not
    # (stopped/paused).
    return OrderedDict([('scrubbing', not (stop or pause))])
constant[
Scrub a storage pool
zpool : string
Name of storage pool
stop : boolean
If ``True``, cancel ongoing scrub
pause : boolean
If ``True``, pause ongoing scrub
.. versionadded:: 2018.3.0
.. note::
Pause is only available on recent versions of ZFS.
If both ``pause`` and ``stop`` are ``True``, then ``stop`` will
win.
CLI Example:
.. code-block:: bash
salt '*' zpool.scrub myzpool
]
if name[stop] begin[:]
variable[action] assign[=] list[[<ast.Constant object at 0x7da1b2195ab0>]]
variable[res] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[call[call[name[__utils__]][constant[zfs.zpool_command]], parameter[]]]]
if compare[call[name[res]][constant[retcode]] not_equal[!=] constant[0]] begin[:]
return[call[call[name[__utils__]][constant[zfs.parse_command_result]], parameter[name[res], constant[scrubbing]]]]
variable[ret] assign[=] call[name[OrderedDict], parameter[]]
if <ast.BoolOp object at 0x7da1b2195e40> begin[:]
call[name[ret]][constant[scrubbing]] assign[=] constant[False]
return[name[ret]] | keyword[def] identifier[scrub] ( identifier[zpool] , identifier[stop] = keyword[False] , identifier[pause] = keyword[False] ):
literal[string]
keyword[if] identifier[stop] :
identifier[action] =[ literal[string] ]
keyword[elif] identifier[pause] :
identifier[action] =[ literal[string] ]
keyword[else] :
identifier[action] = keyword[None]
identifier[res] = identifier[__salt__] [ literal[string] ](
identifier[__utils__] [ literal[string] ](
identifier[command] = literal[string] ,
identifier[flags] = identifier[action] ,
identifier[target] = identifier[zpool] ,
),
identifier[python_shell] = keyword[False] ,
)
keyword[if] identifier[res] [ literal[string] ]!= literal[int] :
keyword[return] identifier[__utils__] [ literal[string] ]( identifier[res] , literal[string] )
identifier[ret] = identifier[OrderedDict] ()
keyword[if] identifier[stop] keyword[or] identifier[pause] :
identifier[ret] [ literal[string] ]= keyword[False]
keyword[else] :
identifier[ret] [ literal[string] ]= keyword[True]
keyword[return] identifier[ret] | def scrub(zpool, stop=False, pause=False):
"""
Scrub a storage pool
zpool : string
Name of storage pool
stop : boolean
If ``True``, cancel ongoing scrub
pause : boolean
If ``True``, pause ongoing scrub
.. versionadded:: 2018.3.0
.. note::
Pause is only available on recent versions of ZFS.
If both ``pause`` and ``stop`` are ``True``, then ``stop`` will
win.
CLI Example:
.. code-block:: bash
salt '*' zpool.scrub myzpool
"""
## select correct action
if stop:
action = ['-s'] # depends on [control=['if'], data=[]]
elif pause:
action = ['-p'] # depends on [control=['if'], data=[]]
else:
action = None
## Scrub storage pool
res = __salt__['cmd.run_all'](__utils__['zfs.zpool_command'](command='scrub', flags=action, target=zpool), python_shell=False)
if res['retcode'] != 0:
return __utils__['zfs.parse_command_result'](res, 'scrubbing') # depends on [control=['if'], data=[]]
ret = OrderedDict()
if stop or pause:
ret['scrubbing'] = False # depends on [control=['if'], data=[]]
else:
ret['scrubbing'] = True
return ret |
def _merge_fields(a, b):
"""Merge two lists of fields.
Fields in `b` override fields in `a`. Fields in `a` are output first.
"""
a_names = set(x[0] for x in a)
b_names = set(x[0] for x in b)
a_keep = a_names - b_names
fields = []
for name, field in a:
if name in a_keep:
fields.append((name, field))
fields.extend(b)
return fields | def function[_merge_fields, parameter[a, b]]:
constant[Merge two lists of fields.
Fields in `b` override fields in `a`. Fields in `a` are output first.
]
variable[a_names] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da20c9903d0>]]
variable[b_names] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da20c992350>]]
variable[a_keep] assign[=] binary_operation[name[a_names] - name[b_names]]
variable[fields] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c992f80>, <ast.Name object at 0x7da20c992380>]]] in starred[name[a]] begin[:]
if compare[name[name] in name[a_keep]] begin[:]
call[name[fields].append, parameter[tuple[[<ast.Name object at 0x7da18bcc9540>, <ast.Name object at 0x7da18bcc8c70>]]]]
call[name[fields].extend, parameter[name[b]]]
return[name[fields]] | keyword[def] identifier[_merge_fields] ( identifier[a] , identifier[b] ):
literal[string]
identifier[a_names] = identifier[set] ( identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[a] )
identifier[b_names] = identifier[set] ( identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[b] )
identifier[a_keep] = identifier[a_names] - identifier[b_names]
identifier[fields] =[]
keyword[for] identifier[name] , identifier[field] keyword[in] identifier[a] :
keyword[if] identifier[name] keyword[in] identifier[a_keep] :
identifier[fields] . identifier[append] (( identifier[name] , identifier[field] ))
identifier[fields] . identifier[extend] ( identifier[b] )
keyword[return] identifier[fields] | def _merge_fields(a, b):
"""Merge two lists of fields.
Fields in `b` override fields in `a`. Fields in `a` are output first.
"""
a_names = set((x[0] for x in a))
b_names = set((x[0] for x in b))
a_keep = a_names - b_names
fields = []
for (name, field) in a:
if name in a_keep:
fields.append((name, field)) # depends on [control=['if'], data=['name']] # depends on [control=['for'], data=[]]
fields.extend(b)
return fields |
def stationary_distribution(self):
    r""" Compute stationary distribution of hidden states if possible.

    Returns
    -------
    The stationary distribution of the hidden transition matrix.

    Raises
    ------
    ValueError if the HMM is not stationary
    """
    # Use an explicit raise instead of ``assert``: assertions are stripped
    # when Python runs with -O, and the docstring promises a ValueError
    # (an assert would raise AssertionError instead).
    if not _tmatrix_disconnected.is_connected(self._Tij, strong=False):
        raise ValueError('No unique stationary distribution because transition matrix is not connected')
    import msmtools.analysis as msmana
    return msmana.stationary_distribution(self._Tij)
constant[ Compute stationary distribution of hidden states if possible.
Raises
------
ValueError if the HMM is not stationary
]
assert[call[name[_tmatrix_disconnected].is_connected, parameter[name[self]._Tij]]]
import module[msmtools.analysis] as alias[msmana]
return[call[name[msmana].stationary_distribution, parameter[name[self]._Tij]]] | keyword[def] identifier[stationary_distribution] ( identifier[self] ):
literal[string]
keyword[assert] identifier[_tmatrix_disconnected] . identifier[is_connected] ( identifier[self] . identifier[_Tij] , identifier[strong] = keyword[False] ), literal[string]
keyword[import] identifier[msmtools] . identifier[analysis] keyword[as] identifier[msmana]
keyword[return] identifier[msmana] . identifier[stationary_distribution] ( identifier[self] . identifier[_Tij] ) | def stationary_distribution(self):
""" Compute stationary distribution of hidden states if possible.
Raises
------
ValueError if the HMM is not stationary
"""
assert _tmatrix_disconnected.is_connected(self._Tij, strong=False), 'No unique stationary distribution because transition matrix is not connected'
import msmtools.analysis as msmana
return msmana.stationary_distribution(self._Tij) |
def open_gsr(self):
    """
    Open the GSR file located in the in self.outdir.
    Returns :class:`GsrFile` object, None if file could not be found or file is not readable.
    """
    path = self.gsr_path
    if not path:
        # A task that finished successfully is expected to have produced
        # a GSR file; log loudly when it did not.
        if self.status == self.S_OK:
            logger.critical("%s reached S_OK but didn't produce a GSR file in %s" % (self, self.outdir))
        return None

    # Import lazily so the module loads even when abipy.electrons is absent.
    from abipy.electrons.gsr import GsrFile
    try:
        return GsrFile(path)
    except Exception as exc:
        logger.critical("Exception while reading GSR file at %s:\n%s" % (path, str(exc)))
        return None
return None | def function[open_gsr, parameter[self]]:
constant[
Open the GSR file located in the in self.outdir.
Returns :class:`GsrFile` object, None if file could not be found or file is not readable.
]
variable[gsr_path] assign[=] name[self].gsr_path
if <ast.UnaryOp object at 0x7da20c6ab190> begin[:]
if compare[name[self].status equal[==] name[self].S_OK] begin[:]
call[name[logger].critical, parameter[binary_operation[constant[%s reached S_OK but didn't produce a GSR file in %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18dc98b20>, <ast.Attribute object at 0x7da18dc98eb0>]]]]]
return[constant[None]]
from relative_module[abipy.electrons.gsr] import module[GsrFile]
<ast.Try object at 0x7da18dc9abc0> | keyword[def] identifier[open_gsr] ( identifier[self] ):
literal[string]
identifier[gsr_path] = identifier[self] . identifier[gsr_path]
keyword[if] keyword[not] identifier[gsr_path] :
keyword[if] identifier[self] . identifier[status] == identifier[self] . identifier[S_OK] :
identifier[logger] . identifier[critical] ( literal[string] %( identifier[self] , identifier[self] . identifier[outdir] ))
keyword[return] keyword[None]
keyword[from] identifier[abipy] . identifier[electrons] . identifier[gsr] keyword[import] identifier[GsrFile]
keyword[try] :
keyword[return] identifier[GsrFile] ( identifier[gsr_path] )
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
identifier[logger] . identifier[critical] ( literal[string] %( identifier[gsr_path] , identifier[str] ( identifier[exc] )))
keyword[return] keyword[None] | def open_gsr(self):
"""
Open the GSR file located in the in self.outdir.
Returns :class:`GsrFile` object, None if file could not be found or file is not readable.
"""
gsr_path = self.gsr_path
if not gsr_path:
if self.status == self.S_OK:
logger.critical("%s reached S_OK but didn't produce a GSR file in %s" % (self, self.outdir)) # depends on [control=['if'], data=[]]
return None # depends on [control=['if'], data=[]]
# Open the GSR file.
from abipy.electrons.gsr import GsrFile
try:
return GsrFile(gsr_path) # depends on [control=['try'], data=[]]
except Exception as exc:
logger.critical('Exception while reading GSR file at %s:\n%s' % (gsr_path, str(exc)))
return None # depends on [control=['except'], data=['exc']] |
def clustal_align_protein(recs, work_dir, outfmt="fasta"):
    """
    Align given proteins with clustalw.

    recs are iterable of Biopython SeqIO objects

    Parameters
    ----------
    recs : iterable
        Biopython SeqRecord objects to align.
    work_dir : str
        Directory where the intermediate FASTA and alignment files are written.
    outfmt : str
        "fasta" returns the alignment as a FASTA-formatted string;
        "clustal" returns the MultipleSeqAlignment object.
        Any other value returns None (implicitly).
    """
    fasta_file = op.join(work_dir, "prot-start.fasta")
    align_file = op.join(work_dir, "prot.aln")
    # ``file()`` was a Python-2-only builtin (this function already requires
    # Python 3 for ``print(..., file=...)``); use ``open()`` inside a context
    # manager so handles are closed even when SeqIO/AlignIO raise.
    with open(fasta_file, "w") as fw:
        SeqIO.write(recs, fw, "fasta")

    clustal_cl = ClustalwCommandline(cmd=CLUSTALW_BIN("clustalw2"),
            infile=fasta_file, outfile=align_file, outorder="INPUT",
            type="PROTEIN")
    stdout, stderr = clustal_cl()

    with open(clustal_cl.outfile) as aln_file:
        alignment = AlignIO.read(aln_file, "clustal")
    print("\tDoing clustalw alignment: %s" % clustal_cl, file=sys.stderr)
    if outfmt == "fasta":
        return alignment.format("fasta")
    if outfmt == "clustal":
        return alignment
constant[
Align given proteins with clustalw.
recs are iterable of Biopython SeqIO objects
]
variable[fasta_file] assign[=] call[name[op].join, parameter[name[work_dir], constant[prot-start.fasta]]]
variable[align_file] assign[=] call[name[op].join, parameter[name[work_dir], constant[prot.aln]]]
call[name[SeqIO].write, parameter[name[recs], call[name[file], parameter[name[fasta_file], constant[w]]], constant[fasta]]]
variable[clustal_cl] assign[=] call[name[ClustalwCommandline], parameter[]]
<ast.Tuple object at 0x7da18f00d0f0> assign[=] call[name[clustal_cl], parameter[]]
variable[aln_file] assign[=] call[name[file], parameter[name[clustal_cl].outfile]]
variable[alignment] assign[=] call[name[AlignIO].read, parameter[name[aln_file], constant[clustal]]]
call[name[print], parameter[binary_operation[constant[ Doing clustalw alignment: %s] <ast.Mod object at 0x7da2590d6920> name[clustal_cl]]]]
if compare[name[outfmt] equal[==] constant[fasta]] begin[:]
return[call[name[alignment].format, parameter[constant[fasta]]]]
if compare[name[outfmt] equal[==] constant[clustal]] begin[:]
return[name[alignment]] | keyword[def] identifier[clustal_align_protein] ( identifier[recs] , identifier[work_dir] , identifier[outfmt] = literal[string] ):
literal[string]
identifier[fasta_file] = identifier[op] . identifier[join] ( identifier[work_dir] , literal[string] )
identifier[align_file] = identifier[op] . identifier[join] ( identifier[work_dir] , literal[string] )
identifier[SeqIO] . identifier[write] ( identifier[recs] , identifier[file] ( identifier[fasta_file] , literal[string] ), literal[string] )
identifier[clustal_cl] = identifier[ClustalwCommandline] ( identifier[cmd] = identifier[CLUSTALW_BIN] ( literal[string] ),
identifier[infile] = identifier[fasta_file] , identifier[outfile] = identifier[align_file] , identifier[outorder] = literal[string] ,
identifier[type] = literal[string] )
identifier[stdout] , identifier[stderr] = identifier[clustal_cl] ()
identifier[aln_file] = identifier[file] ( identifier[clustal_cl] . identifier[outfile] )
identifier[alignment] = identifier[AlignIO] . identifier[read] ( identifier[aln_file] , literal[string] )
identifier[print] ( literal[string] % identifier[clustal_cl] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[if] identifier[outfmt] == literal[string] :
keyword[return] identifier[alignment] . identifier[format] ( literal[string] )
keyword[if] identifier[outfmt] == literal[string] :
keyword[return] identifier[alignment] | def clustal_align_protein(recs, work_dir, outfmt='fasta'):
"""
Align given proteins with clustalw.
recs are iterable of Biopython SeqIO objects
"""
fasta_file = op.join(work_dir, 'prot-start.fasta')
align_file = op.join(work_dir, 'prot.aln')
SeqIO.write(recs, file(fasta_file, 'w'), 'fasta')
clustal_cl = ClustalwCommandline(cmd=CLUSTALW_BIN('clustalw2'), infile=fasta_file, outfile=align_file, outorder='INPUT', type='PROTEIN')
(stdout, stderr) = clustal_cl()
aln_file = file(clustal_cl.outfile)
alignment = AlignIO.read(aln_file, 'clustal')
print('\tDoing clustalw alignment: %s' % clustal_cl, file=sys.stderr)
if outfmt == 'fasta':
return alignment.format('fasta') # depends on [control=['if'], data=[]]
if outfmt == 'clustal':
return alignment # depends on [control=['if'], data=[]] |
def set_status(self, status):
    """
    Save the new status and call all defined callbacks
    """
    self.status = status
    # Notify every registered observer about the status change.
    for notify in self._update_status_callbacks:
        notify(self)
constant[
Save the new status and call all defined callbacks
]
name[self].status assign[=] name[status]
for taget[name[callback]] in starred[name[self]._update_status_callbacks] begin[:]
call[name[callback], parameter[name[self]]] | keyword[def] identifier[set_status] ( identifier[self] , identifier[status] ):
literal[string]
identifier[self] . identifier[status] = identifier[status]
keyword[for] identifier[callback] keyword[in] identifier[self] . identifier[_update_status_callbacks] :
identifier[callback] ( identifier[self] ) | def set_status(self, status):
"""
Save the new status and call all defined callbacks
"""
self.status = status
for callback in self._update_status_callbacks:
callback(self) # depends on [control=['for'], data=['callback']] |
def transform(self, context, handler, result):
    """Transform the value returned by the controller endpoint.

    This extension transforms returned values if the endpoint has a return type annotation.
    """
    # Unwrap bound methods to reach the underlying function's annotations.
    target = getattr(handler, '__func__', handler)
    annotation = getattr(target, '__annotations__', {}).get('return', None)
    # Pair the result with its declared return type, when one exists.
    return (annotation, result) if annotation else result
constant[Transform the value returned by the controller endpoint.
This extension transforms returned values if the endpoint has a return type annotation.
]
variable[handler] assign[=] <ast.IfExp object at 0x7da2043460e0>
variable[annotation] assign[=] call[call[name[getattr], parameter[name[handler], constant[__annotations__], dictionary[[], []]]].get, parameter[constant[return], constant[None]]]
if name[annotation] begin[:]
return[tuple[[<ast.Name object at 0x7da204345b10>, <ast.Name object at 0x7da204346a10>]]]
return[name[result]] | keyword[def] identifier[transform] ( identifier[self] , identifier[context] , identifier[handler] , identifier[result] ):
literal[string]
identifier[handler] = identifier[handler] . identifier[__func__] keyword[if] identifier[hasattr] ( identifier[handler] , literal[string] ) keyword[else] identifier[handler]
identifier[annotation] = identifier[getattr] ( identifier[handler] , literal[string] ,{}). identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[annotation] :
keyword[return] ( identifier[annotation] , identifier[result] )
keyword[return] identifier[result] | def transform(self, context, handler, result):
"""Transform the value returned by the controller endpoint.
This extension transforms returned values if the endpoint has a return type annotation.
"""
handler = handler.__func__ if hasattr(handler, '__func__') else handler
annotation = getattr(handler, '__annotations__', {}).get('return', None)
if annotation:
return (annotation, result) # depends on [control=['if'], data=[]]
return result |
def key_callback(self, window, key, scancode, action, mods):
    """press ESCAPE to quit the application"""
    # Only react to a fresh press of the Escape key.
    if key != glfw.KEY_ESCAPE or action != glfw.PRESS:
        return
    glfw.SetWindowShouldClose(self.window, True)
constant[press ESCAPE to quite the application]
if <ast.BoolOp object at 0x7da20c76cca0> begin[:]
call[name[glfw].SetWindowShouldClose, parameter[name[self].window, constant[True]]] | keyword[def] identifier[key_callback] ( identifier[self] , identifier[window] , identifier[key] , identifier[scancode] , identifier[action] , identifier[mods] ):
literal[string]
keyword[if] identifier[key] == identifier[glfw] . identifier[KEY_ESCAPE] keyword[and] identifier[action] == identifier[glfw] . identifier[PRESS] :
identifier[glfw] . identifier[SetWindowShouldClose] ( identifier[self] . identifier[window] , keyword[True] ) | def key_callback(self, window, key, scancode, action, mods):
"""press ESCAPE to quite the application"""
if key == glfw.KEY_ESCAPE and action == glfw.PRESS:
glfw.SetWindowShouldClose(self.window, True) # depends on [control=['if'], data=[]] |
def set_creation_date(self, p_date=None):
    """
    Sets the creation date of a todo. Should be passed a date object.

    p_date : datetime.date, optional
        Creation date to stamp; defaults to today's date. (The previous
        signature used ``p_date=date.today()``, which evaluated the default
        once at import time, so a long-running process kept stamping its
        startup date after midnight.)
    """
    if p_date is None:
        p_date = date.today()
    self.fields['creationDate'] = p_date
    # not particularly pretty, but inspired by
    # http://bugs.python.org/issue1519638 non-existent matches trigger
    # exceptions, hence the lambda
    self.src = re.sub(
        r'^(x \d{4}-\d{2}-\d{2} |\([A-Z]\) )?(\d{4}-\d{2}-\d{2} )?(.*)$',
        lambda m:
        u"{}{} {}".format(m.group(1) or '', p_date.isoformat(),
                          m.group(3)), self.src)
constant[
Sets the creation date of a todo. Should be passed a date object.
]
call[name[self].fields][constant[creationDate]] assign[=] name[p_date]
name[self].src assign[=] call[name[re].sub, parameter[constant[^(x \d{4}-\d{2}-\d{2} |\([A-Z]\) )?(\d{4}-\d{2}-\d{2} )?(.*)$], <ast.Lambda object at 0x7da20e9b3880>, name[self].src]] | keyword[def] identifier[set_creation_date] ( identifier[self] , identifier[p_date] = identifier[date] . identifier[today] ()):
literal[string]
identifier[self] . identifier[fields] [ literal[string] ]= identifier[p_date]
identifier[self] . identifier[src] = identifier[re] . identifier[sub] (
literal[string] ,
keyword[lambda] identifier[m] :
literal[string] . identifier[format] ( identifier[m] . identifier[group] ( literal[int] ) keyword[or] literal[string] , identifier[p_date] . identifier[isoformat] (),
identifier[m] . identifier[group] ( literal[int] )), identifier[self] . identifier[src] ) | def set_creation_date(self, p_date=date.today()):
"""
Sets the creation date of a todo. Should be passed a date object.
"""
self.fields['creationDate'] = p_date
# not particularly pretty, but inspired by
# http://bugs.python.org/issue1519638 non-existent matches trigger
# exceptions, hence the lambda
self.src = re.sub('^(x \\d{4}-\\d{2}-\\d{2} |\\([A-Z]\\) )?(\\d{4}-\\d{2}-\\d{2} )?(.*)$', lambda m: u'{}{} {}'.format(m.group(1) or '', p_date.isoformat(), m.group(3)), self.src) |
def count(self, other, r, attrs=None, info=None):
    """ Gray & Moore based fast dual tree counting.

        r is the edge of bins:

        -inf or r[i-1] < count[i] <= r[i]

        attrs: None or tuple
            if tuple, attrs = (attr_self, attr_other)

        info: dict, optional
            Scratch dictionary passed through to the C core; a fresh dict
            is used per call when omitted.

        Returns: count,
           count, weight of attrs is not None
    """
    # Avoid a mutable default argument: a shared module-level ``info={}``
    # dict would be mutated across every call to count().
    if info is None:
        info = {}
    r = numpy.array(r, dtype='f8')
    return _core.KDNode.count(self, other, r, attrs, info=info)
constant[ Gray & Moore based fast dual tree counting.
r is the edge of bins:
-inf or r[i-1] < count[i] <= r[i]
attrs: None or tuple
if tuple, attrs = (attr_self, attr_other)
Returns: count,
count, weight of attrs is not None
]
variable[r] assign[=] call[name[numpy].array, parameter[name[r]]]
return[call[name[_core].KDNode.count, parameter[name[self], name[other], name[r], name[attrs]]]] | keyword[def] identifier[count] ( identifier[self] , identifier[other] , identifier[r] , identifier[attrs] = keyword[None] , identifier[info] ={}):
literal[string]
identifier[r] = identifier[numpy] . identifier[array] ( identifier[r] , identifier[dtype] = literal[string] )
keyword[return] identifier[_core] . identifier[KDNode] . identifier[count] ( identifier[self] , identifier[other] , identifier[r] , identifier[attrs] , identifier[info] = identifier[info] ) | def count(self, other, r, attrs=None, info={}):
""" Gray & Moore based fast dual tree counting.
r is the edge of bins:
-inf or r[i-1] < count[i] <= r[i]
attrs: None or tuple
if tuple, attrs = (attr_self, attr_other)
Returns: count,
count, weight of attrs is not None
"""
r = numpy.array(r, dtype='f8')
return _core.KDNode.count(self, other, r, attrs, info=info) |
def get_result(self, client, check):
    """
    Returns an event for a given client & result name.
    """
    # Compose the results endpoint for this client/check pair and fetch it.
    path = '/results/{}/{}'.format(client, check)
    response = self._request('GET', path)
    return response.json()
constant[
Returns an event for a given client & result name.
]
variable[data] assign[=] call[name[self]._request, parameter[constant[GET], call[constant[/results/{}/{}].format, parameter[name[client], name[check]]]]]
return[call[name[data].json, parameter[]]] | keyword[def] identifier[get_result] ( identifier[self] , identifier[client] , identifier[check] ):
literal[string]
identifier[data] = identifier[self] . identifier[_request] ( literal[string] , literal[string] . identifier[format] ( identifier[client] , identifier[check] ))
keyword[return] identifier[data] . identifier[json] () | def get_result(self, client, check):
"""
Returns an event for a given client & result name.
"""
data = self._request('GET', '/results/{}/{}'.format(client, check))
return data.json() |
def has_data(self):
    """Return whether or not the stash has any data available or not."""
    if not hasattr(self, '_has_data'):
        # Probe lazily and cache the answer: pulling a single key is
        # enough to know whether the delegate holds anything at all.
        keys = iter(self.delegate.keys())
        try:
            next(keys)
        except StopIteration:
            self._has_data = False
        else:
            self._has_data = True
    return self._has_data
constant[Return whether or not the stash has any data available or not.]
if <ast.UnaryOp object at 0x7da1b11c4700> begin[:]
<ast.Try object at 0x7da1b11c4580>
return[name[self]._has_data] | keyword[def] identifier[has_data] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[try] :
identifier[next] ( identifier[iter] ( identifier[self] . identifier[delegate] . identifier[keys] ()))
identifier[self] . identifier[_has_data] = keyword[True]
keyword[except] identifier[StopIteration] :
identifier[self] . identifier[_has_data] = keyword[False]
keyword[return] identifier[self] . identifier[_has_data] | def has_data(self):
"""Return whether or not the stash has any data available or not."""
if not hasattr(self, '_has_data'):
try:
next(iter(self.delegate.keys()))
self._has_data = True # depends on [control=['try'], data=[]]
except StopIteration:
self._has_data = False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return self._has_data |
def listwrap(value):
    """
    PERFORMS THE FOLLOWING TRANSLATION
    None -> []
    value -> [value]
    [...] -> [...]  (unchanged list)

    ##MOTIVATION##
    OFTEN IT IS NICE TO ALLOW FUNCTION PARAMETERS TO BE ASSIGNED A VALUE,
    OR A list-OF-VALUES, OR NULL.  CHECKING FOR WHICH THE CALLER USED IS
    TEDIOUS.  INSTEAD WE CAST FROM THOSE THREE CASES TO THE SINGLE CASE
    OF A LIST

    # BEFORE
    def do_it(a):
        if a is None:
            return
        if not isinstance(a, list):
            a=[a]
        for x in a:
            # do something

    # AFTER
    def do_it(a):
        for x in listwrap(a):
            # do something

    """
    # NOTE(review): ``== None`` (not ``is None``) appears deliberate here --
    # a project Null-like sentinel may compare equal to None; confirm
    # before "fixing" this into an identity test.
    if value == None:
        return FlatList()
    elif is_list(value):
        # already list-like: wrap in place without copying
        return wrap(value)
    elif isinstance(value, set):
        # sets are unordered and not list-like; materialize before wrapping
        return wrap(list(value))
    else:
        # single scalar/object -> one-element wrapped list
        return wrap([unwrap(value)])
constant[
PERFORMS THE FOLLOWING TRANSLATION
None -> []
value -> [value]
[...] -> [...] (unchanged list)
##MOTIVATION##
OFTEN IT IS NICE TO ALLOW FUNCTION PARAMETERS TO BE ASSIGNED A VALUE,
OR A list-OF-VALUES, OR NULL. CHECKING FOR WHICH THE CALLER USED IS
TEDIOUS. INSTEAD WE CAST FROM THOSE THREE CASES TO THE SINGLE CASE
OF A LIST
# BEFORE
def do_it(a):
if a is None:
return
if not isinstance(a, list):
a=[a]
for x in a:
# do something
# AFTER
def do_it(a):
for x in listwrap(a):
# do something
]
if compare[name[value] equal[==] constant[None]] begin[:]
return[call[name[FlatList], parameter[]]] | keyword[def] identifier[listwrap] ( identifier[value] ):
literal[string]
keyword[if] identifier[value] == keyword[None] :
keyword[return] identifier[FlatList] ()
keyword[elif] identifier[is_list] ( identifier[value] ):
keyword[return] identifier[wrap] ( identifier[value] )
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[set] ):
keyword[return] identifier[wrap] ( identifier[list] ( identifier[value] ))
keyword[else] :
keyword[return] identifier[wrap] ([ identifier[unwrap] ( identifier[value] )]) | def listwrap(value):
"""
PERFORMS THE FOLLOWING TRANSLATION
None -> []
value -> [value]
[...] -> [...] (unchanged list)
##MOTIVATION##
OFTEN IT IS NICE TO ALLOW FUNCTION PARAMETERS TO BE ASSIGNED A VALUE,
OR A list-OF-VALUES, OR NULL. CHECKING FOR WHICH THE CALLER USED IS
TEDIOUS. INSTEAD WE CAST FROM THOSE THREE CASES TO THE SINGLE CASE
OF A LIST
# BEFORE
def do_it(a):
if a is None:
return
if not isinstance(a, list):
a=[a]
for x in a:
# do something
# AFTER
def do_it(a):
for x in listwrap(a):
# do something
"""
if value == None:
return FlatList() # depends on [control=['if'], data=[]]
elif is_list(value):
return wrap(value) # depends on [control=['if'], data=[]]
elif isinstance(value, set):
return wrap(list(value)) # depends on [control=['if'], data=[]]
else:
return wrap([unwrap(value)]) |
def generate_astrometric_catalog(imglist, **pars):
    """Generates a catalog of all sources from an existing astrometric catalog are in or near the FOVs of the images in
        the input list.

    Parameters
    ----------
    imglist : list
        List of one or more calibrated fits images that will be used for catalog generation.

    Returns
    =======
    ref_table : object
        Astropy Table object of the catalog

    """
    # generate catalog
    # Preserve the caller's parameters: ``pars['output']`` is rewritten
    # below to steer whether create_astrometric_catalog writes a file.
    temp_pars = pars.copy()
    # NOTE(review): ``== True`` is an exact-value test -- a non-True truthy
    # ``output`` (e.g. a filename string) falls into the ``None`` branch
    # here but still enables the write further down; confirm intended.
    if pars['output'] == True:
        pars['output'] = 'ref_cat.ecsv'
    else:
        pars['output'] = None
    out_catalog = amutils.create_astrometric_catalog(imglist,**pars)
    # Restore the original parameters so the truthiness test below sees the
    # caller's ``output`` value, not the rewritten one.
    pars = temp_pars.copy()

    #if the catalog has contents, write the catalog to ascii text file
    if len(out_catalog) > 0 and pars['output']:
        catalog_filename = "refcatalog.cat"
        out_catalog.write(catalog_filename, format="ascii.fast_commented_header")
        log.info("Wrote reference catalog {}.".format(catalog_filename))

    return(out_catalog)
constant[Generates a catalog of all sources from an existing astrometric catalog are in or near the FOVs of the images in
the input list.
Parameters
----------
imglist : list
List of one or more calibrated fits images that will be used for catalog generation.
Returns
=======
ref_table : object
Astropy Table object of the catalog
]
variable[temp_pars] assign[=] call[name[pars].copy, parameter[]]
if compare[call[name[pars]][constant[output]] equal[==] constant[True]] begin[:]
call[name[pars]][constant[output]] assign[=] constant[ref_cat.ecsv]
variable[out_catalog] assign[=] call[name[amutils].create_astrometric_catalog, parameter[name[imglist]]]
variable[pars] assign[=] call[name[temp_pars].copy, parameter[]]
if <ast.BoolOp object at 0x7da1b1bd0340> begin[:]
variable[catalog_filename] assign[=] constant[refcatalog.cat]
call[name[out_catalog].write, parameter[name[catalog_filename]]]
call[name[log].info, parameter[call[constant[Wrote reference catalog {}.].format, parameter[name[catalog_filename]]]]]
return[name[out_catalog]] | keyword[def] identifier[generate_astrometric_catalog] ( identifier[imglist] ,** identifier[pars] ):
literal[string]
identifier[temp_pars] = identifier[pars] . identifier[copy] ()
keyword[if] identifier[pars] [ literal[string] ]== keyword[True] :
identifier[pars] [ literal[string] ]= literal[string]
keyword[else] :
identifier[pars] [ literal[string] ]= keyword[None]
identifier[out_catalog] = identifier[amutils] . identifier[create_astrometric_catalog] ( identifier[imglist] ,** identifier[pars] )
identifier[pars] = identifier[temp_pars] . identifier[copy] ()
keyword[if] identifier[len] ( identifier[out_catalog] )> literal[int] keyword[and] identifier[pars] [ literal[string] ]:
identifier[catalog_filename] = literal[string]
identifier[out_catalog] . identifier[write] ( identifier[catalog_filename] , identifier[format] = literal[string] )
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[catalog_filename] ))
keyword[return] ( identifier[out_catalog] ) | def generate_astrometric_catalog(imglist, **pars):
"""Generates a catalog of all sources from an existing astrometric catalog are in or near the FOVs of the images in
the input list.
Parameters
----------
imglist : list
List of one or more calibrated fits images that will be used for catalog generation.
Returns
=======
ref_table : object
Astropy Table object of the catalog
"""
# generate catalog
temp_pars = pars.copy()
if pars['output'] == True:
pars['output'] = 'ref_cat.ecsv' # depends on [control=['if'], data=[]]
else:
pars['output'] = None
out_catalog = amutils.create_astrometric_catalog(imglist, **pars)
pars = temp_pars.copy()
#if the catalog has contents, write the catalog to ascii text file
if len(out_catalog) > 0 and pars['output']:
catalog_filename = 'refcatalog.cat'
out_catalog.write(catalog_filename, format='ascii.fast_commented_header')
log.info('Wrote reference catalog {}.'.format(catalog_filename)) # depends on [control=['if'], data=[]]
return out_catalog |
def instantiate(self, **extra_args):
    """Build the concrete model from its configured parts.

    Instantiates the input block and the backbone, then wires them into a
    ``QDuelingModel`` together with the action space taken from
    ``extra_args``.
    """
    head = self.input_block.instantiate()
    trunk = self.backbone.instantiate(**extra_args)
    action_space = extra_args['action_space']
    return QDuelingModel(head, trunk, action_space)
constant[ Instantiate the model ]
variable[input_block] assign[=] call[name[self].input_block.instantiate, parameter[]]
variable[backbone] assign[=] call[name[self].backbone.instantiate, parameter[]]
return[call[name[QDuelingModel], parameter[name[input_block], name[backbone], call[name[extra_args]][constant[action_space]]]]] | keyword[def] identifier[instantiate] ( identifier[self] ,** identifier[extra_args] ):
literal[string]
identifier[input_block] = identifier[self] . identifier[input_block] . identifier[instantiate] ()
identifier[backbone] = identifier[self] . identifier[backbone] . identifier[instantiate] (** identifier[extra_args] )
keyword[return] identifier[QDuelingModel] ( identifier[input_block] , identifier[backbone] , identifier[extra_args] [ literal[string] ]) | def instantiate(self, **extra_args):
""" Instantiate the model """
input_block = self.input_block.instantiate()
backbone = self.backbone.instantiate(**extra_args)
return QDuelingModel(input_block, backbone, extra_args['action_space']) |
def cyclic_dt_feat_names(time:bool=True, add_linear:bool=False)->List[str]:
    "Feature names for the date/time cycles emitted by `cyclic_dt_features`."
    periods = 'weekday day_month month_year day_year'.split()
    if time:
        periods += 'hour clock min sec'.split()
    names = []
    for period in periods:
        # Each cyclic feature comes as a cos/sin pair.
        names.append(f'{period}_cos')
        names.append(f'{period}_sin')
    if add_linear:
        names.append('year_lin')
    return names
constant[Return feature names of date/time cycles as produced by `cyclic_dt_features`.]
variable[fs] assign[=] list[[<ast.Constant object at 0x7da1b1dd9ba0>, <ast.Constant object at 0x7da1b1ddafb0>]]
variable[attr] assign[=] <ast.ListComp object at 0x7da1b1dda8f0>
if name[time] begin[:]
<ast.AugAssign object at 0x7da1b1dd9d50>
if name[add_linear] begin[:]
call[name[attr].append, parameter[constant[year_lin]]]
return[name[attr]] | keyword[def] identifier[cyclic_dt_feat_names] ( identifier[time] : identifier[bool] = keyword[True] , identifier[add_linear] : identifier[bool] = keyword[False] )-> identifier[List] [ identifier[str] ]:
literal[string]
identifier[fs] =[ literal[string] , literal[string] ]
identifier[attr] =[ literal[string] keyword[for] identifier[r] keyword[in] literal[string] . identifier[split] () keyword[for] identifier[f] keyword[in] identifier[fs] ]
keyword[if] identifier[time] : identifier[attr] +=[ literal[string] keyword[for] identifier[r] keyword[in] literal[string] . identifier[split] () keyword[for] identifier[f] keyword[in] identifier[fs] ]
keyword[if] identifier[add_linear] : identifier[attr] . identifier[append] ( literal[string] )
keyword[return] identifier[attr] | def cyclic_dt_feat_names(time: bool=True, add_linear: bool=False) -> List[str]:
"""Return feature names of date/time cycles as produced by `cyclic_dt_features`."""
fs = ['cos', 'sin']
attr = [f'{r}_{f}' for r in 'weekday day_month month_year day_year'.split() for f in fs]
if time:
attr += [f'{r}_{f}' for r in 'hour clock min sec'.split() for f in fs] # depends on [control=['if'], data=[]]
if add_linear:
attr.append('year_lin') # depends on [control=['if'], data=[]]
return attr |
def allowPatternsForNameChecking(self, patternsFunc, patternsClass):
    """
    Allow name exceptions by given patterns.

    Extends pylint's BASIC name regexes (method-rgx, function-rgx,
    class-rgx) with an extra alternative accepting any name that matches
    one of the given patterns followed by at least one character.  Both
    the config parser and the live NameChecker are updated so the change
    takes effect for the current run.

    @param patternsFunc: patterns of special function names
    @param patternsClass: patterns of special class names
    """
    cfgParser = self.linter.cfgfile_parser
    nameChecker = self.getCheckerByName(NameChecker)
    if not nameChecker:
        # Name checker not active: nothing to adjust.
        return
    if patternsFunc:
        # Extra regex alternative: the given patterns joined with '|',
        # each followed by at least one further character.
        regexFuncAdd = "|((%s).+)$" % "|".join(patternsFunc)
    else:
        regexFuncAdd = ""
    if patternsClass:
        regexClassAdd = "|((%s).+)$" % "|".join(patternsClass)
    else:
        regexClassAdd = ""
    # Modify regex for function, method and class name.
    regexMethod = cfgParser.get("BASIC", "method-rgx") + regexFuncAdd
    regexFunction = cfgParser.get("BASIC", "function-rgx") + regexFuncAdd
    regexClass = cfgParser.get("BASIC", "class-rgx") + regexClassAdd
    # Save to config parser.
    cfgParser.set("BASIC", "method-rgx", regexMethod)
    cfgParser.set("BASIC", "function-rgx", regexFunction)
    cfgParser.set("BASIC", "class-rgx", regexClass)
    # Save to name checker.
    nameChecker.config.method_rgx = re.compile(regexMethod)
    nameChecker.config.function_rgx = re.compile(regexFunction)
    nameChecker.config.class_rgx = re.compile(regexClass)
constant[
Allow name exceptions by given patterns.
@param patternsFunc: patterns of special function names
@param patternsClass: patterns of special class names
]
variable[cfgParser] assign[=] name[self].linter.cfgfile_parser
variable[nameChecker] assign[=] call[name[self].getCheckerByName, parameter[name[NameChecker]]]
if <ast.UnaryOp object at 0x7da1b2544610> begin[:]
return[None]
if name[patternsFunc] begin[:]
variable[regexFuncAdd] assign[=] binary_operation[constant[|((%s).+)$] <ast.Mod object at 0x7da2590d6920> call[constant[|].join, parameter[name[patternsFunc]]]]
if name[patternsClass] begin[:]
variable[regexClassAdd] assign[=] binary_operation[constant[|((%s).+)$] <ast.Mod object at 0x7da2590d6920> call[constant[|].join, parameter[name[patternsClass]]]]
variable[regexMethod] assign[=] binary_operation[call[name[cfgParser].get, parameter[constant[BASIC], constant[method-rgx]]] + name[regexFuncAdd]]
variable[regexFunction] assign[=] binary_operation[call[name[cfgParser].get, parameter[constant[BASIC], constant[function-rgx]]] + name[regexFuncAdd]]
variable[regexClass] assign[=] binary_operation[call[name[cfgParser].get, parameter[constant[BASIC], constant[class-rgx]]] + name[regexClassAdd]]
call[name[cfgParser].set, parameter[constant[BASIC], constant[method-rgx], name[regexMethod]]]
call[name[cfgParser].set, parameter[constant[BASIC], constant[function-rgx], name[regexFunction]]]
call[name[cfgParser].set, parameter[constant[BASIC], constant[class-rgx], name[regexClass]]]
name[nameChecker].config.method_rgx assign[=] call[name[re].compile, parameter[name[regexMethod]]]
name[nameChecker].config.function_rgx assign[=] call[name[re].compile, parameter[name[regexFunction]]]
name[nameChecker].config.class_rgx assign[=] call[name[re].compile, parameter[name[regexClass]]] | keyword[def] identifier[allowPatternsForNameChecking] ( identifier[self] , identifier[patternsFunc] , identifier[patternsClass] ):
literal[string]
identifier[cfgParser] = identifier[self] . identifier[linter] . identifier[cfgfile_parser]
identifier[nameChecker] = identifier[self] . identifier[getCheckerByName] ( identifier[NameChecker] )
keyword[if] keyword[not] identifier[nameChecker] :
keyword[return]
keyword[if] identifier[patternsFunc] :
identifier[regexFuncAdd] = literal[string] % literal[string] . identifier[join] ( identifier[patternsFunc] )
keyword[else] :
identifier[regexFuncAdd] = literal[string]
keyword[if] identifier[patternsClass] :
identifier[regexClassAdd] = literal[string] % literal[string] . identifier[join] ( identifier[patternsClass] )
keyword[else] :
identifier[regexClassAdd] = literal[string]
identifier[regexMethod] = identifier[cfgParser] . identifier[get] ( literal[string] , literal[string] )+ identifier[regexFuncAdd]
identifier[regexFunction] = identifier[cfgParser] . identifier[get] ( literal[string] , literal[string] )+ identifier[regexFuncAdd]
identifier[regexClass] = identifier[cfgParser] . identifier[get] ( literal[string] , literal[string] )+ identifier[regexClassAdd]
identifier[cfgParser] . identifier[set] ( literal[string] , literal[string] , identifier[regexMethod] )
identifier[cfgParser] . identifier[set] ( literal[string] , literal[string] , identifier[regexFunction] )
identifier[cfgParser] . identifier[set] ( literal[string] , literal[string] , identifier[regexClass] )
identifier[nameChecker] . identifier[config] . identifier[method_rgx] = identifier[re] . identifier[compile] ( identifier[regexMethod] )
identifier[nameChecker] . identifier[config] . identifier[function_rgx] = identifier[re] . identifier[compile] ( identifier[regexFunction] )
identifier[nameChecker] . identifier[config] . identifier[class_rgx] = identifier[re] . identifier[compile] ( identifier[regexClass] ) | def allowPatternsForNameChecking(self, patternsFunc, patternsClass):
"""
Allow name exceptions by given patterns.
@param patternsFunc: patterns of special function names
@param patternsClass: patterns of special class names
"""
cfgParser = self.linter.cfgfile_parser
nameChecker = self.getCheckerByName(NameChecker)
if not nameChecker:
return # depends on [control=['if'], data=[]]
if patternsFunc:
regexFuncAdd = '|((%s).+)$' % '|'.join(patternsFunc) # depends on [control=['if'], data=[]]
else:
regexFuncAdd = ''
if patternsClass:
regexClassAdd = '|((%s).+)$' % '|'.join(patternsClass) # depends on [control=['if'], data=[]]
else:
regexClassAdd = ''
# Modify regex for function, method and class name.
regexMethod = cfgParser.get('BASIC', 'method-rgx') + regexFuncAdd
regexFunction = cfgParser.get('BASIC', 'function-rgx') + regexFuncAdd
regexClass = cfgParser.get('BASIC', 'class-rgx') + regexClassAdd
# Save to config parser.
cfgParser.set('BASIC', 'method-rgx', regexMethod)
cfgParser.set('BASIC', 'function-rgx', regexFunction)
cfgParser.set('BASIC', 'class-rgx', regexClass)
# Save to name checker.
nameChecker.config.method_rgx = re.compile(regexMethod)
nameChecker.config.function_rgx = re.compile(regexFunction)
nameChecker.config.class_rgx = re.compile(regexClass) |
def in_interactive_session():
    """ check if we're running in an interactive shell

    returns True if running under python/ipython interactive shell
    """
    from pandas import get_option

    def check_main():
        try:
            import __main__ as main
        except ModuleNotFoundError:
            # No __main__ module (e.g. some embedded interpreters):
            # fall back to the simulated-interactive option.
            return get_option('mode.sim_interactive')
        # A REPL's __main__ has no __file__; also honor the option that
        # lets callers force "interactive" behavior (e.g. in tests).
        return (not hasattr(main, '__file__') or
                get_option('mode.sim_interactive'))

    try:
        # __IPYTHON__ is injected by IPython; plain Python raises
        # NameError here and we fall through to check_main().
        return __IPYTHON__ or check_main()  # noqa
    except NameError:
        return check_main()
constant[ check if we're running in an interactive shell
returns True if running under python/ipython interactive shell
]
from relative_module[pandas] import module[get_option]
def function[check_main, parameter[]]:
<ast.Try object at 0x7da1b206af80>
return[<ast.BoolOp object at 0x7da1b26ac640>]
<ast.Try object at 0x7da204344e20> | keyword[def] identifier[in_interactive_session] ():
literal[string]
keyword[from] identifier[pandas] keyword[import] identifier[get_option]
keyword[def] identifier[check_main] ():
keyword[try] :
keyword[import] identifier[__main__] keyword[as] identifier[main]
keyword[except] identifier[ModuleNotFoundError] :
keyword[return] identifier[get_option] ( literal[string] )
keyword[return] ( keyword[not] identifier[hasattr] ( identifier[main] , literal[string] ) keyword[or]
identifier[get_option] ( literal[string] ))
keyword[try] :
keyword[return] identifier[__IPYTHON__] keyword[or] identifier[check_main] ()
keyword[except] identifier[NameError] :
keyword[return] identifier[check_main] () | def in_interactive_session():
""" check if we're running in an interactive shell
returns True if running under python/ipython interactive shell
"""
from pandas import get_option
def check_main():
try:
import __main__ as main # depends on [control=['try'], data=[]]
except ModuleNotFoundError:
return get_option('mode.sim_interactive') # depends on [control=['except'], data=[]]
return not hasattr(main, '__file__') or get_option('mode.sim_interactive')
try:
return __IPYTHON__ or check_main() # noqa # depends on [control=['try'], data=[]]
except NameError:
return check_main() # depends on [control=['except'], data=[]] |
def Network_setCacheDisabled(self, cacheDisabled):
    """Toggle ignoring of the cache for each request.

    Function path: Network.setCacheDisabled (domain: Network).

    :param cacheDisabled: bool -- cache disabled state.  If True, the
        cache will not be used.
    :return: result of the synchronous command (no documented value).
    """
    assert isinstance(cacheDisabled, bool), \
        "Argument 'cacheDisabled' must be of type '['bool']'. Received type: '%s'" % type(cacheDisabled)
    return self.synchronous_command('Network.setCacheDisabled',
                                    cacheDisabled=cacheDisabled)
constant[
Function path: Network.setCacheDisabled
Domain: Network
Method name: setCacheDisabled
Parameters:
Required arguments:
'cacheDisabled' (type: boolean) -> Cache disabled state.
No return value.
Description: Toggles ignoring cache for each request. If <code>true</code>, cache will not be used.
]
assert[call[name[isinstance], parameter[name[cacheDisabled], tuple[[<ast.Name object at 0x7da1b1116b90>]]]]]
variable[subdom_funcs] assign[=] call[name[self].synchronous_command, parameter[constant[Network.setCacheDisabled]]]
return[name[subdom_funcs]] | keyword[def] identifier[Network_setCacheDisabled] ( identifier[self] , identifier[cacheDisabled] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[cacheDisabled] ,( identifier[bool] ,)
), literal[string] % identifier[type] (
identifier[cacheDisabled] )
identifier[subdom_funcs] = identifier[self] . identifier[synchronous_command] ( literal[string] ,
identifier[cacheDisabled] = identifier[cacheDisabled] )
keyword[return] identifier[subdom_funcs] | def Network_setCacheDisabled(self, cacheDisabled):
"""
Function path: Network.setCacheDisabled
Domain: Network
Method name: setCacheDisabled
Parameters:
Required arguments:
'cacheDisabled' (type: boolean) -> Cache disabled state.
No return value.
Description: Toggles ignoring cache for each request. If <code>true</code>, cache will not be used.
"""
assert isinstance(cacheDisabled, (bool,)), "Argument 'cacheDisabled' must be of type '['bool']'. Received type: '%s'" % type(cacheDisabled)
subdom_funcs = self.synchronous_command('Network.setCacheDisabled', cacheDisabled=cacheDisabled)
return subdom_funcs |
def updateMappingsOnDeviceType(self, thingTypeId, logicalInterfaceId, mappingsObject, notificationStrategy = "never"):
    """
    Add mappings for a thing type.

    Issues a PUT against the *draft* thing-type mapping URL for the given
    logical interface, with a JSON body built from the arguments.

    Parameters:
      - thingTypeId (string) - the thing type
      - logicalInterfaceId (string) - the id of the application interface these mappings are for
      - notificationStrategy (string) - the notification strategy to use for these mappings
      - mappingsObject (Python dictionary corresponding to JSON object) example:
        { # eventid -> { property -> eventid property expression }
          "status" : {
            "eventCount" : "($state.eventCount == -1) ? $event.d.count : ($state.eventCount+1)",
          }
        }

    Returns the parsed JSON response body on HTTP 200.
    Throws APIException on failure: code -1 for JSON serialization
    problems, otherwise the non-200 HTTP status code.
    """
    # Target the draft version of the thing type's mappings.
    req = ApiClient.oneThingTypeMappingUrl % (self.host, "/draft", thingTypeId, logicalInterfaceId)
    try:
        mappings = json.dumps({
            "logicalInterfaceId" : logicalInterfaceId,
            "notificationStrategy" : notificationStrategy,
            "propertyMappings" : mappingsObject
        })
    except Exception as exc:
        # Surface serialization problems as an API error with code -1.
        raise ibmiotf.APIException(-1, "Exception formatting mappings object to JSON", exc)
    resp = requests.put(req, auth=self.credentials, headers={"Content-Type":"application/json"}, data=mappings,
        verify=self.verify)
    if resp.status_code == 200:
        self.logger.debug("Thing type mappings updated for logical interface")
    else:
        raise ibmiotf.APIException(resp.status_code, "HTTP error updating thing type mappings for logical interface", resp)
    return resp.json()
constant[
Add mappings for a thing type.
Parameters:
- thingTypeId (string) - the thing type
- logicalInterfaceId (string) - the id of the application interface these mappings are for
- notificationStrategy (string) - the notification strategy to use for these mappings
- mappingsObject (Python dictionary corresponding to JSON object) example:
{ # eventid -> { property -> eventid property expression }
"status" : {
"eventCount" : "($state.eventCount == -1) ? $event.d.count : ($state.eventCount+1)",
}
}
Throws APIException on failure.
]
variable[req] assign[=] binary_operation[name[ApiClient].oneThingTypeMappingUrl <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18fe91600>, <ast.Constant object at 0x7da18fe91c30>, <ast.Name object at 0x7da18fe93a30>, <ast.Name object at 0x7da18fe91390>]]]
<ast.Try object at 0x7da18fe91b70>
variable[resp] assign[=] call[name[requests].put, parameter[name[req]]]
if compare[name[resp].status_code equal[==] constant[200]] begin[:]
call[name[self].logger.debug, parameter[constant[Thing type mappings updated for logical interface]]]
return[call[name[resp].json, parameter[]]] | keyword[def] identifier[updateMappingsOnDeviceType] ( identifier[self] , identifier[thingTypeId] , identifier[logicalInterfaceId] , identifier[mappingsObject] , identifier[notificationStrategy] = literal[string] ):
literal[string]
identifier[req] = identifier[ApiClient] . identifier[oneThingTypeMappingUrl] %( identifier[self] . identifier[host] , literal[string] , identifier[thingTypeId] , identifier[logicalInterfaceId] )
keyword[try] :
identifier[mappings] = identifier[json] . identifier[dumps] ({
literal[string] : identifier[logicalInterfaceId] ,
literal[string] : identifier[notificationStrategy] ,
literal[string] : identifier[mappingsObject]
})
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
keyword[raise] identifier[ibmiotf] . identifier[APIException] (- literal[int] , literal[string] , identifier[exc] )
identifier[resp] = identifier[requests] . identifier[put] ( identifier[req] , identifier[auth] = identifier[self] . identifier[credentials] , identifier[headers] ={ literal[string] : literal[string] }, identifier[data] = identifier[mappings] ,
identifier[verify] = identifier[self] . identifier[verify] )
keyword[if] identifier[resp] . identifier[status_code] == literal[int] :
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
keyword[else] :
keyword[raise] identifier[ibmiotf] . identifier[APIException] ( identifier[resp] . identifier[status_code] , literal[string] , identifier[resp] )
keyword[return] identifier[resp] . identifier[json] () | def updateMappingsOnDeviceType(self, thingTypeId, logicalInterfaceId, mappingsObject, notificationStrategy='never'):
"""
Add mappings for a thing type.
Parameters:
- thingTypeId (string) - the thing type
- logicalInterfaceId (string) - the id of the application interface these mappings are for
- notificationStrategy (string) - the notification strategy to use for these mappings
- mappingsObject (Python dictionary corresponding to JSON object) example:
{ # eventid -> { property -> eventid property expression }
"status" : {
"eventCount" : "($state.eventCount == -1) ? $event.d.count : ($state.eventCount+1)",
}
}
Throws APIException on failure.
"""
req = ApiClient.oneThingTypeMappingUrl % (self.host, '/draft', thingTypeId, logicalInterfaceId)
try:
mappings = json.dumps({'logicalInterfaceId': logicalInterfaceId, 'notificationStrategy': notificationStrategy, 'propertyMappings': mappingsObject}) # depends on [control=['try'], data=[]]
except Exception as exc:
raise ibmiotf.APIException(-1, 'Exception formatting mappings object to JSON', exc) # depends on [control=['except'], data=['exc']]
resp = requests.put(req, auth=self.credentials, headers={'Content-Type': 'application/json'}, data=mappings, verify=self.verify)
if resp.status_code == 200:
self.logger.debug('Thing type mappings updated for logical interface') # depends on [control=['if'], data=[]]
else:
raise ibmiotf.APIException(resp.status_code, 'HTTP error updating thing type mappings for logical interface', resp)
return resp.json() |
def _parse_resolution(res):
""" Helper method for parsing given resolution. It will also try to parse a string into float
:return: A float value of resolution
:rtype: float
"""
if isinstance(res, str):
return float(res.strip('m'))
if isinstance(res, (int, float)):
return float(res)
raise TypeError('Resolution should be a float, got resolution of type {}'.format(type(res))) | def function[_parse_resolution, parameter[res]]:
constant[ Helper method for parsing given resolution. It will also try to parse a string into float
:return: A float value of resolution
:rtype: float
]
if call[name[isinstance], parameter[name[res], name[str]]] begin[:]
return[call[name[float], parameter[call[name[res].strip, parameter[constant[m]]]]]]
if call[name[isinstance], parameter[name[res], tuple[[<ast.Name object at 0x7da20c6a9e40>, <ast.Name object at 0x7da20c6aac20>]]]] begin[:]
return[call[name[float], parameter[name[res]]]]
<ast.Raise object at 0x7da20c6a8dc0> | keyword[def] identifier[_parse_resolution] ( identifier[res] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[res] , identifier[str] ):
keyword[return] identifier[float] ( identifier[res] . identifier[strip] ( literal[string] ))
keyword[if] identifier[isinstance] ( identifier[res] ,( identifier[int] , identifier[float] )):
keyword[return] identifier[float] ( identifier[res] )
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[res] ))) | def _parse_resolution(res):
""" Helper method for parsing given resolution. It will also try to parse a string into float
:return: A float value of resolution
:rtype: float
"""
if isinstance(res, str):
return float(res.strip('m')) # depends on [control=['if'], data=[]]
if isinstance(res, (int, float)):
return float(res) # depends on [control=['if'], data=[]]
raise TypeError('Resolution should be a float, got resolution of type {}'.format(type(res))) |
def add_step(self, step, step_id):
    """Register *step* under *step_id*; the first step registered
    becomes the initial step.  The id must not already be in use and
    *step* must be a ``Step`` instance."""
    assert step_id not in self._steps and step_id not in self._order
    assert isinstance(step, Step)
    self._order.append(step_id)
    self._steps[step_id] = step
constant[ Add a step to the list. The first step added becomes the initial
step. ]
assert[compare[name[step_id] <ast.NotIn object at 0x7da2590d7190> name[self]._steps]]
assert[compare[name[step_id] <ast.NotIn object at 0x7da2590d7190> name[self]._order]]
assert[call[name[isinstance], parameter[name[step], name[Step]]]]
call[name[self]._steps][name[step_id]] assign[=] name[step]
call[name[self]._order.append, parameter[name[step_id]]] | keyword[def] identifier[add_step] ( identifier[self] , identifier[step] , identifier[step_id] ):
literal[string]
keyword[assert] identifier[step_id] keyword[not] keyword[in] identifier[self] . identifier[_steps]
keyword[assert] identifier[step_id] keyword[not] keyword[in] identifier[self] . identifier[_order]
keyword[assert] identifier[isinstance] ( identifier[step] , identifier[Step] )
identifier[self] . identifier[_steps] [ identifier[step_id] ]= identifier[step]
identifier[self] . identifier[_order] . identifier[append] ( identifier[step_id] ) | def add_step(self, step, step_id):
""" Add a step to the list. The first step added becomes the initial
step. """
assert step_id not in self._steps
assert step_id not in self._order
assert isinstance(step, Step)
self._steps[step_id] = step
self._order.append(step_id) |
def _readse(self, pos):
"""Return interpretation of next bits as a signed exponential-Golomb code.
Advances position to after the read code.
Raises ReadError if the end of the bitstring is encountered while
reading the code.
"""
codenum, pos = self._readue(pos)
m = (codenum + 1) // 2
if not codenum % 2:
return -m, pos
else:
return m, pos | def function[_readse, parameter[self, pos]]:
constant[Return interpretation of next bits as a signed exponential-Golomb code.
Advances position to after the read code.
Raises ReadError if the end of the bitstring is encountered while
reading the code.
]
<ast.Tuple object at 0x7da1b104af80> assign[=] call[name[self]._readue, parameter[name[pos]]]
variable[m] assign[=] binary_operation[binary_operation[name[codenum] + constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]
if <ast.UnaryOp object at 0x7da1b104b850> begin[:]
return[tuple[[<ast.UnaryOp object at 0x7da1b101bb50>, <ast.Name object at 0x7da1b101afb0>]]] | keyword[def] identifier[_readse] ( identifier[self] , identifier[pos] ):
literal[string]
identifier[codenum] , identifier[pos] = identifier[self] . identifier[_readue] ( identifier[pos] )
identifier[m] =( identifier[codenum] + literal[int] )// literal[int]
keyword[if] keyword[not] identifier[codenum] % literal[int] :
keyword[return] - identifier[m] , identifier[pos]
keyword[else] :
keyword[return] identifier[m] , identifier[pos] | def _readse(self, pos):
"""Return interpretation of next bits as a signed exponential-Golomb code.
Advances position to after the read code.
Raises ReadError if the end of the bitstring is encountered while
reading the code.
"""
(codenum, pos) = self._readue(pos)
m = (codenum + 1) // 2
if not codenum % 2:
return (-m, pos) # depends on [control=['if'], data=[]]
else:
return (m, pos) |
def exists(cls, excludes_, **filters):
    """Tell whether any object matches *filters* but not *excludes_*.

    Delegates to ``query.filter``/``exclude``; database-backed
    subclasses may override this with a quicker implementation.

    :param excludes_: entities with this combination of field names and
        values are removed before the check
    :return: True if at least one matching object remains, else False
    """
    remaining = cls.query.filter(**filters)
    remaining = remaining.exclude(**excludes_)
    return True if remaining else False
constant[ Return `True` if objects matching the provided filters and excludes
exist if not return false.
Calls the `filter` method by default, but can be overridden for better and
quicker implementations that may be supported by a database.
:param excludes_: entities without this combination of field name and
values will be returned
]
variable[results] assign[=] call[call[name[cls].query.filter, parameter[]].exclude, parameter[]]
return[call[name[bool], parameter[name[results]]]] | keyword[def] identifier[exists] ( identifier[cls] , identifier[excludes_] ,** identifier[filters] ):
literal[string]
identifier[results] = identifier[cls] . identifier[query] . identifier[filter] (** identifier[filters] ). identifier[exclude] (** identifier[excludes_] )
keyword[return] identifier[bool] ( identifier[results] ) | def exists(cls, excludes_, **filters):
""" Return `True` if objects matching the provided filters and excludes
exist if not return false.
Calls the `filter` method by default, but can be overridden for better and
quicker implementations that may be supported by a database.
:param excludes_: entities without this combination of field name and
values will be returned
"""
results = cls.query.filter(**filters).exclude(**excludes_)
return bool(results) |
def to_dict(self):
    """Return a dict snapshot of this task configuration object,
    mapping each declared property name to its current value."""
    snapshot = {}
    for prop_name, _ in find_class_properties(self.__class__):
        snapshot[prop_name] = getattr(self, prop_name)
    return snapshot
return config | def function[to_dict, parameter[self]]:
constant[Returns a dict with the representation of this task configuration object.]
variable[properties] assign[=] call[name[find_class_properties], parameter[name[self].__class__]]
variable[config] assign[=] <ast.DictComp object at 0x7da20c6a9000>
return[name[config]] | keyword[def] identifier[to_dict] ( identifier[self] ):
literal[string]
identifier[properties] = identifier[find_class_properties] ( identifier[self] . identifier[__class__] )
identifier[config] ={
identifier[name] : identifier[self] . identifier[__getattribute__] ( identifier[name] ) keyword[for] identifier[name] , identifier[_] keyword[in] identifier[properties]
}
keyword[return] identifier[config] | def to_dict(self):
"""Returns a dict with the representation of this task configuration object."""
properties = find_class_properties(self.__class__)
config = {name: self.__getattribute__(name) for (name, _) in properties}
return config |
def get_selinux_status():
    """Return the SELinux status of the host.

    :return: string -- the trimmed output of ``getenforce``
        (e.g. Enforcing, Permissive or Disabled)
    """
    getenforce_command_exists()
    # We shell out to getenforce (libselinux-utils) instead of reading
    # /sys/fs/selinux/{enforce,status} directly: status is empty (why?)
    # and enforce cannot tell whether SELinux is disabled.
    status = run_cmd(["getenforce"], return_output=True)
    status = status.strip()
    logger.debug("SELinux is %r", status)
    return status
constant[
get SELinux status of host
:return: string, one of Enforced, Permissive, Disabled
]
call[name[getenforce_command_exists], parameter[]]
variable[o] assign[=] call[call[name[run_cmd], parameter[list[[<ast.Constant object at 0x7da1b11b9150>]]]].strip, parameter[]]
call[name[logger].debug, parameter[constant[SELinux is %r], name[o]]]
return[name[o]] | keyword[def] identifier[get_selinux_status] ():
literal[string]
identifier[getenforce_command_exists] ()
identifier[o] = identifier[run_cmd] ([ literal[string] ], identifier[return_output] = keyword[True] ). identifier[strip] ()
identifier[logger] . identifier[debug] ( literal[string] , identifier[o] )
keyword[return] identifier[o] | def get_selinux_status():
"""
get SELinux status of host
:return: string, one of Enforced, Permissive, Disabled
"""
getenforce_command_exists()
# alternatively, we could read directly from /sys/fs/selinux/{enforce,status}, but status is
# empty (why?) and enforce doesn't tell whether SELinux is disabled or not
o = run_cmd(['getenforce'], return_output=True).strip() # libselinux-utils
logger.debug('SELinux is %r', o)
return o |
def _analytic_host_file_directory(self):
    """
    Return the analytic output directory matching the current domain
    status (potentially up/down, suspicious, or up as the fallback).
    """
    analytic_dirs = PyFunceble.OUTPUTS["analytic"]["directories"]
    status_lists = PyFunceble.STATUS["list"]
    status = self.domain_status.lower()
    # Base path of the analytic directory tree.
    base = self.output_parent_dir + analytic_dirs["parent"]
    if status in status_lists["potentially_up"]:
        return base + analytic_dirs["potentially_up"]
    if status in status_lists["potentially_down"]:
        return base + analytic_dirs["potentially_down"]
    if status in status_lists["suspicious"]:
        return base + analytic_dirs["suspicious"]
    # Any other status goes to the "up" directory.
    return base + analytic_dirs["up"]
constant[
Return the analytic directory to write depending of the matched
status.
]
variable[output_dir] assign[=] binary_operation[name[self].output_parent_dir + call[call[call[name[PyFunceble].OUTPUTS][constant[analytic]]][constant[directories]]][constant[parent]]]
if compare[call[name[self].domain_status.lower, parameter[]] in call[call[name[PyFunceble].STATUS][constant[list]]][constant[potentially_up]]] begin[:]
<ast.AugAssign object at 0x7da1b23447c0>
return[name[output_dir]] | keyword[def] identifier[_analytic_host_file_directory] ( identifier[self] ):
literal[string]
identifier[output_dir] =(
identifier[self] . identifier[output_parent_dir]
+ identifier[PyFunceble] . identifier[OUTPUTS] [ literal[string] ][ literal[string] ][ literal[string] ]
)
keyword[if] identifier[self] . identifier[domain_status] . identifier[lower] () keyword[in] identifier[PyFunceble] . identifier[STATUS] [ literal[string] ][ literal[string] ]:
identifier[output_dir] += identifier[PyFunceble] . identifier[OUTPUTS] [ literal[string] ][ literal[string] ][
literal[string]
]
keyword[elif] (
identifier[self] . identifier[domain_status] . identifier[lower] () keyword[in] identifier[PyFunceble] . identifier[STATUS] [ literal[string] ][ literal[string] ]
):
identifier[output_dir] += identifier[PyFunceble] . identifier[OUTPUTS] [ literal[string] ][ literal[string] ][
literal[string]
]
keyword[elif] identifier[self] . identifier[domain_status] . identifier[lower] () keyword[in] identifier[PyFunceble] . identifier[STATUS] [ literal[string] ][ literal[string] ]:
identifier[output_dir] += identifier[PyFunceble] . identifier[OUTPUTS] [ literal[string] ][ literal[string] ][ literal[string] ]
keyword[else] :
identifier[output_dir] += identifier[PyFunceble] . identifier[OUTPUTS] [ literal[string] ][ literal[string] ][ literal[string] ]
keyword[return] identifier[output_dir] | def _analytic_host_file_directory(self):
"""
Return the analytic directory to write depending of the matched
status.
"""
# We construct the path to the analytic directory.
output_dir = self.output_parent_dir + PyFunceble.OUTPUTS['analytic']['directories']['parent']
if self.domain_status.lower() in PyFunceble.STATUS['list']['potentially_up']:
# The status is in the list of analytic up status.
# We complete the output directory.
output_dir += PyFunceble.OUTPUTS['analytic']['directories']['potentially_up'] # depends on [control=['if'], data=[]]
elif self.domain_status.lower() in PyFunceble.STATUS['list']['potentially_down']:
# The status is in the list of analytic down status.
# We complete the output directory.
output_dir += PyFunceble.OUTPUTS['analytic']['directories']['potentially_down'] # depends on [control=['if'], data=[]]
elif self.domain_status.lower() in PyFunceble.STATUS['list']['suspicious']:
# The status is in the list of analytic suspicious status.
# We complete the output directory.
output_dir += PyFunceble.OUTPUTS['analytic']['directories']['suspicious'] # depends on [control=['if'], data=[]]
else:
# The status is not in the list of analytic down or up status.
# We complete the output directory.
output_dir += PyFunceble.OUTPUTS['analytic']['directories']['up']
return output_dir |
def equalizer(self, frequency, width_q, gain_db):
    """Apply a two-pole peaking equalisation (EQ) filter to boost or
    reduce around a given frequency.

    This effect can be applied multiple times to produce complex EQ
    curves.

    Parameters
    ----------
    frequency : float
        The filter's central frequency in Hz.
    width_q : float
        The filter's width as a Q-factor.
    gain_db : float
        The filter's gain in dB.

    See Also
    --------
    bass, treble

    """
    # Validate everything up front so the effect chain is never left in
    # a half-updated state.
    if not is_number(frequency) or frequency <= 0:
        raise ValueError("frequency must be a positive number.")
    if not is_number(width_q) or width_q <= 0:
        raise ValueError("width_q must be a positive number.")
    if not is_number(gain_db):
        raise ValueError("gain_db must be a number.")

    # SoX argument layout: equalizer <frequency> <width>q <gain>
    self.effects.extend([
        'equalizer',
        '{:f}'.format(frequency),
        '{:f}q'.format(width_q),
        '{:f}'.format(gain_db),
    ])
    self.effects_log.append('equalizer')
    return self
constant[Apply a two-pole peaking equalisation (EQ) filter to boost or
reduce around a given frequency.
This effect can be applied multiple times to produce complex EQ curves.
Parameters
----------
frequency : float
The filter's central frequency in Hz.
width_q : float
The filter's width as a Q-factor.
gain_db : float
The filter's gain in dB.
See Also
--------
bass, treble
]
if <ast.BoolOp object at 0x7da18c4cdc60> begin[:]
<ast.Raise object at 0x7da18c4cfaf0>
if <ast.BoolOp object at 0x7da18c4cf970> begin[:]
<ast.Raise object at 0x7da18f09ee00>
if <ast.UnaryOp object at 0x7da18f09dd50> begin[:]
<ast.Raise object at 0x7da18f09f700>
variable[effect_args] assign[=] list[[<ast.Constant object at 0x7da18f09e1d0>, <ast.Call object at 0x7da18f09e530>, <ast.Call object at 0x7da18f09cf70>, <ast.Call object at 0x7da18f09f6d0>]]
call[name[self].effects.extend, parameter[name[effect_args]]]
call[name[self].effects_log.append, parameter[constant[equalizer]]]
return[name[self]] | keyword[def] identifier[equalizer] ( identifier[self] , identifier[frequency] , identifier[width_q] , identifier[gain_db] ):
literal[string]
keyword[if] keyword[not] identifier[is_number] ( identifier[frequency] ) keyword[or] identifier[frequency] <= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[is_number] ( identifier[width_q] ) keyword[or] identifier[width_q] <= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[is_number] ( identifier[gain_db] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[effect_args] =[
literal[string] ,
literal[string] . identifier[format] ( identifier[frequency] ),
literal[string] . identifier[format] ( identifier[width_q] ),
literal[string] . identifier[format] ( identifier[gain_db] )
]
identifier[self] . identifier[effects] . identifier[extend] ( identifier[effect_args] )
identifier[self] . identifier[effects_log] . identifier[append] ( literal[string] )
keyword[return] identifier[self] | def equalizer(self, frequency, width_q, gain_db):
"""Apply a two-pole peaking equalisation (EQ) filter to boost or
reduce around a given frequency.
This effect can be applied multiple times to produce complex EQ curves.
Parameters
----------
frequency : float
The filter's central frequency in Hz.
width_q : float
The filter's width as a Q-factor.
gain_db : float
The filter's gain in dB.
See Also
--------
bass, treble
"""
if not is_number(frequency) or frequency <= 0:
raise ValueError('frequency must be a positive number.') # depends on [control=['if'], data=[]]
if not is_number(width_q) or width_q <= 0:
raise ValueError('width_q must be a positive number.') # depends on [control=['if'], data=[]]
if not is_number(gain_db):
raise ValueError('gain_db must be a number.') # depends on [control=['if'], data=[]]
effect_args = ['equalizer', '{:f}'.format(frequency), '{:f}q'.format(width_q), '{:f}'.format(gain_db)]
self.effects.extend(effect_args)
self.effects_log.append('equalizer')
return self |
def _on_remove_library(self, *event):
    """Callback method handling the removal of an existing library
    """
    tree_view = self.view['library_tree_view']
    tree_view.grab_focus()
    if not react_to_event(self.view, tree_view, event):
        return
    cursor_path = tree_view.get_cursor()[0]
    if cursor_path is not None:
        # Drop the selected library from the (preliminary) config mapping
        # and push the updated mapping back onto the config model.
        library_name = self.library_list_store[int(cursor_path[0])][0]
        library_config = self.core_config_model.get_current_config_value(
            "LIBRARY_PATHS", use_preliminary=True, default={})
        del library_config[library_name]
        self.core_config_model.set_preliminary_config_value("LIBRARY_PATHS", library_config)
        # Keep a sensible row selected after the removal shrank the list.
        if len(self.library_list_store) > 0:
            tree_view.set_cursor(min(cursor_path[0], len(self.library_list_store) - 1))
    return True
constant[Callback method handling the removal of an existing library
]
call[call[name[self].view][constant[library_tree_view]].grab_focus, parameter[]]
if call[name[react_to_event], parameter[name[self].view, call[name[self].view][constant[library_tree_view]], name[event]]] begin[:]
variable[path] assign[=] call[call[call[name[self].view][constant[library_tree_view]].get_cursor, parameter[]]][constant[0]]
if compare[name[path] is_not constant[None]] begin[:]
variable[library_name] assign[=] call[call[name[self].library_list_store][call[name[int], parameter[call[name[path]][constant[0]]]]]][constant[0]]
variable[library_config] assign[=] call[name[self].core_config_model.get_current_config_value, parameter[constant[LIBRARY_PATHS]]]
<ast.Delete object at 0x7da20c6ab880>
call[name[self].core_config_model.set_preliminary_config_value, parameter[constant[LIBRARY_PATHS], name[library_config]]]
if compare[call[name[len], parameter[name[self].library_list_store]] greater[>] constant[0]] begin[:]
call[call[name[self].view][constant[library_tree_view]].set_cursor, parameter[call[name[min], parameter[call[name[path]][constant[0]], binary_operation[call[name[len], parameter[name[self].library_list_store]] - constant[1]]]]]]
return[constant[True]] | keyword[def] identifier[_on_remove_library] ( identifier[self] ,* identifier[event] ):
literal[string]
identifier[self] . identifier[view] [ literal[string] ]. identifier[grab_focus] ()
keyword[if] identifier[react_to_event] ( identifier[self] . identifier[view] , identifier[self] . identifier[view] [ literal[string] ], identifier[event] ):
identifier[path] = identifier[self] . identifier[view] [ literal[string] ]. identifier[get_cursor] ()[ literal[int] ]
keyword[if] identifier[path] keyword[is] keyword[not] keyword[None] :
identifier[library_name] = identifier[self] . identifier[library_list_store] [ identifier[int] ( identifier[path] [ literal[int] ])][ literal[int] ]
identifier[library_config] = identifier[self] . identifier[core_config_model] . identifier[get_current_config_value] ( literal[string] , identifier[use_preliminary] = keyword[True] ,
identifier[default] ={})
keyword[del] identifier[library_config] [ identifier[library_name] ]
identifier[self] . identifier[core_config_model] . identifier[set_preliminary_config_value] ( literal[string] , identifier[library_config] )
keyword[if] identifier[len] ( identifier[self] . identifier[library_list_store] )> literal[int] :
identifier[self] . identifier[view] [ literal[string] ]. identifier[set_cursor] ( identifier[min] ( identifier[path] [ literal[int] ], identifier[len] ( identifier[self] . identifier[library_list_store] )- literal[int] ))
keyword[return] keyword[True] | def _on_remove_library(self, *event):
"""Callback method handling the removal of an existing library
"""
self.view['library_tree_view'].grab_focus()
if react_to_event(self.view, self.view['library_tree_view'], event):
path = self.view['library_tree_view'].get_cursor()[0]
if path is not None:
library_name = self.library_list_store[int(path[0])][0]
library_config = self.core_config_model.get_current_config_value('LIBRARY_PATHS', use_preliminary=True, default={})
del library_config[library_name]
self.core_config_model.set_preliminary_config_value('LIBRARY_PATHS', library_config)
if len(self.library_list_store) > 0:
self.view['library_tree_view'].set_cursor(min(path[0], len(self.library_list_store) - 1)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['path']]
return True # depends on [control=['if'], data=[]] |
def exportCertificate(self, certName, outFolder=None):
    """
    This operation downloads an SSL certificate. The file returned by
    the server is an X.509 certificate. The downloaded certificate can
    be imported into a client that is making HTTP requests.
    Inputs:
       certName - name of the cert to export
       outFolder - folder on disk to save the certificate.
                   Defaults to the system temp directory when omitted.
    """
    if outFolder is None:
        # No explicit target given -- fall back to the temp directory.
        outFolder = tempfile.gettempdir()
    url = "{base}/sslCertificates/{cert}/export".format(base=self._url,
                                                        cert=certName)
    return self._post(url=url,
                      param_dict={"f": "json"},
                      out_folder=outFolder,
                      proxy_port=self._proxy_port,
                      proxy_url=self._proxy_url)
constant[
This operation downloads an SSL certificate. The file returned by
the server is an X.509 certificate. The downloaded certificate can
be imported into a client that is making HTTP requests.
Inputs:
certName - name of the cert to export
outFolder - folder on disk to save the certificate.
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da2041d9e70>], [<ast.Constant object at 0x7da2041dbe50>]]
variable[url] assign[=] binary_operation[name[self]._url + call[constant[/sslCertificates/{cert}/export].format, parameter[]]]
if compare[name[outFolder] is constant[None]] begin[:]
variable[outFolder] assign[=] call[name[tempfile].gettempdir, parameter[]]
return[call[name[self]._post, parameter[]]] | keyword[def] identifier[exportCertificate] ( identifier[self] , identifier[certName] , identifier[outFolder] = keyword[None] ):
literal[string]
identifier[params] ={ literal[string] : literal[string] }
identifier[url] = identifier[self] . identifier[_url] + literal[string] . identifier[format] (
identifier[cert] = identifier[certName] )
keyword[if] identifier[outFolder] keyword[is] keyword[None] :
identifier[outFolder] = identifier[tempfile] . identifier[gettempdir] ()
keyword[return] identifier[self] . identifier[_post] ( identifier[url] = identifier[url] , identifier[param_dict] = identifier[params] ,
identifier[out_folder] = identifier[outFolder] ,
identifier[proxy_port] = identifier[self] . identifier[_proxy_port] ,
identifier[proxy_url] = identifier[self] . identifier[_proxy_url] ) | def exportCertificate(self, certName, outFolder=None):
"""
This operation downloads an SSL certificate. The file returned by
the server is an X.509 certificate. The downloaded certificate can
be imported into a client that is making HTTP requests.
Inputs:
certName - name of the cert to export
outFolder - folder on disk to save the certificate.
"""
params = {'f': 'json'}
url = self._url + '/sslCertificates/{cert}/export'.format(cert=certName)
if outFolder is None:
outFolder = tempfile.gettempdir() # depends on [control=['if'], data=['outFolder']]
return self._post(url=url, param_dict=params, out_folder=outFolder, proxy_port=self._proxy_port, proxy_url=self._proxy_url) |
def Vector(self, off):
    """Vector retrieves the start of data of the vector whose offset is
    stored at "off" in this object."""
    N.enforce_number(off, N.UOffsetTFlags)
    # Resolve the field offset relative to this object's position.
    field_pos = off + self.Pos
    # Follow the stored indirection to the vector itself, then skip the
    # length metadata that precedes the vector's elements.
    data_start = field_pos + self.Get(N.UOffsetTFlags, field_pos)
    return data_start + N.UOffsetTFlags.bytewidth
constant[Vector retrieves the start of data of the vector whose offset is
stored at "off" in this object.]
call[name[N].enforce_number, parameter[name[off], name[N].UOffsetTFlags]]
<ast.AugAssign object at 0x7da18fe90b20>
variable[x] assign[=] binary_operation[name[off] + call[name[self].Get, parameter[name[N].UOffsetTFlags, name[off]]]]
<ast.AugAssign object at 0x7da18bc73d90>
return[name[x]] | keyword[def] identifier[Vector] ( identifier[self] , identifier[off] ):
literal[string]
identifier[N] . identifier[enforce_number] ( identifier[off] , identifier[N] . identifier[UOffsetTFlags] )
identifier[off] += identifier[self] . identifier[Pos]
identifier[x] = identifier[off] + identifier[self] . identifier[Get] ( identifier[N] . identifier[UOffsetTFlags] , identifier[off] )
identifier[x] += identifier[N] . identifier[UOffsetTFlags] . identifier[bytewidth]
keyword[return] identifier[x] | def Vector(self, off):
"""Vector retrieves the start of data of the vector whose offset is
stored at "off" in this object."""
N.enforce_number(off, N.UOffsetTFlags)
off += self.Pos
x = off + self.Get(N.UOffsetTFlags, off)
# data starts after metadata containing the vector length
x += N.UOffsetTFlags.bytewidth
return x |
def _load(self):
"""Load the session from the store, by the id from cookie"""
self.session_id = self._session_object.get_session_id()
# protection against session_id tampering
if self.session_id and not self._valid_session_id(self.session_id):
self.session_id = None
if self.session_id:
d = self.store[self.session_id]
if isinstance(d, dict) and d:
self.update(d)
if not self.session_id:
self.session_id = self._session_object.generate_session_id()
if not self._data:
if self._initializer and isinstance(self._initializer, dict):
self.update(deepcopy(self._initializer))
self._session_object.set_session_id(self.session_id) | def function[_load, parameter[self]]:
constant[Load the session from the store, by the id from cookie]
name[self].session_id assign[=] call[name[self]._session_object.get_session_id, parameter[]]
if <ast.BoolOp object at 0x7da1b05d9600> begin[:]
name[self].session_id assign[=] constant[None]
if name[self].session_id begin[:]
variable[d] assign[=] call[name[self].store][name[self].session_id]
if <ast.BoolOp object at 0x7da1b05d9c30> begin[:]
call[name[self].update, parameter[name[d]]]
if <ast.UnaryOp object at 0x7da1b05d9390> begin[:]
name[self].session_id assign[=] call[name[self]._session_object.generate_session_id, parameter[]]
if <ast.UnaryOp object at 0x7da1b05d8370> begin[:]
if <ast.BoolOp object at 0x7da1b05db1f0> begin[:]
call[name[self].update, parameter[call[name[deepcopy], parameter[name[self]._initializer]]]]
call[name[self]._session_object.set_session_id, parameter[name[self].session_id]] | keyword[def] identifier[_load] ( identifier[self] ):
literal[string]
identifier[self] . identifier[session_id] = identifier[self] . identifier[_session_object] . identifier[get_session_id] ()
keyword[if] identifier[self] . identifier[session_id] keyword[and] keyword[not] identifier[self] . identifier[_valid_session_id] ( identifier[self] . identifier[session_id] ):
identifier[self] . identifier[session_id] = keyword[None]
keyword[if] identifier[self] . identifier[session_id] :
identifier[d] = identifier[self] . identifier[store] [ identifier[self] . identifier[session_id] ]
keyword[if] identifier[isinstance] ( identifier[d] , identifier[dict] ) keyword[and] identifier[d] :
identifier[self] . identifier[update] ( identifier[d] )
keyword[if] keyword[not] identifier[self] . identifier[session_id] :
identifier[self] . identifier[session_id] = identifier[self] . identifier[_session_object] . identifier[generate_session_id] ()
keyword[if] keyword[not] identifier[self] . identifier[_data] :
keyword[if] identifier[self] . identifier[_initializer] keyword[and] identifier[isinstance] ( identifier[self] . identifier[_initializer] , identifier[dict] ):
identifier[self] . identifier[update] ( identifier[deepcopy] ( identifier[self] . identifier[_initializer] ))
identifier[self] . identifier[_session_object] . identifier[set_session_id] ( identifier[self] . identifier[session_id] ) | def _load(self):
"""Load the session from the store, by the id from cookie"""
self.session_id = self._session_object.get_session_id()
# protection against session_id tampering
if self.session_id and (not self._valid_session_id(self.session_id)):
self.session_id = None # depends on [control=['if'], data=[]]
if self.session_id:
d = self.store[self.session_id]
if isinstance(d, dict) and d:
self.update(d) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not self.session_id:
self.session_id = self._session_object.generate_session_id() # depends on [control=['if'], data=[]]
if not self._data:
if self._initializer and isinstance(self._initializer, dict):
self.update(deepcopy(self._initializer)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
self._session_object.set_session_id(self.session_id) |
def _scaled_int(s):
    r"""Convert a 3 byte string to a signed integer value."""
    buf = bytearray(s)  # For Python 2
    # The leftmost bit of the first byte is the sign flag.
    sign = -1 if buf[0] & 0x80 else 1
    # The remaining 23 bits form the magnitude.
    int_val = ((buf[0] & 0x7f) << 16) | (buf[1] << 8) | buf[2]
    log.debug('Source: %s Int: %x Sign: %d',
              ' '.join(hex(c) for c in buf), int_val, sign)
    # Apply the sign and scale down by 10000.
    return (sign * int_val) / 10000.
constant[Convert a 3 byte string to a signed integer value.]
variable[s] assign[=] call[name[bytearray], parameter[name[s]]]
variable[sign] assign[=] binary_operation[constant[1] - binary_operation[binary_operation[call[name[s]][constant[0]] <ast.BitAnd object at 0x7da2590d6b60> constant[128]] <ast.RShift object at 0x7da2590d6a40> constant[6]]]
variable[int_val] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[call[name[s]][constant[0]] <ast.BitAnd object at 0x7da2590d6b60> constant[127]] <ast.LShift object at 0x7da2590d69e0> constant[16]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[call[name[s]][constant[1]] <ast.LShift object at 0x7da2590d69e0> constant[8]]] <ast.BitOr object at 0x7da2590d6aa0> call[name[s]][constant[2]]]
call[name[log].debug, parameter[constant[Source: %s Int: %x Sign: %d], call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da1b22af940>]], name[int_val], name[sign]]]
return[binary_operation[binary_operation[name[sign] * name[int_val]] / constant[10000.0]]] | keyword[def] identifier[_scaled_int] ( identifier[s] ):
literal[string]
identifier[s] = identifier[bytearray] ( identifier[s] )
identifier[sign] = literal[int] -(( identifier[s] [ literal[int] ]& literal[int] )>> literal[int] )
identifier[int_val] =((( identifier[s] [ literal[int] ]& literal[int] )<< literal[int] )|( identifier[s] [ literal[int] ]<< literal[int] )| identifier[s] [ literal[int] ])
identifier[log] . identifier[debug] ( literal[string] , literal[string] . identifier[join] ( identifier[hex] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[s] ), identifier[int_val] , identifier[sign] )
keyword[return] ( identifier[sign] * identifier[int_val] )/ literal[int] | def _scaled_int(s):
"""Convert a 3 byte string to a signed integer value."""
s = bytearray(s) # For Python 2
# Get leftmost bit (sign) as 1 (if 0) or -1 (if 1)
sign = 1 - ((s[0] & 128) >> 6)
# Combine remaining bits
int_val = (s[0] & 127) << 16 | s[1] << 8 | s[2]
log.debug('Source: %s Int: %x Sign: %d', ' '.join((hex(c) for c in s)), int_val, sign)
# Return scaled and with proper sign
return sign * int_val / 10000.0 |
def get_top(self):
    '''
    Returns the high data derived from the top file
    '''
    tops, errors = self.get_tops()
    try:
        merged_tops = self.merge_tops(tops)
    except TypeError:
        # Merging malformed top data fails with a TypeError; report it
        # as a rendering error and return an empty high state.
        merged_tops = OrderedDict()
        errors.append('Error encountered while rendering pillar top file.')
    return merged_tops, errors
return merged_tops, errors | def function[get_top, parameter[self]]:
constant[
Returns the high data derived from the top file
]
<ast.Tuple object at 0x7da1b21f1630> assign[=] call[name[self].get_tops, parameter[]]
<ast.Try object at 0x7da18dc9aa10>
return[tuple[[<ast.Name object at 0x7da204623eb0>, <ast.Name object at 0x7da204623df0>]]] | keyword[def] identifier[get_top] ( identifier[self] ):
literal[string]
identifier[tops] , identifier[errors] = identifier[self] . identifier[get_tops] ()
keyword[try] :
identifier[merged_tops] = identifier[self] . identifier[merge_tops] ( identifier[tops] )
keyword[except] identifier[TypeError] keyword[as] identifier[err] :
identifier[merged_tops] = identifier[OrderedDict] ()
identifier[errors] . identifier[append] ( literal[string] )
keyword[return] identifier[merged_tops] , identifier[errors] | def get_top(self):
"""
Returns the high data derived from the top file
"""
(tops, errors) = self.get_tops()
try:
merged_tops = self.merge_tops(tops) # depends on [control=['try'], data=[]]
except TypeError as err:
merged_tops = OrderedDict()
errors.append('Error encountered while rendering pillar top file.') # depends on [control=['except'], data=[]]
return (merged_tops, errors) |
def capitalize_keys(suspect):
    """
    Return a new dict whose lowercase string keys are rewritten in
    PascalCase ('foo_bar' -> 'FooBar', 'foo' -> 'Foo').

    Keys that are not strings, or that already start with an uppercase
    letter, are skipped entirely.  Plain ``dict`` values (and plain
    dicts inside ``list`` values) are converted recursively.

    :param suspect: the dict to convert
    :return new dict with capitalized keys:
    """
    if not isinstance(suspect, dict):
        raise TypeError('you must pass a dict.')
    converted = {}
    for key in list(suspect):
        # Skip non-string keys and keys that already look capitalised.
        if not isinstance(key, six.string_types) or key[0].isupper():
            continue
        # Capitalising every '_'-separated chunk handles both the
        # snake_case and the single-word case (split yields one chunk).
        new_key = ''.join(chunk.capitalize() for chunk in key.split('_'))
        value = suspect.get(key)
        # Exact-type checks: subclasses of dict/list are stored
        # unchanged rather than recursed into.
        if type(value) is dict:
            converted[new_key] = capitalize_keys(value)
        elif type(value) is list:
            converted[new_key] = [capitalize_keys(item) if isinstance(item, dict) else item
                                  for item in value]
        else:
            converted[new_key] = value
    return converted
constant[
:param suspect:
:return new dict with capitalized keys:
]
if <ast.UnaryOp object at 0x7da18f810190> begin[:]
<ast.Raise object at 0x7da18f8100d0>
variable[converted] assign[=] dictionary[[], []]
for taget[name[key]] in starred[call[name[list], parameter[name[suspect]]]] begin[:]
if <ast.UnaryOp object at 0x7da18f811810> begin[:]
continue
if call[call[name[key]][constant[0]].isupper, parameter[]] begin[:]
continue
if compare[constant[_] in name[key]] begin[:]
variable[new_key] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da18f812140>]]
variable[value] assign[=] call[name[suspect].get, parameter[name[key]]]
if compare[call[name[type], parameter[name[value]]] is name[dict]] begin[:]
call[name[converted]][name[new_key]] assign[=] call[name[capitalize_keys], parameter[name[value]]]
return[name[converted]] | keyword[def] identifier[capitalize_keys] ( identifier[suspect] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[suspect] , identifier[dict] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[converted] ={}
keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[suspect] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[key] , identifier[six] . identifier[string_types] ):
keyword[continue]
keyword[if] identifier[key] [ literal[int] ]. identifier[isupper] ():
keyword[continue]
keyword[if] literal[string] keyword[in] identifier[key] :
identifier[new_key] = literal[string] . identifier[join] ([ identifier[chunk] . identifier[capitalize] () keyword[for] identifier[index] , identifier[chunk] keyword[in] identifier[enumerate] ( identifier[key] . identifier[split] ( literal[string] ))])
keyword[else] :
identifier[new_key] = identifier[key] . identifier[capitalize] ()
identifier[value] = identifier[suspect] . identifier[get] ( identifier[key] )
keyword[if] identifier[type] ( identifier[value] ) keyword[is] identifier[dict] :
identifier[converted] [ identifier[new_key] ]= identifier[capitalize_keys] ( identifier[value] )
keyword[elif] identifier[type] ( identifier[value] ) keyword[is] identifier[list] :
identifier[converted] [ identifier[new_key] ]=[ identifier[capitalize_keys] ( identifier[x] ) keyword[if] identifier[isinstance] ( identifier[x] , identifier[dict] ) keyword[else] identifier[x] keyword[for] identifier[x] keyword[in] identifier[value] ]
keyword[else] :
identifier[converted] [ identifier[new_key] ]= identifier[value]
keyword[return] identifier[converted] | def capitalize_keys(suspect):
"""
:param suspect:
:return new dict with capitalized keys:
"""
if not isinstance(suspect, dict):
raise TypeError('you must pass a dict.') # depends on [control=['if'], data=[]]
converted = {}
for key in list(suspect):
if not isinstance(key, six.string_types):
continue # depends on [control=['if'], data=[]]
if key[0].isupper():
continue # depends on [control=['if'], data=[]]
if '_' in key:
new_key = ''.join([chunk.capitalize() for (index, chunk) in enumerate(key.split('_'))]) # depends on [control=['if'], data=['key']]
else:
new_key = key.capitalize()
value = suspect.get(key)
if type(value) is dict:
converted[new_key] = capitalize_keys(value) # depends on [control=['if'], data=[]]
elif type(value) is list:
converted[new_key] = [capitalize_keys(x) if isinstance(x, dict) else x for x in value] # depends on [control=['if'], data=[]]
else:
converted[new_key] = value # depends on [control=['for'], data=['key']]
return converted |
def parse(date, dayfirst=True):
    '''Parse a `date` into a `FlexiDate`.
    @param date: the date to parse - may be a string, datetime.date,
    datetime.datetime or FlexiDate.
    TODO: support for quarters e.g. Q4 1980 or 1954 Q3
    TODO: support latin stuff like M.DCC.LIII
    TODO: convert '-' to '?' when used that way
        e.g. had this date [181-]
    '''
    if not date:
        return None
    if isinstance(date, FlexiDate):
        # Already parsed -- nothing to do.
        return date
    if isinstance(date, int):
        return FlexiDate(year=date)
    # datetime.datetime must be tested before datetime.date because it
    # is a subclass of datetime.date.
    if isinstance(date, datetime.datetime):
        return PythonDateTimeParser().parse(date)
    if isinstance(date, datetime.date):
        return PythonDateParser().parse(date)
    # Anything else is assumed to be a string.
    out = DateutilDateParser().parse(date, dayfirst=dayfirst)
    if out is not None:
        return out
    # Unparseable: keep the raw text as the qualifier.
    # NOTE(review): on Python 3 encode() yields bytes here -- verify
    # that FlexiDate(qualifier=...) accepts bytes.
    val = ('UNPARSED: %s' % date).encode('ascii', 'ignore')
    return FlexiDate(qualifier=val)
constant[Parse a `date` into a `FlexiDate`.
@param date: the date to parse - may be a string, datetime.date,
datetime.datetime or FlexiDate.
TODO: support for quarters e.g. Q4 1980 or 1954 Q3
TODO: support latin stuff like M.DCC.LIII
TODO: convert '-' to '?' when used that way
e.g. had this date [181-]
]
if <ast.UnaryOp object at 0x7da1b0c14c70> begin[:]
return[constant[None]]
if call[name[isinstance], parameter[name[date], name[FlexiDate]]] begin[:]
return[name[date]]
if call[name[isinstance], parameter[name[date], name[int]]] begin[:]
return[call[name[FlexiDate], parameter[]]] | keyword[def] identifier[parse] ( identifier[date] , identifier[dayfirst] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[date] :
keyword[return] keyword[None]
keyword[if] identifier[isinstance] ( identifier[date] , identifier[FlexiDate] ):
keyword[return] identifier[date]
keyword[if] identifier[isinstance] ( identifier[date] , identifier[int] ):
keyword[return] identifier[FlexiDate] ( identifier[year] = identifier[date] )
keyword[elif] identifier[isinstance] ( identifier[date] , identifier[datetime] . identifier[datetime] ):
identifier[parser] = identifier[PythonDateTimeParser] ()
keyword[return] identifier[parser] . identifier[parse] ( identifier[date] )
keyword[elif] identifier[isinstance] ( identifier[date] , identifier[datetime] . identifier[date] ):
identifier[parser] = identifier[PythonDateParser] ()
keyword[return] identifier[parser] . identifier[parse] ( identifier[date] )
keyword[else] :
identifier[parser] = identifier[DateutilDateParser] ()
identifier[out] = identifier[parser] . identifier[parse] ( identifier[date] ,**{ literal[string] : identifier[dayfirst] })
keyword[if] identifier[out] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[out]
identifier[val] = literal[string] % identifier[date]
identifier[val] = identifier[val] . identifier[encode] ( literal[string] , literal[string] )
keyword[return] identifier[FlexiDate] ( identifier[qualifier] = identifier[val] ) | def parse(date, dayfirst=True):
"""Parse a `date` into a `FlexiDate`.
@param date: the date to parse - may be a string, datetime.date,
datetime.datetime or FlexiDate.
TODO: support for quarters e.g. Q4 1980 or 1954 Q3
TODO: support latin stuff like M.DCC.LIII
TODO: convert '-' to '?' when used that way
e.g. had this date [181-]
"""
if not date:
return None # depends on [control=['if'], data=[]]
if isinstance(date, FlexiDate):
return date # depends on [control=['if'], data=[]]
if isinstance(date, int):
return FlexiDate(year=date) # depends on [control=['if'], data=[]]
elif isinstance(date, datetime.datetime):
parser = PythonDateTimeParser()
return parser.parse(date) # depends on [control=['if'], data=[]]
elif isinstance(date, datetime.date):
parser = PythonDateParser()
return parser.parse(date) # depends on [control=['if'], data=[]]
else: # assuming its a string
parser = DateutilDateParser()
out = parser.parse(date, **{'dayfirst': dayfirst})
if out is not None:
return out # depends on [control=['if'], data=['out']]
# msg = 'Unable to parse %s' % date
# raise ValueError(date)
val = 'UNPARSED: %s' % date
val = val.encode('ascii', 'ignore')
return FlexiDate(qualifier=val) |
def get_session_info(self, session=0):
    """get info about a session

    Parameters
    ----------
    session : int
        Session number to retrieve. The current session is 0, and negative
        numbers count back from current session, so -1 is previous session.

    Returns
    -------
    (session_id [int], start [datetime], end [datetime], num_cmds [int],
    remark [unicode])

    Sessions that are running or did not exit cleanly will have `end=None`
    and `num_cmds=None`.
    """
    if session <= 0:
        # Resolve relative numbering: 0 == current, -1 == previous, ...
        session += self.session_number
    return self.db.execute(
        "SELECT * from sessions where session == ?", (session,)
    ).fetchone()
constant[get info about a session
Parameters
----------
session : int
Session number to retrieve. The current session is 0, and negative
numbers count back from current session, so -1 is previous session.
Returns
-------
(session_id [int], start [datetime], end [datetime], num_cmds [int],
remark [unicode])
Sessions that are running or did not exit cleanly will have `end=None`
and `num_cmds=None`.
]
if compare[name[session] less_or_equal[<=] constant[0]] begin[:]
<ast.AugAssign object at 0x7da20c6a9fc0>
variable[query] assign[=] constant[SELECT * from sessions where session == ?]
return[call[call[name[self].db.execute, parameter[name[query], tuple[[<ast.Name object at 0x7da207f9ab90>]]]].fetchone, parameter[]]] | keyword[def] identifier[get_session_info] ( identifier[self] , identifier[session] = literal[int] ):
literal[string]
keyword[if] identifier[session] <= literal[int] :
identifier[session] += identifier[self] . identifier[session_number]
identifier[query] = literal[string]
keyword[return] identifier[self] . identifier[db] . identifier[execute] ( identifier[query] ,( identifier[session] ,)). identifier[fetchone] () | def get_session_info(self, session=0):
"""get info about a session
Parameters
----------
session : int
Session number to retrieve. The current session is 0, and negative
numbers count back from current session, so -1 is previous session.
Returns
-------
(session_id [int], start [datetime], end [datetime], num_cmds [int],
remark [unicode])
Sessions that are running or did not exit cleanly will have `end=None`
and `num_cmds=None`.
"""
if session <= 0:
session += self.session_number # depends on [control=['if'], data=['session']]
query = 'SELECT * from sessions where session == ?'
return self.db.execute(query, (session,)).fetchone() |
def a_send_line(text, ctx):
    """Send text line to the controller followed by `os.linesep`.

    ``text`` may be a plain string, which is sent as-is, or an iterator
    (e.g. a generator) whose next item is sent on each call; when the
    iterator is exhausted, ``ctx.finished`` is set to True instead of
    sending anything.

    :param text: string to send, or an iterator yielding strings
    :param ctx: context object exposing ``ctrl.sendline`` and ``finished``
    :returns: always True
    """
    # Detect iterators via the iterator protocol (`__next__` on Python 3,
    # `next` on Python 2) rather than `__iter__`: on Python 3 strings also
    # define `__iter__`, and the old `text.next()` call only existed on
    # Python 2 iterators. The builtin next() works on both versions.
    if hasattr(text, '__next__') or hasattr(text, 'next'):
        try:
            ctx.ctrl.sendline(next(text))
        except StopIteration:
            # Iterator drained — signal completion instead of sending.
            ctx.finished = True
    else:
        ctx.ctrl.sendline(text)
    return True
constant[Send text line to the controller followed by `os.linesep`.]
if call[name[hasattr], parameter[name[text], constant[__iter__]]] begin[:]
<ast.Try object at 0x7da18eb54160>
return[constant[True]] | keyword[def] identifier[a_send_line] ( identifier[text] , identifier[ctx] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[text] , literal[string] ):
keyword[try] :
identifier[ctx] . identifier[ctrl] . identifier[sendline] ( identifier[text] . identifier[next] ())
keyword[except] identifier[StopIteration] :
identifier[ctx] . identifier[finished] = keyword[True]
keyword[else] :
identifier[ctx] . identifier[ctrl] . identifier[sendline] ( identifier[text] )
keyword[return] keyword[True] | def a_send_line(text, ctx):
"""Send text line to the controller followed by `os.linesep`."""
if hasattr(text, '__iter__'):
try:
ctx.ctrl.sendline(text.next()) # depends on [control=['try'], data=[]]
except StopIteration:
ctx.finished = True # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
ctx.ctrl.sendline(text)
return True |
def serve(request, tail, server):
    """
    Django adapter for pyws.

    :param request: the incoming Django request object
    :param tail: the URL remainder after the mount point of this adapter
    :param server: the pyws server instance

    The Django request is translated into a pyws ``Request`` (query string
    and POST body are parsed into dicts), handed to the server, and the
    resulting pyws response is wrapped back into a Django ``HttpResponse``
    with the matching content type and HTTP status code.
    """
    # GET requests carry no payload; otherwise prefer the modern
    # ``request.body`` attribute, falling back to the pre-Django-1.4
    # ``raw_post_data`` name.
    if request.GET:
        raw_body = ''
    else:
        try:
            raw_body = request.body
        except AttributeError:
            raw_body = request.raw_post_data

    pyws_request = Request(
        tail,
        raw_body,
        parse_qs(request.META['QUERY_STRING']),
        parse_qs(raw_body),
        request.COOKIES,
    )

    response = server.process_request(pyws_request)
    return HttpResponse(
        response.text,
        content_type=response.content_type,
        status=get_http_response_code_num(response),
    )
constant[
Django adapter. It has three arguments:
#. ``request`` is a Django request object,
#. ``tail`` is everything that's left from an URL, which adapter is
attached to,
#. ``server`` is a pyws server object.
First two are the context of an application, function ``serve`` transforms
them into a pyws request object. Then it feeds the request to the server,
gets the response and transforms it into a Django response object.
]
if name[request].GET begin[:]
variable[body] assign[=] constant[]
variable[request] assign[=] call[name[Request], parameter[name[tail], name[body], call[name[parse_qs], parameter[call[name[request].META][constant[QUERY_STRING]]]], call[name[parse_qs], parameter[name[body]]], name[request].COOKIES]]
variable[response] assign[=] call[name[server].process_request, parameter[name[request]]]
return[call[name[HttpResponse], parameter[name[response].text]]] | keyword[def] identifier[serve] ( identifier[request] , identifier[tail] , identifier[server] ):
literal[string]
keyword[if] identifier[request] . identifier[GET] :
identifier[body] = literal[string]
keyword[else] :
keyword[try] :
identifier[body] = identifier[request] . identifier[body]
keyword[except] identifier[AttributeError] :
identifier[body] = identifier[request] . identifier[raw_post_data]
identifier[request] = identifier[Request] (
identifier[tail] ,
identifier[body] ,
identifier[parse_qs] ( identifier[request] . identifier[META] [ literal[string] ]),
identifier[parse_qs] ( identifier[body] ),
identifier[request] . identifier[COOKIES] ,
)
identifier[response] = identifier[server] . identifier[process_request] ( identifier[request] )
keyword[return] identifier[HttpResponse] (
identifier[response] . identifier[text] , identifier[content_type] = identifier[response] . identifier[content_type] ,
identifier[status] = identifier[get_http_response_code_num] ( identifier[response] )) | def serve(request, tail, server):
"""
Django adapter. It has three arguments:
#. ``request`` is a Django request object,
#. ``tail`` is everything that's left from an URL, which adapter is
attached to,
#. ``server`` is a pyws server object.
First two are the context of an application, function ``serve`` transforms
them into a pyws request object. Then it feeds the request to the server,
gets the response and transforms it into a Django response object.
"""
if request.GET:
body = '' # depends on [control=['if'], data=[]]
else:
try:
body = request.body # depends on [control=['try'], data=[]]
except AttributeError:
body = request.raw_post_data # depends on [control=['except'], data=[]]
request = Request(tail, body, parse_qs(request.META['QUERY_STRING']), parse_qs(body), request.COOKIES)
response = server.process_request(request)
return HttpResponse(response.text, content_type=response.content_type, status=get_http_response_code_num(response)) |
def wherenotin(self, fieldname, value):
    """
    Select rows whose ``fieldname`` is *absent* from ``value``.

    Logical opposite of `wherein`; implemented by delegating with the
    ``negate`` flag set.
    """
    return self.wherein(fieldname, value, negate=True)
constant[
Logical opposite of `wherein`.
]
return[call[name[self].wherein, parameter[name[fieldname], name[value]]]] | keyword[def] identifier[wherenotin] ( identifier[self] , identifier[fieldname] , identifier[value] ):
literal[string]
keyword[return] identifier[self] . identifier[wherein] ( identifier[fieldname] , identifier[value] , identifier[negate] = keyword[True] ) | def wherenotin(self, fieldname, value):
"""
Logical opposite of `wherein`.
"""
return self.wherein(fieldname, value, negate=True) |
def prepare(session_data={},  # pylint: disable=dangerous-default-value
            passphrase=None):
    """
    Serialize *session_data* to JSON and return it encrypted as a
    base64 text string.

    The encrypted blob is crafted to be openssl-compatible; it can be
    decrypted with:

        $ echo _full_encypted_ | openssl aes-256-cbc -d -a -k _passphrase_ -p
        salt=...
        key=...
        iv=...
        _json_formatted_

    When *passphrase* is None, ``settings.DJAODJIN_SECRET_KEY`` is used.
    """
    if passphrase is None:
        passphrase = settings.DJAODJIN_SECRET_KEY
    payload = json.dumps(session_data, cls=crypt.JSONEncoder)
    encrypted = crypt.encrypt(
        payload,
        passphrase=passphrase,
        debug_stmt="encrypted_cookies.SessionStore.prepare")
    # b64encode yields `bytes` on Python 3, but Django 2.0 expects a `str`
    # when building the cookie header (otherwise the b'...' wrapper leaks
    # into the cookie); Django 1.11 accepted the bytes as-is.
    if isinstance(encrypted, six.string_types):
        return encrypted
    return encrypted.decode('ascii')
constant[
Returns *session_dict* as a base64 encrypted json string.
The full encrypted text is special crafted to be compatible
with openssl. It can be decrypted with:
$ echo _full_encypted_ | openssl aes-256-cbc -d -a -k _passphrase_ -p
salt=...
key=...
iv=...
_json_formatted_
]
if compare[name[passphrase] is constant[None]] begin[:]
variable[passphrase] assign[=] name[settings].DJAODJIN_SECRET_KEY
variable[encrypted] assign[=] call[name[crypt].encrypt, parameter[call[name[json].dumps, parameter[name[session_data]]]]]
if <ast.UnaryOp object at 0x7da2044c07c0> begin[:]
variable[as_text] assign[=] call[name[encrypted].decode, parameter[constant[ascii]]]
return[name[as_text]] | keyword[def] identifier[prepare] ( identifier[session_data] ={},
identifier[passphrase] = keyword[None] ):
literal[string]
keyword[if] identifier[passphrase] keyword[is] keyword[None] :
identifier[passphrase] = identifier[settings] . identifier[DJAODJIN_SECRET_KEY]
identifier[encrypted] = identifier[crypt] . identifier[encrypt] (
identifier[json] . identifier[dumps] ( identifier[session_data] , identifier[cls] = identifier[crypt] . identifier[JSONEncoder] ),
identifier[passphrase] = identifier[passphrase] ,
identifier[debug_stmt] = literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[encrypted] , identifier[six] . identifier[string_types] ):
identifier[as_text] = identifier[encrypted] . identifier[decode] ( literal[string] )
keyword[else] :
identifier[as_text] = identifier[encrypted]
keyword[return] identifier[as_text] | def prepare(session_data={}, passphrase=None): #pylint: disable=dangerous-default-value
'\n Returns *session_dict* as a base64 encrypted json string.\n\n The full encrypted text is special crafted to be compatible\n with openssl. It can be decrypted with:\n\n $ echo _full_encypted_ | openssl aes-256-cbc -d -a -k _passphrase_ -p\n salt=...\n key=...\n iv=...\n _json_formatted_\n '
if passphrase is None:
passphrase = settings.DJAODJIN_SECRET_KEY # depends on [control=['if'], data=['passphrase']]
encrypted = crypt.encrypt(json.dumps(session_data, cls=crypt.JSONEncoder), passphrase=passphrase, debug_stmt='encrypted_cookies.SessionStore.prepare')
# b64encode will return `bytes` (Py3) but Django 2.0 is expecting
# a `str` to add to the cookie header, otherwise it wraps those
# `bytes` into a b'***' and adds that to the cookie.
# Note that Django 1.11 will add those `bytes` to the cookie "as-is".
if not isinstance(encrypted, six.string_types):
as_text = encrypted.decode('ascii') # depends on [control=['if'], data=[]]
else:
as_text = encrypted
return as_text |
def hfoslog(*what, **kwargs):
    """Logs all *what arguments.

    :param *what: Loggable objects (i.e. they have a string representation)
    :param lvl: Debug message level
    :param exc: Switch to better handle exceptions, use if logging in an
                except clause
    :param emitter: Optional log source, where this can't be determined
                    automatically
    :param sourceloc: Give specific source code location hints, used internally
    """
    # Count all messages (missing numbers give a hint at too high log level)
    global count
    global verbosity
    count += 1
    lvl = kwargs.get('lvl', info)
    # Drop messages below the configured global verbosity threshold early.
    if lvl < verbosity['global']:
        return
    emitter = kwargs.get('emitter', 'UNKNOWN')
    traceback = kwargs.get('tb', False)
    frame_ref = kwargs.get('frame_ref', 0)
    output = None
    timestamp = time.time()
    # Runtime is measured relative to the module-level `start` timestamp.
    runtime = timestamp - start
    callee = None
    exception = kwargs.get('exc', False)
    if exception:
        # Capture the active exception triple for the traceback section below.
        exc_type, exc_obj, exc_tb = sys.exc_info()  # NOQA
    # Source-location info is only gathered in debug mode or when a
    # traceback was explicitly requested.
    if verbosity['global'] <= debug or traceback:
        # Automatically log the current function details.
        if 'sourceloc' not in kwargs:
            frame = kwargs.get('frame', frame_ref)
            # Get the previous frame in the stack, otherwise it would
            # be this function
            current_frame = inspect.currentframe()
            # Walk `frame` levels up the call stack to reach the caller.
            while frame > 0:
                frame -= 1
                current_frame = current_frame.f_back
            func = current_frame.f_code
            # Dump the message + the name of this function to the log.
            if exception:
                # Point at the line that raised, and escalate to at least
                # `error` level for exceptions.
                line_no = exc_tb.tb_lineno
                if lvl <= error:
                    lvl = error
            else:
                line_no = func.co_firstlineno
            callee = "[%.10s@%s:%i]" % (
                func.co_name,
                func.co_filename,
                line_no
            )
        else:
            # Caller supplied an explicit source location string.
            callee = kwargs['sourceloc']
    now = time.asctime()
    # Fixed-width log prefix: timestamp, level name, runtime, counter, emitter.
    msg = "[%s] : %5s : %.5f : %3i : [%5s]" % (now,
                                               lvldata[lvl][0],
                                               runtime,
                                               count,
                                               emitter)
    content = ""
    if callee:
        # Pad the callee column unless uncut output was requested
        # (levels <= 10, i.e. debug, are never padded).
        if not uncut and lvl > 10:
            msg += "%-60s" % callee
        else:
            msg += "%s" % callee
    # Stringify all payload arguments, optionally pretty-printed.
    for thing in what:
        content += " "
        if kwargs.get('pretty', False):
            content += pprint.pformat(thing)
        else:
            content += str(thing)
    msg += content
    if exception:
        # Append the full formatted traceback of the captured exception.
        msg += "\n" + "".join(format_exception(exc_type, exc_obj, exc_tb))
    if is_muted(msg):
        return
    # Truncate long lines unless uncut output or debug level was requested.
    if not uncut and lvl > 10 and len(msg) > 1000:
        msg = msg[:1000]
    # Persist to the logfile when the level passes the file threshold.
    if lvl >= verbosity['file']:
        try:
            f = open(logfile, "a")
            f.write(msg + '\n')
            f.flush()
            f.close()
        except IOError:
            print("Can't open logfile %s for writing!" % logfile)
            # sys.exit(23)
    # Marked messages are promoted to the highlight level for the console.
    if is_marked(msg):
        lvl = hilight
    if lvl >= verbosity['console']:
        output = str(msg)
        if six.PY3 and color:
            # Wrap in ANSI color codes for the level, plus the reset code.
            output = lvldata[lvl][1] + output + terminator
        try:
            print(output)
        except UnicodeEncodeError as e:
            # Console encoding could not represent the message; emit the
            # utf-8 bytes and log the incident (recursive call).
            print(output.encode("utf-8"))
            hfoslog("Bad encoding encountered on previous message:", e,
                    lvl=error)
        except BlockingIOError:
            # Non-blocking stdout refused a very long write.
            hfoslog("Too long log line encountered:", output[:20], lvl=warn)
    # Mirror the record into the in-memory live log when enabled.
    if live:
        item = [now, lvl, runtime, count, emitter, str(content)]
        LiveLog.append(item)
constant[Logs all *what arguments.
:param *what: Loggable objects (i.e. they have a string representation)
:param lvl: Debug message level
:param exc: Switch to better handle exceptions, use if logging in an
except clause
:param emitter: Optional log source, where this can't be determined
automatically
:param sourceloc: Give specific source code location hints, used internally
]
<ast.Global object at 0x7da1b0f5be50>
<ast.Global object at 0x7da1b0f5bdf0>
<ast.AugAssign object at 0x7da1b0f5bd90>
variable[lvl] assign[=] call[name[kwargs].get, parameter[constant[lvl], name[info]]]
if compare[name[lvl] less[<] call[name[verbosity]][constant[global]]] begin[:]
return[None]
variable[emitter] assign[=] call[name[kwargs].get, parameter[constant[emitter], constant[UNKNOWN]]]
variable[traceback] assign[=] call[name[kwargs].get, parameter[constant[tb], constant[False]]]
variable[frame_ref] assign[=] call[name[kwargs].get, parameter[constant[frame_ref], constant[0]]]
variable[output] assign[=] constant[None]
variable[timestamp] assign[=] call[name[time].time, parameter[]]
variable[runtime] assign[=] binary_operation[name[timestamp] - name[start]]
variable[callee] assign[=] constant[None]
variable[exception] assign[=] call[name[kwargs].get, parameter[constant[exc], constant[False]]]
if name[exception] begin[:]
<ast.Tuple object at 0x7da1b0f5b160> assign[=] call[name[sys].exc_info, parameter[]]
if <ast.BoolOp object at 0x7da1b0f5afe0> begin[:]
if compare[constant[sourceloc] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
variable[frame] assign[=] call[name[kwargs].get, parameter[constant[frame], name[frame_ref]]]
variable[current_frame] assign[=] call[name[inspect].currentframe, parameter[]]
while compare[name[frame] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b0f5aaa0>
variable[current_frame] assign[=] name[current_frame].f_back
variable[func] assign[=] name[current_frame].f_code
if name[exception] begin[:]
variable[line_no] assign[=] name[exc_tb].tb_lineno
if compare[name[lvl] less_or_equal[<=] name[error]] begin[:]
variable[lvl] assign[=] name[error]
variable[callee] assign[=] binary_operation[constant[[%.10s@%s:%i]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0f5a470>, <ast.Attribute object at 0x7da1b0f5a410>, <ast.Name object at 0x7da1b0f5a3b0>]]]
variable[now] assign[=] call[name[time].asctime, parameter[]]
variable[msg] assign[=] binary_operation[constant[[%s] : %5s : %.5f : %3i : [%5s]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0f5a080>, <ast.Subscript object at 0x7da1b0f5a050>, <ast.Name object at 0x7da1b0f59f60>, <ast.Name object at 0x7da1b0f59f30>, <ast.Name object at 0x7da1b0f59f00>]]]
variable[content] assign[=] constant[]
if name[callee] begin[:]
if <ast.BoolOp object at 0x7da1b0f59db0> begin[:]
<ast.AugAssign object at 0x7da1b0f59c90>
for taget[name[thing]] in starred[name[what]] begin[:]
<ast.AugAssign object at 0x7da1b0f59a20>
if call[name[kwargs].get, parameter[constant[pretty], constant[False]]] begin[:]
<ast.AugAssign object at 0x7da1b0f59870>
<ast.AugAssign object at 0x7da1b0ed9840>
if name[exception] begin[:]
<ast.AugAssign object at 0x7da1b0ed9930>
if call[name[is_muted], parameter[name[msg]]] begin[:]
return[None]
if <ast.BoolOp object at 0x7da1b0f595a0> begin[:]
variable[msg] assign[=] call[name[msg]][<ast.Slice object at 0x7da1b0f592d0>]
if compare[name[lvl] greater_or_equal[>=] call[name[verbosity]][constant[file]]] begin[:]
<ast.Try object at 0x7da1b0f59150>
if call[name[is_marked], parameter[name[msg]]] begin[:]
variable[lvl] assign[=] name[hilight]
if compare[name[lvl] greater_or_equal[>=] call[name[verbosity]][constant[console]]] begin[:]
variable[output] assign[=] call[name[str], parameter[name[msg]]]
if <ast.BoolOp object at 0x7da1b0f58760> begin[:]
variable[output] assign[=] binary_operation[binary_operation[call[call[name[lvldata]][name[lvl]]][constant[1]] + name[output]] + name[terminator]]
<ast.Try object at 0x7da1b0fc4250>
if name[live] begin[:]
variable[item] assign[=] list[[<ast.Name object at 0x7da1b0fc7100>, <ast.Name object at 0x7da1b0fc6ec0>, <ast.Name object at 0x7da1b0fc4cd0>, <ast.Name object at 0x7da1b0fc7130>, <ast.Name object at 0x7da1b0fc4790>, <ast.Call object at 0x7da1b0fc56c0>]]
call[name[LiveLog].append, parameter[name[item]]] | keyword[def] identifier[hfoslog] (* identifier[what] ,** identifier[kwargs] ):
literal[string]
keyword[global] identifier[count]
keyword[global] identifier[verbosity]
identifier[count] += literal[int]
identifier[lvl] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[info] )
keyword[if] identifier[lvl] < identifier[verbosity] [ literal[string] ]:
keyword[return]
identifier[emitter] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[traceback] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] )
identifier[frame_ref] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[output] = keyword[None]
identifier[timestamp] = identifier[time] . identifier[time] ()
identifier[runtime] = identifier[timestamp] - identifier[start]
identifier[callee] = keyword[None]
identifier[exception] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] )
keyword[if] identifier[exception] :
identifier[exc_type] , identifier[exc_obj] , identifier[exc_tb] = identifier[sys] . identifier[exc_info] ()
keyword[if] identifier[verbosity] [ literal[string] ]<= identifier[debug] keyword[or] identifier[traceback] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[frame] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[frame_ref] )
identifier[current_frame] = identifier[inspect] . identifier[currentframe] ()
keyword[while] identifier[frame] > literal[int] :
identifier[frame] -= literal[int]
identifier[current_frame] = identifier[current_frame] . identifier[f_back]
identifier[func] = identifier[current_frame] . identifier[f_code]
keyword[if] identifier[exception] :
identifier[line_no] = identifier[exc_tb] . identifier[tb_lineno]
keyword[if] identifier[lvl] <= identifier[error] :
identifier[lvl] = identifier[error]
keyword[else] :
identifier[line_no] = identifier[func] . identifier[co_firstlineno]
identifier[callee] = literal[string] %(
identifier[func] . identifier[co_name] ,
identifier[func] . identifier[co_filename] ,
identifier[line_no]
)
keyword[else] :
identifier[callee] = identifier[kwargs] [ literal[string] ]
identifier[now] = identifier[time] . identifier[asctime] ()
identifier[msg] = literal[string] %( identifier[now] ,
identifier[lvldata] [ identifier[lvl] ][ literal[int] ],
identifier[runtime] ,
identifier[count] ,
identifier[emitter] )
identifier[content] = literal[string]
keyword[if] identifier[callee] :
keyword[if] keyword[not] identifier[uncut] keyword[and] identifier[lvl] > literal[int] :
identifier[msg] += literal[string] % identifier[callee]
keyword[else] :
identifier[msg] += literal[string] % identifier[callee]
keyword[for] identifier[thing] keyword[in] identifier[what] :
identifier[content] += literal[string]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ):
identifier[content] += identifier[pprint] . identifier[pformat] ( identifier[thing] )
keyword[else] :
identifier[content] += identifier[str] ( identifier[thing] )
identifier[msg] += identifier[content]
keyword[if] identifier[exception] :
identifier[msg] += literal[string] + literal[string] . identifier[join] ( identifier[format_exception] ( identifier[exc_type] , identifier[exc_obj] , identifier[exc_tb] ))
keyword[if] identifier[is_muted] ( identifier[msg] ):
keyword[return]
keyword[if] keyword[not] identifier[uncut] keyword[and] identifier[lvl] > literal[int] keyword[and] identifier[len] ( identifier[msg] )> literal[int] :
identifier[msg] = identifier[msg] [: literal[int] ]
keyword[if] identifier[lvl] >= identifier[verbosity] [ literal[string] ]:
keyword[try] :
identifier[f] = identifier[open] ( identifier[logfile] , literal[string] )
identifier[f] . identifier[write] ( identifier[msg] + literal[string] )
identifier[f] . identifier[flush] ()
identifier[f] . identifier[close] ()
keyword[except] identifier[IOError] :
identifier[print] ( literal[string] % identifier[logfile] )
keyword[if] identifier[is_marked] ( identifier[msg] ):
identifier[lvl] = identifier[hilight]
keyword[if] identifier[lvl] >= identifier[verbosity] [ literal[string] ]:
identifier[output] = identifier[str] ( identifier[msg] )
keyword[if] identifier[six] . identifier[PY3] keyword[and] identifier[color] :
identifier[output] = identifier[lvldata] [ identifier[lvl] ][ literal[int] ]+ identifier[output] + identifier[terminator]
keyword[try] :
identifier[print] ( identifier[output] )
keyword[except] identifier[UnicodeEncodeError] keyword[as] identifier[e] :
identifier[print] ( identifier[output] . identifier[encode] ( literal[string] ))
identifier[hfoslog] ( literal[string] , identifier[e] ,
identifier[lvl] = identifier[error] )
keyword[except] identifier[BlockingIOError] :
identifier[hfoslog] ( literal[string] , identifier[output] [: literal[int] ], identifier[lvl] = identifier[warn] )
keyword[if] identifier[live] :
identifier[item] =[ identifier[now] , identifier[lvl] , identifier[runtime] , identifier[count] , identifier[emitter] , identifier[str] ( identifier[content] )]
identifier[LiveLog] . identifier[append] ( identifier[item] ) | def hfoslog(*what, **kwargs):
"""Logs all *what arguments.
:param *what: Loggable objects (i.e. they have a string representation)
:param lvl: Debug message level
:param exc: Switch to better handle exceptions, use if logging in an
except clause
:param emitter: Optional log source, where this can't be determined
automatically
:param sourceloc: Give specific source code location hints, used internally
"""
# Count all messages (missing numbers give a hint at too high log level)
global count
global verbosity
count += 1
lvl = kwargs.get('lvl', info)
if lvl < verbosity['global']:
return # depends on [control=['if'], data=[]]
emitter = kwargs.get('emitter', 'UNKNOWN')
traceback = kwargs.get('tb', False)
frame_ref = kwargs.get('frame_ref', 0)
output = None
timestamp = time.time()
runtime = timestamp - start
callee = None
exception = kwargs.get('exc', False)
if exception:
(exc_type, exc_obj, exc_tb) = sys.exc_info() # NOQA # depends on [control=['if'], data=[]]
if verbosity['global'] <= debug or traceback:
# Automatically log the current function details.
if 'sourceloc' not in kwargs:
frame = kwargs.get('frame', frame_ref)
# Get the previous frame in the stack, otherwise it would
# be this function
current_frame = inspect.currentframe()
while frame > 0:
frame -= 1
current_frame = current_frame.f_back # depends on [control=['while'], data=['frame']]
func = current_frame.f_code
# Dump the message + the name of this function to the log.
if exception:
line_no = exc_tb.tb_lineno
if lvl <= error:
lvl = error # depends on [control=['if'], data=['lvl', 'error']] # depends on [control=['if'], data=[]]
else:
line_no = func.co_firstlineno
callee = '[%.10s@%s:%i]' % (func.co_name, func.co_filename, line_no) # depends on [control=['if'], data=['kwargs']]
else:
callee = kwargs['sourceloc'] # depends on [control=['if'], data=[]]
now = time.asctime()
msg = '[%s] : %5s : %.5f : %3i : [%5s]' % (now, lvldata[lvl][0], runtime, count, emitter)
content = ''
if callee:
if not uncut and lvl > 10:
msg += '%-60s' % callee # depends on [control=['if'], data=[]]
else:
msg += '%s' % callee # depends on [control=['if'], data=[]]
for thing in what:
content += ' '
if kwargs.get('pretty', False):
content += pprint.pformat(thing) # depends on [control=['if'], data=[]]
else:
content += str(thing) # depends on [control=['for'], data=['thing']]
msg += content
if exception:
msg += '\n' + ''.join(format_exception(exc_type, exc_obj, exc_tb)) # depends on [control=['if'], data=[]]
if is_muted(msg):
return # depends on [control=['if'], data=[]]
if not uncut and lvl > 10 and (len(msg) > 1000):
msg = msg[:1000] # depends on [control=['if'], data=[]]
if lvl >= verbosity['file']:
try:
f = open(logfile, 'a')
f.write(msg + '\n')
f.flush()
f.close() # depends on [control=['try'], data=[]]
except IOError:
print("Can't open logfile %s for writing!" % logfile) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# sys.exit(23)
if is_marked(msg):
lvl = hilight # depends on [control=['if'], data=[]]
if lvl >= verbosity['console']:
output = str(msg)
if six.PY3 and color:
output = lvldata[lvl][1] + output + terminator # depends on [control=['if'], data=[]]
try:
print(output) # depends on [control=['try'], data=[]]
except UnicodeEncodeError as e:
print(output.encode('utf-8'))
hfoslog('Bad encoding encountered on previous message:', e, lvl=error) # depends on [control=['except'], data=['e']]
except BlockingIOError:
hfoslog('Too long log line encountered:', output[:20], lvl=warn) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['lvl']]
if live:
item = [now, lvl, runtime, count, emitter, str(content)]
LiveLog.append(item) # depends on [control=['if'], data=[]] |
def nvmlDeviceGetPowerState(handle):
    r"""Return the current performance state (P-state) of *handle*.

    Deprecated NVML entry point: use nvmlDeviceGetPerformanceState
    instead — this function exposes an incorrect generalization.
    Supported on Fermi or newer fully supported devices; see
    nvmlPstates_t for the allowed performance states.

    :param handle: identifier of the target device
    :raises NVMLError: on NVML failure codes such as
        NVML_ERROR_UNINITIALIZED, NVML_ERROR_INVALID_ARGUMENT,
        NVML_ERROR_NOT_SUPPORTED, NVML_ERROR_GPU_IS_LOST or
        NVML_ERROR_UNKNOWN (raised by _nvmlCheckReturn)
    """
    # Resolve the native entry point, then call it with an out-parameter.
    get_power_state = _nvmlGetFunctionPointer("nvmlDeviceGetPowerState")
    state = _nvmlPstates_t()
    _nvmlCheckReturn(get_power_state(handle, byref(state)))
    return bytes_to_str(state.value)
constant[
/**
* Deprecated: Use \ref nvmlDeviceGetPerformanceState. This function exposes an incorrect generalization.
*
* Retrieve the current performance state for the device.
*
* For Fermi &tm; or newer fully supported devices.
*
* See \ref nvmlPstates_t for details on allowed performance states.
*
* @param device The identifier of the target device
* @param pState Reference in which to return the performance state reading
*
* @return
* - \ref NVML_SUCCESS if \a pState has been set
* - \ref NVML_ERROR_UNINITIALIZED if the library has not been successfully initialized
* - \ref NVML_ERROR_INVALID_ARGUMENT if \a device is invalid or \a pState is NULL
* - \ref NVML_ERROR_NOT_SUPPORTED if the device does not support this feature
* - \ref NVML_ERROR_GPU_IS_LOST if the target GPU has fallen off the bus or is otherwise inaccessible
* - \ref NVML_ERROR_UNKNOWN on any unexpected error
*/
nvmlReturn_t DECLDIR nvmlDeviceGetPowerState
]
variable[c_pstate] assign[=] call[name[_nvmlPstates_t], parameter[]]
variable[fn] assign[=] call[name[_nvmlGetFunctionPointer], parameter[constant[nvmlDeviceGetPowerState]]]
variable[ret] assign[=] call[name[fn], parameter[name[handle], call[name[byref], parameter[name[c_pstate]]]]]
call[name[_nvmlCheckReturn], parameter[name[ret]]]
return[call[name[bytes_to_str], parameter[name[c_pstate].value]]] | keyword[def] identifier[nvmlDeviceGetPowerState] ( identifier[handle] ):
literal[string]
identifier[c_pstate] = identifier[_nvmlPstates_t] ()
identifier[fn] = identifier[_nvmlGetFunctionPointer] ( literal[string] )
identifier[ret] = identifier[fn] ( identifier[handle] , identifier[byref] ( identifier[c_pstate] ))
identifier[_nvmlCheckReturn] ( identifier[ret] )
keyword[return] identifier[bytes_to_str] ( identifier[c_pstate] . identifier[value] ) | def nvmlDeviceGetPowerState(handle):
"""
/**
* Deprecated: Use \\ref nvmlDeviceGetPerformanceState. This function exposes an incorrect generalization.
*
* Retrieve the current performance state for the device.
*
* For Fermi &tm; or newer fully supported devices.
*
* See \\ref nvmlPstates_t for details on allowed performance states.
*
* @param device The identifier of the target device
* @param pState Reference in which to return the performance state reading
*
* @return
* - \\ref NVML_SUCCESS if \\a pState has been set
* - \\ref NVML_ERROR_UNINITIALIZED if the library has not been successfully initialized
* - \\ref NVML_ERROR_INVALID_ARGUMENT if \\a device is invalid or \\a pState is NULL
* - \\ref NVML_ERROR_NOT_SUPPORTED if the device does not support this feature
* - \\ref NVML_ERROR_GPU_IS_LOST if the target GPU has fallen off the bus or is otherwise inaccessible
* - \\ref NVML_ERROR_UNKNOWN on any unexpected error
*/
nvmlReturn_t DECLDIR nvmlDeviceGetPowerState
"""
c_pstate = _nvmlPstates_t()
fn = _nvmlGetFunctionPointer('nvmlDeviceGetPowerState')
ret = fn(handle, byref(c_pstate))
_nvmlCheckReturn(ret)
return bytes_to_str(c_pstate.value) |
def _validate_error_response_model(self, paths, mods):
'''
Helper function to help validate the convention established in the swagger file on how
to handle response code mapping/integration
'''
for path, ops in paths:
for opname, opobj in six.iteritems(ops):
if opname not in _Swagger.SWAGGER_OPERATION_NAMES:
continue
if 'responses' not in opobj:
raise ValueError('missing mandatory responses field in path item object')
for rescode, resobj in six.iteritems(opobj.get('responses')):
if not self._is_http_error_rescode(str(rescode)): # future lint: disable=blacklisted-function
continue
# only check for response code from 400-599
if 'schema' not in resobj:
raise ValueError('missing schema field in path {0}, '
'op {1}, response {2}'.format(path, opname, rescode))
schemaobj = resobj.get('schema')
if '$ref' not in schemaobj:
raise ValueError('missing $ref field under schema in '
'path {0}, op {1}, response {2}'.format(path, opname, rescode))
schemaobjref = schemaobj.get('$ref', '/')
modelname = schemaobjref.split('/')[-1]
if modelname not in mods:
raise ValueError('model schema {0} reference not found '
'under /definitions'.format(schemaobjref))
model = mods.get(modelname)
if model.get('type') != 'object':
raise ValueError('model schema {0} must be type object'.format(modelname))
if 'properties' not in model:
raise ValueError('model schema {0} must have properties fields'.format(modelname))
modelprops = model.get('properties')
if 'errorMessage' not in modelprops:
raise ValueError('model schema {0} must have errorMessage as a property to '
'match AWS convention. If pattern is not set, .+ will '
'be used'.format(modelname)) | def function[_validate_error_response_model, parameter[self, paths, mods]]:
constant[
Helper function to help validate the convention established in the swagger file on how
to handle response code mapping/integration
]
for taget[tuple[[<ast.Name object at 0x7da1b1c16fe0>, <ast.Name object at 0x7da1b1c153f0>]]] in starred[name[paths]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1c165f0>, <ast.Name object at 0x7da1b1c16800>]]] in starred[call[name[six].iteritems, parameter[name[ops]]]] begin[:]
if compare[name[opname] <ast.NotIn object at 0x7da2590d7190> name[_Swagger].SWAGGER_OPERATION_NAMES] begin[:]
continue
if compare[constant[responses] <ast.NotIn object at 0x7da2590d7190> name[opobj]] begin[:]
<ast.Raise object at 0x7da1b1c17fd0>
for taget[tuple[[<ast.Name object at 0x7da1b1c14ee0>, <ast.Name object at 0x7da1b1c14f70>]]] in starred[call[name[six].iteritems, parameter[call[name[opobj].get, parameter[constant[responses]]]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b1c15660> begin[:]
continue
if compare[constant[schema] <ast.NotIn object at 0x7da2590d7190> name[resobj]] begin[:]
<ast.Raise object at 0x7da1b1c17fa0>
variable[schemaobj] assign[=] call[name[resobj].get, parameter[constant[schema]]]
if compare[constant[$ref] <ast.NotIn object at 0x7da2590d7190> name[schemaobj]] begin[:]
<ast.Raise object at 0x7da1b1c158d0>
variable[schemaobjref] assign[=] call[name[schemaobj].get, parameter[constant[$ref], constant[/]]]
variable[modelname] assign[=] call[call[name[schemaobjref].split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da1b1c151b0>]
if compare[name[modelname] <ast.NotIn object at 0x7da2590d7190> name[mods]] begin[:]
<ast.Raise object at 0x7da1b1c14d60>
variable[model] assign[=] call[name[mods].get, parameter[name[modelname]]]
if compare[call[name[model].get, parameter[constant[type]]] not_equal[!=] constant[object]] begin[:]
<ast.Raise object at 0x7da1b1c17520>
if compare[constant[properties] <ast.NotIn object at 0x7da2590d7190> name[model]] begin[:]
<ast.Raise object at 0x7da1b1c17460>
variable[modelprops] assign[=] call[name[model].get, parameter[constant[properties]]]
if compare[constant[errorMessage] <ast.NotIn object at 0x7da2590d7190> name[modelprops]] begin[:]
<ast.Raise object at 0x7da1b1c16b90> | keyword[def] identifier[_validate_error_response_model] ( identifier[self] , identifier[paths] , identifier[mods] ):
literal[string]
keyword[for] identifier[path] , identifier[ops] keyword[in] identifier[paths] :
keyword[for] identifier[opname] , identifier[opobj] keyword[in] identifier[six] . identifier[iteritems] ( identifier[ops] ):
keyword[if] identifier[opname] keyword[not] keyword[in] identifier[_Swagger] . identifier[SWAGGER_OPERATION_NAMES] :
keyword[continue]
keyword[if] literal[string] keyword[not] keyword[in] identifier[opobj] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[for] identifier[rescode] , identifier[resobj] keyword[in] identifier[six] . identifier[iteritems] ( identifier[opobj] . identifier[get] ( literal[string] )):
keyword[if] keyword[not] identifier[self] . identifier[_is_http_error_rescode] ( identifier[str] ( identifier[rescode] )):
keyword[continue]
keyword[if] literal[string] keyword[not] keyword[in] identifier[resobj] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[path] , identifier[opname] , identifier[rescode] ))
identifier[schemaobj] = identifier[resobj] . identifier[get] ( literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[schemaobj] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[path] , identifier[opname] , identifier[rescode] ))
identifier[schemaobjref] = identifier[schemaobj] . identifier[get] ( literal[string] , literal[string] )
identifier[modelname] = identifier[schemaobjref] . identifier[split] ( literal[string] )[- literal[int] ]
keyword[if] identifier[modelname] keyword[not] keyword[in] identifier[mods] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[schemaobjref] ))
identifier[model] = identifier[mods] . identifier[get] ( identifier[modelname] )
keyword[if] identifier[model] . identifier[get] ( literal[string] )!= literal[string] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[modelname] ))
keyword[if] literal[string] keyword[not] keyword[in] identifier[model] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[modelname] ))
identifier[modelprops] = identifier[model] . identifier[get] ( literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[modelprops] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[modelname] )) | def _validate_error_response_model(self, paths, mods):
"""
Helper function to help validate the convention established in the swagger file on how
to handle response code mapping/integration
"""
for (path, ops) in paths:
for (opname, opobj) in six.iteritems(ops):
if opname not in _Swagger.SWAGGER_OPERATION_NAMES:
continue # depends on [control=['if'], data=[]]
if 'responses' not in opobj:
raise ValueError('missing mandatory responses field in path item object') # depends on [control=['if'], data=[]]
for (rescode, resobj) in six.iteritems(opobj.get('responses')):
if not self._is_http_error_rescode(str(rescode)): # future lint: disable=blacklisted-function
continue # depends on [control=['if'], data=[]]
# only check for response code from 400-599
if 'schema' not in resobj:
raise ValueError('missing schema field in path {0}, op {1}, response {2}'.format(path, opname, rescode)) # depends on [control=['if'], data=[]]
schemaobj = resobj.get('schema')
if '$ref' not in schemaobj:
raise ValueError('missing $ref field under schema in path {0}, op {1}, response {2}'.format(path, opname, rescode)) # depends on [control=['if'], data=[]]
schemaobjref = schemaobj.get('$ref', '/')
modelname = schemaobjref.split('/')[-1]
if modelname not in mods:
raise ValueError('model schema {0} reference not found under /definitions'.format(schemaobjref)) # depends on [control=['if'], data=[]]
model = mods.get(modelname)
if model.get('type') != 'object':
raise ValueError('model schema {0} must be type object'.format(modelname)) # depends on [control=['if'], data=[]]
if 'properties' not in model:
raise ValueError('model schema {0} must have properties fields'.format(modelname)) # depends on [control=['if'], data=[]]
modelprops = model.get('properties')
if 'errorMessage' not in modelprops:
raise ValueError('model schema {0} must have errorMessage as a property to match AWS convention. If pattern is not set, .+ will be used'.format(modelname)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] |
def split_camel_case(text) -> list:
"""Splits words from CamelCase text."""
return list(reduce(
lambda a, b: (a + [b] if b.isupper() else a[:-1] + [a[-1] + b]),
text,
[]
)) | def function[split_camel_case, parameter[text]]:
constant[Splits words from CamelCase text.]
return[call[name[list], parameter[call[name[reduce], parameter[<ast.Lambda object at 0x7da2041da200>, name[text], list[[]]]]]]] | keyword[def] identifier[split_camel_case] ( identifier[text] )-> identifier[list] :
literal[string]
keyword[return] identifier[list] ( identifier[reduce] (
keyword[lambda] identifier[a] , identifier[b] :( identifier[a] +[ identifier[b] ] keyword[if] identifier[b] . identifier[isupper] () keyword[else] identifier[a] [:- literal[int] ]+[ identifier[a] [- literal[int] ]+ identifier[b] ]),
identifier[text] ,
[]
)) | def split_camel_case(text) -> list:
"""Splits words from CamelCase text."""
return list(reduce(lambda a, b: a + [b] if b.isupper() else a[:-1] + [a[-1] + b], text, [])) |
def info(self, section=None):
"""The INFO command returns information and statistics about the server
in a format that is simple to parse by computers and easy to read by
humans.
The optional parameter can be used to select a specific section of
information:
- server: General information about the Redis server
- clients: Client connections section
- memory: Memory consumption related information
- persistence: RDB and AOF related information
- stats: General statistics
- replication: Master/slave replication information
- cpu: CPU consumption statistics
- commandstats: Redis command statistics
- cluster: Redis Cluster section
- keyspace: Database related statistics
It can also take the following values:
- all: Return all sections
- default: Return only the default set of sections
When no parameter is provided, the default option is assumed.
:param str section: Optional
:return: dict
"""
cmd = [b'INFO']
if section:
cmd.append(section)
return self._execute(cmd, format_callback=common.format_info_response) | def function[info, parameter[self, section]]:
constant[The INFO command returns information and statistics about the server
in a format that is simple to parse by computers and easy to read by
humans.
The optional parameter can be used to select a specific section of
information:
- server: General information about the Redis server
- clients: Client connections section
- memory: Memory consumption related information
- persistence: RDB and AOF related information
- stats: General statistics
- replication: Master/slave replication information
- cpu: CPU consumption statistics
- commandstats: Redis command statistics
- cluster: Redis Cluster section
- keyspace: Database related statistics
It can also take the following values:
- all: Return all sections
- default: Return only the default set of sections
When no parameter is provided, the default option is assumed.
:param str section: Optional
:return: dict
]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b0b70f40>]]
if name[section] begin[:]
call[name[cmd].append, parameter[name[section]]]
return[call[name[self]._execute, parameter[name[cmd]]]] | keyword[def] identifier[info] ( identifier[self] , identifier[section] = keyword[None] ):
literal[string]
identifier[cmd] =[ literal[string] ]
keyword[if] identifier[section] :
identifier[cmd] . identifier[append] ( identifier[section] )
keyword[return] identifier[self] . identifier[_execute] ( identifier[cmd] , identifier[format_callback] = identifier[common] . identifier[format_info_response] ) | def info(self, section=None):
"""The INFO command returns information and statistics about the server
in a format that is simple to parse by computers and easy to read by
humans.
The optional parameter can be used to select a specific section of
information:
- server: General information about the Redis server
- clients: Client connections section
- memory: Memory consumption related information
- persistence: RDB and AOF related information
- stats: General statistics
- replication: Master/slave replication information
- cpu: CPU consumption statistics
- commandstats: Redis command statistics
- cluster: Redis Cluster section
- keyspace: Database related statistics
It can also take the following values:
- all: Return all sections
- default: Return only the default set of sections
When no parameter is provided, the default option is assumed.
:param str section: Optional
:return: dict
"""
cmd = [b'INFO']
if section:
cmd.append(section) # depends on [control=['if'], data=[]]
return self._execute(cmd, format_callback=common.format_info_response) |
def from_path(kls, vertices):
"""
Given an Nx3 array of vertices that constitute a single path,
generate a skeleton with appropriate edges.
"""
if vertices.shape[0] == 0:
return PrecomputedSkeleton()
skel = PrecomputedSkeleton(vertices)
edges = np.zeros(shape=(skel.vertices.shape[0] - 1, 2), dtype=np.uint32)
edges[:,0] = np.arange(skel.vertices.shape[0] - 1)
edges[:,1] = np.arange(1, skel.vertices.shape[0])
skel.edges = edges
return skel | def function[from_path, parameter[kls, vertices]]:
constant[
Given an Nx3 array of vertices that constitute a single path,
generate a skeleton with appropriate edges.
]
if compare[call[name[vertices].shape][constant[0]] equal[==] constant[0]] begin[:]
return[call[name[PrecomputedSkeleton], parameter[]]]
variable[skel] assign[=] call[name[PrecomputedSkeleton], parameter[name[vertices]]]
variable[edges] assign[=] call[name[np].zeros, parameter[]]
call[name[edges]][tuple[[<ast.Slice object at 0x7da20c794640>, <ast.Constant object at 0x7da20c794d30>]]] assign[=] call[name[np].arange, parameter[binary_operation[call[name[skel].vertices.shape][constant[0]] - constant[1]]]]
call[name[edges]][tuple[[<ast.Slice object at 0x7da20c795780>, <ast.Constant object at 0x7da20c795510>]]] assign[=] call[name[np].arange, parameter[constant[1], call[name[skel].vertices.shape][constant[0]]]]
name[skel].edges assign[=] name[edges]
return[name[skel]] | keyword[def] identifier[from_path] ( identifier[kls] , identifier[vertices] ):
literal[string]
keyword[if] identifier[vertices] . identifier[shape] [ literal[int] ]== literal[int] :
keyword[return] identifier[PrecomputedSkeleton] ()
identifier[skel] = identifier[PrecomputedSkeleton] ( identifier[vertices] )
identifier[edges] = identifier[np] . identifier[zeros] ( identifier[shape] =( identifier[skel] . identifier[vertices] . identifier[shape] [ literal[int] ]- literal[int] , literal[int] ), identifier[dtype] = identifier[np] . identifier[uint32] )
identifier[edges] [:, literal[int] ]= identifier[np] . identifier[arange] ( identifier[skel] . identifier[vertices] . identifier[shape] [ literal[int] ]- literal[int] )
identifier[edges] [:, literal[int] ]= identifier[np] . identifier[arange] ( literal[int] , identifier[skel] . identifier[vertices] . identifier[shape] [ literal[int] ])
identifier[skel] . identifier[edges] = identifier[edges]
keyword[return] identifier[skel] | def from_path(kls, vertices):
"""
Given an Nx3 array of vertices that constitute a single path,
generate a skeleton with appropriate edges.
"""
if vertices.shape[0] == 0:
return PrecomputedSkeleton() # depends on [control=['if'], data=[]]
skel = PrecomputedSkeleton(vertices)
edges = np.zeros(shape=(skel.vertices.shape[0] - 1, 2), dtype=np.uint32)
edges[:, 0] = np.arange(skel.vertices.shape[0] - 1)
edges[:, 1] = np.arange(1, skel.vertices.shape[0])
skel.edges = edges
return skel |
def map_tqdm(self):
"""
Perform a function on every item while displaying a progress bar.
:return: A list of yielded values
"""
with Pool(self.cpu_count) as pool:
vals = [v for v in tqdm(pool.imap_unordered(self._func, self._iterable), total=len(self._iterable))]
pool.close()
return vals | def function[map_tqdm, parameter[self]]:
constant[
Perform a function on every item while displaying a progress bar.
:return: A list of yielded values
]
with call[name[Pool], parameter[name[self].cpu_count]] begin[:]
variable[vals] assign[=] <ast.ListComp object at 0x7da1b14aadd0>
call[name[pool].close, parameter[]]
return[name[vals]] | keyword[def] identifier[map_tqdm] ( identifier[self] ):
literal[string]
keyword[with] identifier[Pool] ( identifier[self] . identifier[cpu_count] ) keyword[as] identifier[pool] :
identifier[vals] =[ identifier[v] keyword[for] identifier[v] keyword[in] identifier[tqdm] ( identifier[pool] . identifier[imap_unordered] ( identifier[self] . identifier[_func] , identifier[self] . identifier[_iterable] ), identifier[total] = identifier[len] ( identifier[self] . identifier[_iterable] ))]
identifier[pool] . identifier[close] ()
keyword[return] identifier[vals] | def map_tqdm(self):
"""
Perform a function on every item while displaying a progress bar.
:return: A list of yielded values
"""
with Pool(self.cpu_count) as pool:
vals = [v for v in tqdm(pool.imap_unordered(self._func, self._iterable), total=len(self._iterable))]
pool.close()
return vals # depends on [control=['with'], data=['pool']] |
def report_usage_to_host(host_ip, vmid):
#base value
cpu_usage = 0.0
os_mem_usage = 0.0
task_mem_usage = 0.0
io_usage = 0.0
cpu_usage = get_cpu_usage()
os_mem_usage = get_os_mem_usage()
task_mem_usage = get_task_mem_usage()
io_usage = get_io_usage()
usage = str(vmid.strip())+' | '+str(cpu_usage)+' | '+str(os_mem_usage)+' | '+str(task_mem_usage)+' | '+str(io_usage)
#usage = "'cpu |sdbfsj |sdfsdhf |sdfvsdvfgdfvj'"
#cmd = 'python /var/lib/virtdc/vmonere/host/vmonere_listener.py '+usage
'''cmd = '/bin/ssh -n -q -o StrictHostKeyChecking=no root@host_ip \"/bin/nohup /bin/python /var/lib/virtdc/vmonere/host/vmonere_listener.py '+usage+' &\"'
cmd = cmd.replace("host_ip",str(host_ip).strip())'''
#report usage via socket
start_client_socket(host_ip, usage) | def function[report_usage_to_host, parameter[host_ip, vmid]]:
variable[cpu_usage] assign[=] constant[0.0]
variable[os_mem_usage] assign[=] constant[0.0]
variable[task_mem_usage] assign[=] constant[0.0]
variable[io_usage] assign[=] constant[0.0]
variable[cpu_usage] assign[=] call[name[get_cpu_usage], parameter[]]
variable[os_mem_usage] assign[=] call[name[get_os_mem_usage], parameter[]]
variable[task_mem_usage] assign[=] call[name[get_task_mem_usage], parameter[]]
variable[io_usage] assign[=] call[name[get_io_usage], parameter[]]
variable[usage] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[str], parameter[call[name[vmid].strip, parameter[]]]] + constant[ | ]] + call[name[str], parameter[name[cpu_usage]]]] + constant[ | ]] + call[name[str], parameter[name[os_mem_usage]]]] + constant[ | ]] + call[name[str], parameter[name[task_mem_usage]]]] + constant[ | ]] + call[name[str], parameter[name[io_usage]]]]
constant[cmd = '/bin/ssh -n -q -o StrictHostKeyChecking=no root@host_ip "/bin/nohup /bin/python /var/lib/virtdc/vmonere/host/vmonere_listener.py '+usage+' &"'
cmd = cmd.replace("host_ip",str(host_ip).strip())]
call[name[start_client_socket], parameter[name[host_ip], name[usage]]] | keyword[def] identifier[report_usage_to_host] ( identifier[host_ip] , identifier[vmid] ):
identifier[cpu_usage] = literal[int]
identifier[os_mem_usage] = literal[int]
identifier[task_mem_usage] = literal[int]
identifier[io_usage] = literal[int]
identifier[cpu_usage] = identifier[get_cpu_usage] ()
identifier[os_mem_usage] = identifier[get_os_mem_usage] ()
identifier[task_mem_usage] = identifier[get_task_mem_usage] ()
identifier[io_usage] = identifier[get_io_usage] ()
identifier[usage] = identifier[str] ( identifier[vmid] . identifier[strip] ())+ literal[string] + identifier[str] ( identifier[cpu_usage] )+ literal[string] + identifier[str] ( identifier[os_mem_usage] )+ literal[string] + identifier[str] ( identifier[task_mem_usage] )+ literal[string] + identifier[str] ( identifier[io_usage] )
literal[string]
identifier[start_client_socket] ( identifier[host_ip] , identifier[usage] ) | def report_usage_to_host(host_ip, vmid): #base value
cpu_usage = 0.0
os_mem_usage = 0.0
task_mem_usage = 0.0
io_usage = 0.0
cpu_usage = get_cpu_usage()
os_mem_usage = get_os_mem_usage()
task_mem_usage = get_task_mem_usage()
io_usage = get_io_usage()
usage = str(vmid.strip()) + ' | ' + str(cpu_usage) + ' | ' + str(os_mem_usage) + ' | ' + str(task_mem_usage) + ' | ' + str(io_usage) #usage = "'cpu |sdbfsj |sdfsdhf |sdfvsdvfgdfvj'"
#cmd = 'python /var/lib/virtdc/vmonere/host/vmonere_listener.py '+usage
'cmd = \'/bin/ssh -n -q -o StrictHostKeyChecking=no root@host_ip "/bin/nohup /bin/python /var/lib/virtdc/vmonere/host/vmonere_listener.py \'+usage+\' &"\'\n\tcmd = cmd.replace("host_ip",str(host_ip).strip())' #report usage via socket
start_client_socket(host_ip, usage) |
def _get_expiration(self, headers: dict) -> int:
"""Gets the expiration time of the data from the response headers.
Args:
headers: dictionary of headers from ESI
Returns:
value of seconds from now the data expires
"""
expiration_str = headers.get('expires')
if not expiration_str:
return 0
expiration = datetime.strptime(expiration_str, '%a, %d %b %Y %H:%M:%S %Z')
delta = (expiration - datetime.utcnow()).total_seconds()
return math.ceil(abs(delta)) | def function[_get_expiration, parameter[self, headers]]:
constant[Gets the expiration time of the data from the response headers.
Args:
headers: dictionary of headers from ESI
Returns:
value of seconds from now the data expires
]
variable[expiration_str] assign[=] call[name[headers].get, parameter[constant[expires]]]
if <ast.UnaryOp object at 0x7da18c4ce680> begin[:]
return[constant[0]]
variable[expiration] assign[=] call[name[datetime].strptime, parameter[name[expiration_str], constant[%a, %d %b %Y %H:%M:%S %Z]]]
variable[delta] assign[=] call[binary_operation[name[expiration] - call[name[datetime].utcnow, parameter[]]].total_seconds, parameter[]]
return[call[name[math].ceil, parameter[call[name[abs], parameter[name[delta]]]]]] | keyword[def] identifier[_get_expiration] ( identifier[self] , identifier[headers] : identifier[dict] )-> identifier[int] :
literal[string]
identifier[expiration_str] = identifier[headers] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[expiration_str] :
keyword[return] literal[int]
identifier[expiration] = identifier[datetime] . identifier[strptime] ( identifier[expiration_str] , literal[string] )
identifier[delta] =( identifier[expiration] - identifier[datetime] . identifier[utcnow] ()). identifier[total_seconds] ()
keyword[return] identifier[math] . identifier[ceil] ( identifier[abs] ( identifier[delta] )) | def _get_expiration(self, headers: dict) -> int:
"""Gets the expiration time of the data from the response headers.
Args:
headers: dictionary of headers from ESI
Returns:
value of seconds from now the data expires
"""
expiration_str = headers.get('expires')
if not expiration_str:
return 0 # depends on [control=['if'], data=[]]
expiration = datetime.strptime(expiration_str, '%a, %d %b %Y %H:%M:%S %Z')
delta = (expiration - datetime.utcnow()).total_seconds()
return math.ceil(abs(delta)) |
def _get_hosts_from_names(self, names):
""" validate hostnames from a list of names
"""
result = set()
hosts = map(lambda x: x.strip(), names.split(','))
for h in hosts:
if valid_hostname(h.split(':')[0]):
result.add(h if ':' in h else '%s:%d' % (h, self.PORT))
else:
raise conferr('Invalid hostname: %s' % h.split(':')[0])
return list(result) | def function[_get_hosts_from_names, parameter[self, names]]:
constant[ validate hostnames from a list of names
]
variable[result] assign[=] call[name[set], parameter[]]
variable[hosts] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da18f09d4b0>, call[name[names].split, parameter[constant[,]]]]]
for taget[name[h]] in starred[name[hosts]] begin[:]
if call[name[valid_hostname], parameter[call[call[name[h].split, parameter[constant[:]]]][constant[0]]]] begin[:]
call[name[result].add, parameter[<ast.IfExp object at 0x7da18f09d990>]]
return[call[name[list], parameter[name[result]]]] | keyword[def] identifier[_get_hosts_from_names] ( identifier[self] , identifier[names] ):
literal[string]
identifier[result] = identifier[set] ()
identifier[hosts] = identifier[map] ( keyword[lambda] identifier[x] : identifier[x] . identifier[strip] (), identifier[names] . identifier[split] ( literal[string] ))
keyword[for] identifier[h] keyword[in] identifier[hosts] :
keyword[if] identifier[valid_hostname] ( identifier[h] . identifier[split] ( literal[string] )[ literal[int] ]):
identifier[result] . identifier[add] ( identifier[h] keyword[if] literal[string] keyword[in] identifier[h] keyword[else] literal[string] %( identifier[h] , identifier[self] . identifier[PORT] ))
keyword[else] :
keyword[raise] identifier[conferr] ( literal[string] % identifier[h] . identifier[split] ( literal[string] )[ literal[int] ])
keyword[return] identifier[list] ( identifier[result] ) | def _get_hosts_from_names(self, names):
""" validate hostnames from a list of names
"""
result = set()
hosts = map(lambda x: x.strip(), names.split(','))
for h in hosts:
if valid_hostname(h.split(':')[0]):
result.add(h if ':' in h else '%s:%d' % (h, self.PORT)) # depends on [control=['if'], data=[]]
else:
raise conferr('Invalid hostname: %s' % h.split(':')[0]) # depends on [control=['for'], data=['h']]
return list(result) |
def save_partial(self, data=None, allow_protected_fields=False, **kwargs):
""" Saves just the currently set fields in the database. """
# Backwards compat, deprecated argument
if "dotnotation" in kwargs:
del kwargs["dotnotation"]
if data is None:
data = dotdict(self)
if "_id" not in data:
raise KeyError("_id must be set in order to do a save_partial()")
del data["_id"]
if len(data) == 0:
return
if not allow_protected_fields:
self.mongokat_collection._check_protected_fields(data)
apply_on = dotdict(self)
self._initialized_with_doc = False
self.mongokat_collection.update_one({"_id": self["_id"]}, {"$set": data}, **kwargs)
for k, v in data.items():
apply_on[k] = v
self.update(dict(apply_on)) | def function[save_partial, parameter[self, data, allow_protected_fields]]:
constant[ Saves just the currently set fields in the database. ]
if compare[constant[dotnotation] in name[kwargs]] begin[:]
<ast.Delete object at 0x7da1b26a34f0>
if compare[name[data] is constant[None]] begin[:]
variable[data] assign[=] call[name[dotdict], parameter[name[self]]]
if compare[constant[_id] <ast.NotIn object at 0x7da2590d7190> name[data]] begin[:]
<ast.Raise object at 0x7da1b26a36d0>
<ast.Delete object at 0x7da1b26a1960>
if compare[call[name[len], parameter[name[data]]] equal[==] constant[0]] begin[:]
return[None]
if <ast.UnaryOp object at 0x7da1b26d6e00> begin[:]
call[name[self].mongokat_collection._check_protected_fields, parameter[name[data]]]
variable[apply_on] assign[=] call[name[dotdict], parameter[name[self]]]
name[self]._initialized_with_doc assign[=] constant[False]
call[name[self].mongokat_collection.update_one, parameter[dictionary[[<ast.Constant object at 0x7da1b26d64d0>], [<ast.Subscript object at 0x7da1b26d5e70>]], dictionary[[<ast.Constant object at 0x7da1b26d59f0>], [<ast.Name object at 0x7da1b26d4160>]]]]
for taget[tuple[[<ast.Name object at 0x7da1b26d6440>, <ast.Name object at 0x7da1b26d5270>]]] in starred[call[name[data].items, parameter[]]] begin[:]
call[name[apply_on]][name[k]] assign[=] name[v]
call[name[self].update, parameter[call[name[dict], parameter[name[apply_on]]]]] | keyword[def] identifier[save_partial] ( identifier[self] , identifier[data] = keyword[None] , identifier[allow_protected_fields] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
keyword[del] identifier[kwargs] [ literal[string] ]
keyword[if] identifier[data] keyword[is] keyword[None] :
identifier[data] = identifier[dotdict] ( identifier[self] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[data] :
keyword[raise] identifier[KeyError] ( literal[string] )
keyword[del] identifier[data] [ literal[string] ]
keyword[if] identifier[len] ( identifier[data] )== literal[int] :
keyword[return]
keyword[if] keyword[not] identifier[allow_protected_fields] :
identifier[self] . identifier[mongokat_collection] . identifier[_check_protected_fields] ( identifier[data] )
identifier[apply_on] = identifier[dotdict] ( identifier[self] )
identifier[self] . identifier[_initialized_with_doc] = keyword[False]
identifier[self] . identifier[mongokat_collection] . identifier[update_one] ({ literal[string] : identifier[self] [ literal[string] ]},{ literal[string] : identifier[data] },** identifier[kwargs] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[data] . identifier[items] ():
identifier[apply_on] [ identifier[k] ]= identifier[v]
identifier[self] . identifier[update] ( identifier[dict] ( identifier[apply_on] )) | def save_partial(self, data=None, allow_protected_fields=False, **kwargs):
""" Saves just the currently set fields in the database. """
# Backwards compat, deprecated argument
if 'dotnotation' in kwargs:
del kwargs['dotnotation'] # depends on [control=['if'], data=['kwargs']]
if data is None:
data = dotdict(self)
if '_id' not in data:
raise KeyError('_id must be set in order to do a save_partial()') # depends on [control=['if'], data=[]]
del data['_id'] # depends on [control=['if'], data=['data']]
if len(data) == 0:
return # depends on [control=['if'], data=[]]
if not allow_protected_fields:
self.mongokat_collection._check_protected_fields(data) # depends on [control=['if'], data=[]]
apply_on = dotdict(self)
self._initialized_with_doc = False
self.mongokat_collection.update_one({'_id': self['_id']}, {'$set': data}, **kwargs)
for (k, v) in data.items():
apply_on[k] = v # depends on [control=['for'], data=[]]
self.update(dict(apply_on)) |
def run(self, test: unittest.TestSuite):
    """
    Given a TestSuite, will create one process per test case whenever possible and run them concurrently.
    Will then wait for the result and return them
    :param test: the TestSuite to run
    :return: a summary of the test run
    """
    start_time = time.time()
    process = []
    # Manager-backed primitives so the worker processes can share the
    # results queue and the bounding semaphore.
    resource_manager = multiprocessing.Manager()
    results_queue = resource_manager.Queue()
    # Caps the number of worker processes running concurrently.
    tasks_running = resource_manager.BoundedSemaphore(self.process_number)
    # NOTE(review): collect_tests appears to split the suite into suites that
    # can run in child processes and suites that must run locally — confirm.
    test_suites, local_test_suites = self.collect_tests(test)
    # Collector thread that drains results_queue while workers run.
    results_collector = ResultCollector(
        self.stream, self.descriptions, self.verbosity,
        result_queue=results_queue, test_results=self._makeResult(),
        tests=test_suites
    )
    results_collector.start()
    for index, suite in enumerate(test_suites):
        # Blocks until a slot is free; presumably released by the worker
        # when it finishes — confirm in self.Process.
        tasks_running.acquire()
        x = self.Process(index, suite, results_queue, tasks_running)
        x.start()
        process.append(x)
    # Run the local-only suites in this process while the workers progress.
    local_test_suites.run(results_collector)
    # Wait for all workers, then for every queued result to be consumed.
    for i in process:
        i.join()
    results_queue.join()
    results_collector.end_collection()
    results_collector.join()
    results_collector.printErrors()
    self.print_summary(results_collector, time.time() - start_time)
    return results_collector
constant[
Given a TestSuite, will create one process per test case whenever possible and run them concurrently.
Will then wait for the result and return them
:param test: the TestSuite to run
:return: a summary of the test run
]
variable[start_time] assign[=] call[name[time].time, parameter[]]
variable[process] assign[=] list[[]]
variable[resource_manager] assign[=] call[name[multiprocessing].Manager, parameter[]]
variable[results_queue] assign[=] call[name[resource_manager].Queue, parameter[]]
variable[tasks_running] assign[=] call[name[resource_manager].BoundedSemaphore, parameter[name[self].process_number]]
<ast.Tuple object at 0x7da2044c35b0> assign[=] call[name[self].collect_tests, parameter[name[test]]]
variable[results_collector] assign[=] call[name[ResultCollector], parameter[name[self].stream, name[self].descriptions, name[self].verbosity]]
call[name[results_collector].start, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da2045640a0>, <ast.Name object at 0x7da204565a80>]]] in starred[call[name[enumerate], parameter[name[test_suites]]]] begin[:]
call[name[tasks_running].acquire, parameter[]]
variable[x] assign[=] call[name[self].Process, parameter[name[index], name[suite], name[results_queue], name[tasks_running]]]
call[name[x].start, parameter[]]
call[name[process].append, parameter[name[x]]]
call[name[local_test_suites].run, parameter[name[results_collector]]]
for taget[name[i]] in starred[name[process]] begin[:]
call[name[i].join, parameter[]]
call[name[results_queue].join, parameter[]]
call[name[results_collector].end_collection, parameter[]]
call[name[results_collector].join, parameter[]]
call[name[results_collector].printErrors, parameter[]]
call[name[self].print_summary, parameter[name[results_collector], binary_operation[call[name[time].time, parameter[]] - name[start_time]]]]
return[name[results_collector]] | keyword[def] identifier[run] ( identifier[self] , identifier[test] : identifier[unittest] . identifier[TestSuite] ):
literal[string]
identifier[start_time] = identifier[time] . identifier[time] ()
identifier[process] =[]
identifier[resource_manager] = identifier[multiprocessing] . identifier[Manager] ()
identifier[results_queue] = identifier[resource_manager] . identifier[Queue] ()
identifier[tasks_running] = identifier[resource_manager] . identifier[BoundedSemaphore] ( identifier[self] . identifier[process_number] )
identifier[test_suites] , identifier[local_test_suites] = identifier[self] . identifier[collect_tests] ( identifier[test] )
identifier[results_collector] = identifier[ResultCollector] (
identifier[self] . identifier[stream] , identifier[self] . identifier[descriptions] , identifier[self] . identifier[verbosity] ,
identifier[result_queue] = identifier[results_queue] , identifier[test_results] = identifier[self] . identifier[_makeResult] (),
identifier[tests] = identifier[test_suites]
)
identifier[results_collector] . identifier[start] ()
keyword[for] identifier[index] , identifier[suite] keyword[in] identifier[enumerate] ( identifier[test_suites] ):
identifier[tasks_running] . identifier[acquire] ()
identifier[x] = identifier[self] . identifier[Process] ( identifier[index] , identifier[suite] , identifier[results_queue] , identifier[tasks_running] )
identifier[x] . identifier[start] ()
identifier[process] . identifier[append] ( identifier[x] )
identifier[local_test_suites] . identifier[run] ( identifier[results_collector] )
keyword[for] identifier[i] keyword[in] identifier[process] :
identifier[i] . identifier[join] ()
identifier[results_queue] . identifier[join] ()
identifier[results_collector] . identifier[end_collection] ()
identifier[results_collector] . identifier[join] ()
identifier[results_collector] . identifier[printErrors] ()
identifier[self] . identifier[print_summary] ( identifier[results_collector] , identifier[time] . identifier[time] ()- identifier[start_time] )
keyword[return] identifier[results_collector] | def run(self, test: unittest.TestSuite):
"""
Given a TestSuite, will create one process per test case whenever possible and run them concurrently.
Will then wait for the result and return them
:param test: the TestSuite to run
:return: a summary of the test run
"""
start_time = time.time()
process = []
resource_manager = multiprocessing.Manager()
results_queue = resource_manager.Queue()
tasks_running = resource_manager.BoundedSemaphore(self.process_number)
(test_suites, local_test_suites) = self.collect_tests(test)
results_collector = ResultCollector(self.stream, self.descriptions, self.verbosity, result_queue=results_queue, test_results=self._makeResult(), tests=test_suites)
results_collector.start()
for (index, suite) in enumerate(test_suites):
tasks_running.acquire()
x = self.Process(index, suite, results_queue, tasks_running)
x.start()
process.append(x) # depends on [control=['for'], data=[]]
local_test_suites.run(results_collector)
for i in process:
i.join() # depends on [control=['for'], data=['i']]
results_queue.join()
results_collector.end_collection()
results_collector.join()
results_collector.printErrors()
self.print_summary(results_collector, time.time() - start_time)
return results_collector |
def footer(self):
    '''
    Return the left, center and right axis instances where the footer
    will be printed, as a 3-tuple.
    '''
    left = self.footer_left(on=False)
    center = self.footer_center(on=False)
    right = self.footer_right(on=False)
    return left, center, right
constant[
Returns the axis instance where the footer will be printed
]
return[tuple[[<ast.Call object at 0x7da1b0eae4d0>, <ast.Call object at 0x7da1b0eae260>, <ast.Call object at 0x7da1b0eae080>]]] | keyword[def] identifier[footer] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[footer_left] ( identifier[on] = keyword[False] ), identifier[self] . identifier[footer_center] ( identifier[on] = keyword[False] ), identifier[self] . identifier[footer_right] ( identifier[on] = keyword[False] ) | def footer(self):
"""
Returns the axis instance where the footer will be printed
"""
return (self.footer_left(on=False), self.footer_center(on=False), self.footer_right(on=False)) |
async def _send_chunk(self, chunk):
    """
    Serialize and transmit a single chunk (no bundling for now).
    """
    self.__log_debug('> %s', chunk)
    packet = serialize_packet(
        self._local_port,
        self._remote_port,
        self._remote_verification_tag,
        chunk)
    await self.__transport._send_data(packet)
literal[string]
identifier[self] . identifier[__log_debug] ( literal[string] , identifier[chunk] )
keyword[await] identifier[self] . identifier[__transport] . identifier[_send_data] ( identifier[serialize_packet] (
identifier[self] . identifier[_local_port] ,
identifier[self] . identifier[_remote_port] ,
identifier[self] . identifier[_remote_verification_tag] ,
identifier[chunk] )) | async def _send_chunk(self, chunk):
"""
Transmit a chunk (no bundling for now).
"""
self.__log_debug('> %s', chunk)
await self.__transport._send_data(serialize_packet(self._local_port, self._remote_port, self._remote_verification_tag, chunk)) |
def main(*argv,
         filesystem=None,
         do_exit=True,
         stdout=None,
         stderr=None):
    """Main entry point for the cli.

    The filesystem can be overridden for test purposes; a KeyboardInterrupt
    aborts silently.
    """
    try:
        cli = MdCLI()
        cli.filesystem = filesystem
        cli.stdout = stdout if stdout else sys.stdout
        cli.stderr = stderr if stderr else sys.stderr
        status = cli.main(*argv, loop=LOOP_NEVER)
    except KeyboardInterrupt:
        return None
    if do_exit:
        sys.exit(status)
    return status
constant[Main method for the cli.
We allow the filesystem to be overridden for test purposes.]
<ast.Try object at 0x7da1b2898100> | keyword[def] identifier[main] (* identifier[argv] ,
identifier[filesystem] = keyword[None] ,
identifier[do_exit] = keyword[True] ,
identifier[stdout] = keyword[None] ,
identifier[stderr] = keyword[None] ):
literal[string]
keyword[try] :
identifier[mdcli] = identifier[MdCLI] ()
identifier[mdcli] . identifier[filesystem] = identifier[filesystem]
identifier[mdcli] . identifier[stdout] = identifier[stdout] keyword[or] identifier[sys] . identifier[stdout]
identifier[mdcli] . identifier[stderr] = identifier[stderr] keyword[or] identifier[sys] . identifier[stderr]
identifier[retval] = identifier[mdcli] . identifier[main] (* identifier[argv] , identifier[loop] = identifier[LOOP_NEVER] )
keyword[if] identifier[do_exit] :
identifier[sys] . identifier[exit] ( identifier[retval] )
keyword[else] :
keyword[return] identifier[retval]
keyword[except] identifier[KeyboardInterrupt] :
keyword[pass] | def main(*argv, filesystem=None, do_exit=True, stdout=None, stderr=None):
"""Main method for the cli.
We allow the filesystem to be overridden for test purposes."""
try:
mdcli = MdCLI()
mdcli.filesystem = filesystem
mdcli.stdout = stdout or sys.stdout
mdcli.stderr = stderr or sys.stderr
retval = mdcli.main(*argv, loop=LOOP_NEVER)
if do_exit:
sys.exit(retval) # depends on [control=['if'], data=[]]
else:
return retval # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
pass # depends on [control=['except'], data=[]] |
def check(text):
    """Flag illogical phrases found in the text."""
    err = "misc.illogic"
    msg = u"'{}' is illogical."
    # Regex patterns for phrases that contradict themselves.
    patterns = [
        "preplan",
        "more than .{1,10} all",
        "appraisal valuations?",
        "(?:i|you|he|she|it|y'all|all y'all|you all|they) could care less",
        "least worst",
        "much-needed gaps?",
        "much-needed voids?",
        "no longer requires oxygen",
        "without scarcely",
    ]
    return existence_check(text, patterns, err, msg, offset=1)
constant[Check the text.]
variable[err] assign[=] constant[misc.illogic]
variable[msg] assign[=] constant['{}' is illogical.]
variable[illogics] assign[=] list[[<ast.Constant object at 0x7da1b063abc0>, <ast.Constant object at 0x7da1b063b250>, <ast.Constant object at 0x7da1b063a440>, <ast.Constant object at 0x7da1b063b220>, <ast.Constant object at 0x7da1b063b190>, <ast.Constant object at 0x7da1b063b4c0>, <ast.Constant object at 0x7da1b063b4f0>, <ast.Constant object at 0x7da1b063b490>, <ast.Constant object at 0x7da1b063b430>]]
return[call[name[existence_check], parameter[name[text], name[illogics], name[err], name[msg]]]] | keyword[def] identifier[check] ( identifier[text] ):
literal[string]
identifier[err] = literal[string]
identifier[msg] = literal[string]
identifier[illogics] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
]
keyword[return] identifier[existence_check] ( identifier[text] , identifier[illogics] , identifier[err] , identifier[msg] , identifier[offset] = literal[int] ) | def check(text):
"""Check the text."""
err = 'misc.illogic'
msg = u"'{}' is illogical."
illogics = ['preplan', 'more than .{1,10} all', 'appraisal valuations?', "(?:i|you|he|she|it|y'all|all y'all|you all|they) could care less", 'least worst', 'much-needed gaps?', 'much-needed voids?', 'no longer requires oxygen', 'without scarcely']
return existence_check(text, illogics, err, msg, offset=1) |
def dcm(self):
    """
    Get the DCM, deriving it lazily on first access.
    :returns: 3x3 array
    """
    if self._dcm is not None:
        return self._dcm
    if self._q is not None:
        # derive the dcm from the quaternion
        self._dcm = self._q_to_dcm(self.q)
    elif self._euler is not None:
        # derive the dcm from the euler angles
        self._dcm = self._euler_to_dcm(self._euler)
    return self._dcm
constant[
Get the DCM
:returns: 3x3 array
]
if compare[name[self]._dcm is constant[None]] begin[:]
if compare[name[self]._q is_not constant[None]] begin[:]
name[self]._dcm assign[=] call[name[self]._q_to_dcm, parameter[name[self].q]]
return[name[self]._dcm] | keyword[def] identifier[dcm] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_dcm] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[_q] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_dcm] = identifier[self] . identifier[_q_to_dcm] ( identifier[self] . identifier[q] )
keyword[elif] identifier[self] . identifier[_euler] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_dcm] = identifier[self] . identifier[_euler_to_dcm] ( identifier[self] . identifier[_euler] )
keyword[return] identifier[self] . identifier[_dcm] | def dcm(self):
"""
Get the DCM
:returns: 3x3 array
"""
if self._dcm is None:
if self._q is not None:
# try to get dcm from q
self._dcm = self._q_to_dcm(self.q) # depends on [control=['if'], data=[]]
elif self._euler is not None:
# try to get get dcm from euler
self._dcm = self._euler_to_dcm(self._euler) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return self._dcm |
def setup_Jupyter():
    """
    Set up a Jupyter notebook with a few defaults.
    """
    sns.set(context = "paper", font = "monospace")
    warnings.filterwarnings("ignore")
    # Show up to 500 rows and columns when displaying DataFrames.
    for option in ("display.max_rows", "display.max_columns"):
        pd.set_option(option, 500)
    plt.rcParams["figure.figsize"] = (17, 10)
constant[
Set up a Jupyter notebook with a few defaults.
]
call[name[sns].set, parameter[]]
call[name[warnings].filterwarnings, parameter[constant[ignore]]]
call[name[pd].set_option, parameter[constant[display.max_rows], constant[500]]]
call[name[pd].set_option, parameter[constant[display.max_columns], constant[500]]]
call[name[plt].rcParams][constant[figure.figsize]] assign[=] tuple[[<ast.Constant object at 0x7da20c7caad0>, <ast.Constant object at 0x7da20c7cb4c0>]] | keyword[def] identifier[setup_Jupyter] ():
literal[string]
identifier[sns] . identifier[set] ( identifier[context] = literal[string] , identifier[font] = literal[string] )
identifier[warnings] . identifier[filterwarnings] ( literal[string] )
identifier[pd] . identifier[set_option] ( literal[string] , literal[int] )
identifier[pd] . identifier[set_option] ( literal[string] , literal[int] )
identifier[plt] . identifier[rcParams] [ literal[string] ]=( literal[int] , literal[int] ) | def setup_Jupyter():
"""
Set up a Jupyter notebook with a few defaults.
"""
sns.set(context='paper', font='monospace')
warnings.filterwarnings('ignore')
pd.set_option('display.max_rows', 500)
pd.set_option('display.max_columns', 500)
plt.rcParams['figure.figsize'] = (17, 10) |
def report(self, output_file=sys.stdout):
    """Report analysis outcome in human readable form.

    Depending on verbosity, prints the raw results, a per-memory-level
    bottleneck table, and a verdict on whether the kernel is CPU bound or
    cache/memory bound.

    :param output_file: file-like object the report is written to
        (defaults to sys.stdout)
    """
    max_perf = self.results['max_perf']
    # Highest verbosity: dump the complete raw results structure.
    if self._args and self._args.verbose >= 3:
        print('{}'.format(pformat(self.results)), file=output_file)
    # verbose >= 1: print the extra infos and the bottleneck table.
    if self._args and self._args.verbose >= 1:
        print('{}'.format(pformat(self.results['verbose infos'])), file=output_file)
        print('Bottlenecks:', file=output_file)
        print(' level | a. intensity | performance | peak bandwidth | peak bandwidth kernel',
              file=output_file)
        print('--------+--------------+-----------------+-------------------+----------------------',
              file=output_file)
        print(' CPU | | {!s:>15} | |'.format(
            max_perf[self._args.unit]),
            file=output_file)
        # NOTE: the format keys contain spaces ('arithmetic intensity',
        # 'bw kernel'); that only works because b is unpacked with **b.
        for b in self.results['mem bottlenecks']:
            print('{level:>7} | {arithmetic intensity:>5.2} FLOP/B | {0!s:>15} |'
                  ' {bandwidth!s:>17} | {bw kernel:<8}'.format(
                b['performance'][self._args.unit], **b),
                file=output_file)
        print('', file=output_file)
    # The kernel is CPU bound if even its minimal performance exceeds the
    # machine's peak FLOP/s; otherwise a memory level is the bottleneck.
    if self.results['min performance']['FLOP/s'] > max_perf['FLOP/s']:
        # CPU bound
        print('CPU bound. {!s} due to CPU max. FLOP/s'.format(max_perf), file=output_file)
    else:
        # Cache or mem bound
        print('Cache or mem bound.', file=output_file)
        bottleneck = self.results['mem bottlenecks'][self.results['bottleneck level']]
        print('{!s} due to {} transfer bottleneck (with bw from {} benchmark)'.format(
            bottleneck['performance'][self._args.unit],
            bottleneck['level'],
            bottleneck['bw kernel']),
            file=output_file)
        print('Arithmetic Intensity: {:.2f} FLOP/B'.format(bottleneck['arithmetic intensity']),
              file=output_file)
constant[Report analysis outcome in human readable form.]
variable[max_perf] assign[=] call[name[self].results][constant[max_perf]]
if <ast.BoolOp object at 0x7da2041db4f0> begin[:]
call[name[print], parameter[call[constant[{}].format, parameter[call[name[pformat], parameter[name[self].results]]]]]]
if <ast.BoolOp object at 0x7da2041d9540> begin[:]
call[name[print], parameter[call[constant[{}].format, parameter[call[name[pformat], parameter[call[name[self].results][constant[verbose infos]]]]]]]]
call[name[print], parameter[constant[Bottlenecks:]]]
call[name[print], parameter[constant[ level | a. intensity | performance | peak bandwidth | peak bandwidth kernel]]]
call[name[print], parameter[constant[--------+--------------+-----------------+-------------------+----------------------]]]
call[name[print], parameter[call[constant[ CPU | | {!s:>15} | |].format, parameter[call[name[max_perf]][name[self]._args.unit]]]]]
for taget[name[b]] in starred[call[name[self].results][constant[mem bottlenecks]]] begin[:]
call[name[print], parameter[call[constant[{level:>7} | {arithmetic intensity:>5.2} FLOP/B | {0!s:>15} | {bandwidth!s:>17} | {bw kernel:<8}].format, parameter[call[call[name[b]][constant[performance]]][name[self]._args.unit]]]]]
call[name[print], parameter[constant[]]]
if compare[call[call[name[self].results][constant[min performance]]][constant[FLOP/s]] greater[>] call[name[max_perf]][constant[FLOP/s]]] begin[:]
call[name[print], parameter[call[constant[CPU bound. {!s} due to CPU max. FLOP/s].format, parameter[name[max_perf]]]]] | keyword[def] identifier[report] ( identifier[self] , identifier[output_file] = identifier[sys] . identifier[stdout] ):
literal[string]
identifier[max_perf] = identifier[self] . identifier[results] [ literal[string] ]
keyword[if] identifier[self] . identifier[_args] keyword[and] identifier[self] . identifier[_args] . identifier[verbose] >= literal[int] :
identifier[print] ( literal[string] . identifier[format] ( identifier[pformat] ( identifier[self] . identifier[results] )), identifier[file] = identifier[output_file] )
keyword[if] identifier[self] . identifier[_args] keyword[and] identifier[self] . identifier[_args] . identifier[verbose] >= literal[int] :
identifier[print] ( literal[string] . identifier[format] ( identifier[pformat] ( identifier[self] . identifier[results] [ literal[string] ])), identifier[file] = identifier[output_file] )
identifier[print] ( literal[string] , identifier[file] = identifier[output_file] )
identifier[print] ( literal[string] ,
identifier[file] = identifier[output_file] )
identifier[print] ( literal[string] ,
identifier[file] = identifier[output_file] )
identifier[print] ( literal[string] . identifier[format] (
identifier[max_perf] [ identifier[self] . identifier[_args] . identifier[unit] ]),
identifier[file] = identifier[output_file] )
keyword[for] identifier[b] keyword[in] identifier[self] . identifier[results] [ literal[string] ]:
identifier[print] ( literal[string]
literal[string] . identifier[format] (
identifier[b] [ literal[string] ][ identifier[self] . identifier[_args] . identifier[unit] ],** identifier[b] ),
identifier[file] = identifier[output_file] )
identifier[print] ( literal[string] , identifier[file] = identifier[output_file] )
keyword[if] identifier[self] . identifier[results] [ literal[string] ][ literal[string] ]> identifier[max_perf] [ literal[string] ]:
identifier[print] ( literal[string] . identifier[format] ( identifier[max_perf] ), identifier[file] = identifier[output_file] )
keyword[else] :
identifier[print] ( literal[string] , identifier[file] = identifier[output_file] )
identifier[bottleneck] = identifier[self] . identifier[results] [ literal[string] ][ identifier[self] . identifier[results] [ literal[string] ]]
identifier[print] ( literal[string] . identifier[format] (
identifier[bottleneck] [ literal[string] ][ identifier[self] . identifier[_args] . identifier[unit] ],
identifier[bottleneck] [ literal[string] ],
identifier[bottleneck] [ literal[string] ]),
identifier[file] = identifier[output_file] )
identifier[print] ( literal[string] . identifier[format] ( identifier[bottleneck] [ literal[string] ]),
identifier[file] = identifier[output_file] ) | def report(self, output_file=sys.stdout):
"""Report analysis outcome in human readable form."""
max_perf = self.results['max_perf']
if self._args and self._args.verbose >= 3:
print('{}'.format(pformat(self.results)), file=output_file) # depends on [control=['if'], data=[]]
if self._args and self._args.verbose >= 1:
print('{}'.format(pformat(self.results['verbose infos'])), file=output_file)
print('Bottlenecks:', file=output_file)
print(' level | a. intensity | performance | peak bandwidth | peak bandwidth kernel', file=output_file)
print('--------+--------------+-----------------+-------------------+----------------------', file=output_file)
print(' CPU | | {!s:>15} | |'.format(max_perf[self._args.unit]), file=output_file)
for b in self.results['mem bottlenecks']:
print('{level:>7} | {arithmetic intensity:>5.2} FLOP/B | {0!s:>15} | {bandwidth!s:>17} | {bw kernel:<8}'.format(b['performance'][self._args.unit], **b), file=output_file) # depends on [control=['for'], data=['b']]
print('', file=output_file) # depends on [control=['if'], data=[]]
if self.results['min performance']['FLOP/s'] > max_perf['FLOP/s']:
# CPU bound
print('CPU bound. {!s} due to CPU max. FLOP/s'.format(max_perf), file=output_file) # depends on [control=['if'], data=[]]
else:
# Cache or mem bound
print('Cache or mem bound.', file=output_file)
bottleneck = self.results['mem bottlenecks'][self.results['bottleneck level']]
print('{!s} due to {} transfer bottleneck (with bw from {} benchmark)'.format(bottleneck['performance'][self._args.unit], bottleneck['level'], bottleneck['bw kernel']), file=output_file)
print('Arithmetic Intensity: {:.2f} FLOP/B'.format(bottleneck['arithmetic intensity']), file=output_file) |
def run_step(context):
    """pypyr step saves current utc datetime to context.
    Args:
        context: pypyr.context.Context. Mandatory.
        The following context key is optional:
        - nowUtcIn. str. Datetime formatting expression. For full list
          of possible expressions, check here:
          https://docs.python.org/3.7/library/datetime.html#strftime-and-strptime-behavior
        All inputs support pypyr formatting expressions.
    This step creates now in context, containing a string representation of the
    timestamp. If input formatting not specified, defaults to ISO8601.
    Default is:
    YYYY-MM-DDTHH:MM:SS.ffffff+00:00, or, if microsecond is 0,
    YYYY-MM-DDTHH:MM:SS
    Returns:
        None. updates context arg.
    """
    logger.debug("started")
    format_expression = context.get('nowUtcIn', None)
    if not format_expression:
        # No formatting expression given: default to ISO8601.
        context['nowUtc'] = datetime.now(timezone.utc).isoformat()
    else:
        fmt = context.get_formatted_string(format_expression)
        context['nowUtc'] = datetime.now(timezone.utc).strftime(fmt)
    logger.info(f"timestamp {context['nowUtc']} saved to context nowUtc")
    logger.debug("done")
constant[pypyr step saves current utc datetime to context.
Args:
context: pypyr.context.Context. Mandatory.
The following context key is optional:
- nowUtcIn. str. Datetime formatting expression. For full list
of possible expressions, check here:
https://docs.python.org/3.7/library/datetime.html#strftime-and-strptime-behavior
All inputs support pypyr formatting expressions.
This step creates now in context, containing a string representation of the
timestamp. If input formatting not specified, defaults to ISO8601.
Default is:
YYYY-MM-DDTHH:MM:SS.ffffff+00:00, or, if microsecond is 0,
YYYY-MM-DDTHH:MM:SS
Returns:
None. updates context arg.
]
call[name[logger].debug, parameter[constant[started]]]
variable[format_expression] assign[=] call[name[context].get, parameter[constant[nowUtcIn], constant[None]]]
if name[format_expression] begin[:]
variable[formatted_expression] assign[=] call[name[context].get_formatted_string, parameter[name[format_expression]]]
call[name[context]][constant[nowUtc]] assign[=] call[call[name[datetime].now, parameter[name[timezone].utc]].strftime, parameter[name[formatted_expression]]]
call[name[logger].info, parameter[<ast.JoinedStr object at 0x7da18eb57d60>]]
call[name[logger].debug, parameter[constant[done]]] | keyword[def] identifier[run_step] ( identifier[context] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] )
identifier[format_expression] = identifier[context] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[format_expression] :
identifier[formatted_expression] = identifier[context] . identifier[get_formatted_string] ( identifier[format_expression] )
identifier[context] [ literal[string] ]= identifier[datetime] . identifier[now] (
identifier[timezone] . identifier[utc] ). identifier[strftime] ( identifier[formatted_expression] )
keyword[else] :
identifier[context] [ literal[string] ]= identifier[datetime] . identifier[now] ( identifier[timezone] . identifier[utc] ). identifier[isoformat] ()
identifier[logger] . identifier[info] ( literal[string] )
identifier[logger] . identifier[debug] ( literal[string] ) | def run_step(context):
"""pypyr step saves current utc datetime to context.
Args:
context: pypyr.context.Context. Mandatory.
The following context key is optional:
- nowUtcIn. str. Datetime formatting expression. For full list
of possible expressions, check here:
https://docs.python.org/3.7/library/datetime.html#strftime-and-strptime-behavior
All inputs support pypyr formatting expressions.
This step creates now in context, containing a string representation of the
timestamp. If input formatting not specified, defaults to ISO8601.
Default is:
YYYY-MM-DDTHH:MM:SS.ffffff+00:00, or, if microsecond is 0,
YYYY-MM-DDTHH:MM:SS
Returns:
None. updates context arg.
"""
logger.debug('started')
format_expression = context.get('nowUtcIn', None)
if format_expression:
formatted_expression = context.get_formatted_string(format_expression)
context['nowUtc'] = datetime.now(timezone.utc).strftime(formatted_expression) # depends on [control=['if'], data=[]]
else:
context['nowUtc'] = datetime.now(timezone.utc).isoformat()
logger.info(f"timestamp {context['nowUtc']} saved to context nowUtc")
logger.debug('done') |
def delete(self, filepath):
    """
    Delete the specified file.

    Writes a confirmation message on success; responds with HTTP 404 when
    the file cannot be deleted.

    :param filepath: path of the file to delete
    :raises tornado.web.HTTPError: 404 when the deletion raises OSError
    """
    # Keep the try body minimal: only the filesystem call should map to a
    # 404.  Previously self.write() was also inside the try, so an OSError
    # raised while writing the response would be misreported as 404.
    try:
        self.fs.delete(filepath)
    except OSError:
        raise tornado.web.HTTPError(404)
    self.write({'msg': 'File deleted at {}'.format(filepath)})
constant[
Delete the specified file.
]
<ast.Try object at 0x7da2047eb880> | keyword[def] identifier[delete] ( identifier[self] , identifier[filepath] ):
literal[string]
keyword[try] :
identifier[self] . identifier[fs] . identifier[delete] ( identifier[filepath] )
identifier[self] . identifier[write] ({ literal[string] : literal[string] . identifier[format] ( identifier[filepath] )})
keyword[except] identifier[OSError] :
keyword[raise] identifier[tornado] . identifier[web] . identifier[HTTPError] ( literal[int] ) | def delete(self, filepath):
"""
Delete the specified file.
"""
try:
self.fs.delete(filepath)
self.write({'msg': 'File deleted at {}'.format(filepath)}) # depends on [control=['try'], data=[]]
except OSError:
raise tornado.web.HTTPError(404) # depends on [control=['except'], data=[]] |
def build(self):
    """Build straight helix along z-axis, starting with CA1 on x-axis"""
    # Angular step (radians) between consecutive residues around the axis.
    ang_per_res = (2 * numpy.pi) / self.residues_per_turn
    # Per-atom cylindrical offsets (r, zeta, z_shift) for this helix type.
    atom_offsets = _atom_offsets[self.helix_type]
    # Left-handed helices rotate the opposite way around the z-axis.
    if self.handedness == 'l':
        handedness = -1
    else:
        handedness = 1
    atom_labels = ['N', 'CA', 'C', 'O']
    # Label residues GLY only if offsets exist for the whole backbone;
    # otherwise fall back to an unknown residue code.
    if all([x in atom_offsets.keys() for x in atom_labels]):
        res_label = 'GLY'
    else:
        res_label = 'UNK'
    monomers = []
    for i in range(self.num_monomers):
        residue = Residue(mol_code=res_label, ampal_parent=self)
        atoms_dict = OrderedDict()
        for atom_label in atom_labels:
            r, zeta, z_shift = atom_offsets[atom_label]
            # Rotate each successive residue by ang_per_res, with the sign
            # flipped for left-handed helices.
            rot_ang = ((i * ang_per_res) + zeta) * handedness
            # Translate along the helix axis by the per-residue rise.
            z = (self.rise_per_residue * i) + z_shift
            coords = cylindrical_to_cartesian(
                radius=r, azimuth=rot_ang, z=z, radians=True)
            atom = Atom(
                coordinates=coords, element=atom_label[0],
                ampal_parent=residue, res_label=atom_label)
            atoms_dict[atom_label] = atom
        residue.atoms = atoms_dict
        monomers.append(residue)
    self._monomers = monomers
    # Renumber/relabel the freshly built monomers and atoms.
    self.relabel_monomers()
    self.relabel_atoms()
    return
constant[Build straight helix along z-axis, starting with CA1 on x-axis]
variable[ang_per_res] assign[=] binary_operation[binary_operation[constant[2] * name[numpy].pi] / name[self].residues_per_turn]
variable[atom_offsets] assign[=] call[name[_atom_offsets]][name[self].helix_type]
if compare[name[self].handedness equal[==] constant[l]] begin[:]
variable[handedness] assign[=] <ast.UnaryOp object at 0x7da1b261cb20>
variable[atom_labels] assign[=] list[[<ast.Constant object at 0x7da1b261cf10>, <ast.Constant object at 0x7da1b261fa90>, <ast.Constant object at 0x7da1b261c400>, <ast.Constant object at 0x7da1b261d6f0>]]
if call[name[all], parameter[<ast.ListComp object at 0x7da1b261fb50>]] begin[:]
variable[res_label] assign[=] constant[GLY]
variable[monomers] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[name[self].num_monomers]]] begin[:]
variable[residue] assign[=] call[name[Residue], parameter[]]
variable[atoms_dict] assign[=] call[name[OrderedDict], parameter[]]
for taget[name[atom_label]] in starred[name[atom_labels]] begin[:]
<ast.Tuple object at 0x7da1b261de70> assign[=] call[name[atom_offsets]][name[atom_label]]
variable[rot_ang] assign[=] binary_operation[binary_operation[binary_operation[name[i] * name[ang_per_res]] + name[zeta]] * name[handedness]]
variable[z] assign[=] binary_operation[binary_operation[name[self].rise_per_residue * name[i]] + name[z_shift]]
variable[coords] assign[=] call[name[cylindrical_to_cartesian], parameter[]]
variable[atom] assign[=] call[name[Atom], parameter[]]
call[name[atoms_dict]][name[atom_label]] assign[=] name[atom]
name[residue].atoms assign[=] name[atoms_dict]
call[name[monomers].append, parameter[name[residue]]]
name[self]._monomers assign[=] name[monomers]
call[name[self].relabel_monomers, parameter[]]
call[name[self].relabel_atoms, parameter[]]
return[None] | keyword[def] identifier[build] ( identifier[self] ):
literal[string]
identifier[ang_per_res] =( literal[int] * identifier[numpy] . identifier[pi] )/ identifier[self] . identifier[residues_per_turn]
identifier[atom_offsets] = identifier[_atom_offsets] [ identifier[self] . identifier[helix_type] ]
keyword[if] identifier[self] . identifier[handedness] == literal[string] :
identifier[handedness] =- literal[int]
keyword[else] :
identifier[handedness] = literal[int]
identifier[atom_labels] =[ literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[all] ([ identifier[x] keyword[in] identifier[atom_offsets] . identifier[keys] () keyword[for] identifier[x] keyword[in] identifier[atom_labels] ]):
identifier[res_label] = literal[string]
keyword[else] :
identifier[res_label] = literal[string]
identifier[monomers] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[num_monomers] ):
identifier[residue] = identifier[Residue] ( identifier[mol_code] = identifier[res_label] , identifier[ampal_parent] = identifier[self] )
identifier[atoms_dict] = identifier[OrderedDict] ()
keyword[for] identifier[atom_label] keyword[in] identifier[atom_labels] :
identifier[r] , identifier[zeta] , identifier[z_shift] = identifier[atom_offsets] [ identifier[atom_label] ]
identifier[rot_ang] =(( identifier[i] * identifier[ang_per_res] )+ identifier[zeta] )* identifier[handedness]
identifier[z] =( identifier[self] . identifier[rise_per_residue] * identifier[i] )+ identifier[z_shift]
identifier[coords] = identifier[cylindrical_to_cartesian] (
identifier[radius] = identifier[r] , identifier[azimuth] = identifier[rot_ang] , identifier[z] = identifier[z] , identifier[radians] = keyword[True] )
identifier[atom] = identifier[Atom] (
identifier[coordinates] = identifier[coords] , identifier[element] = identifier[atom_label] [ literal[int] ],
identifier[ampal_parent] = identifier[residue] , identifier[res_label] = identifier[atom_label] )
identifier[atoms_dict] [ identifier[atom_label] ]= identifier[atom]
identifier[residue] . identifier[atoms] = identifier[atoms_dict]
identifier[monomers] . identifier[append] ( identifier[residue] )
identifier[self] . identifier[_monomers] = identifier[monomers]
identifier[self] . identifier[relabel_monomers] ()
identifier[self] . identifier[relabel_atoms] ()
keyword[return] | def build(self):
"""Build straight helix along z-axis, starting with CA1 on x-axis"""
ang_per_res = 2 * numpy.pi / self.residues_per_turn
atom_offsets = _atom_offsets[self.helix_type]
if self.handedness == 'l':
handedness = -1 # depends on [control=['if'], data=[]]
else:
handedness = 1
atom_labels = ['N', 'CA', 'C', 'O']
if all([x in atom_offsets.keys() for x in atom_labels]):
res_label = 'GLY' # depends on [control=['if'], data=[]]
else:
res_label = 'UNK'
monomers = []
for i in range(self.num_monomers):
residue = Residue(mol_code=res_label, ampal_parent=self)
atoms_dict = OrderedDict()
for atom_label in atom_labels:
(r, zeta, z_shift) = atom_offsets[atom_label]
rot_ang = (i * ang_per_res + zeta) * handedness
z = self.rise_per_residue * i + z_shift
coords = cylindrical_to_cartesian(radius=r, azimuth=rot_ang, z=z, radians=True)
atom = Atom(coordinates=coords, element=atom_label[0], ampal_parent=residue, res_label=atom_label)
atoms_dict[atom_label] = atom # depends on [control=['for'], data=['atom_label']]
residue.atoms = atoms_dict
monomers.append(residue) # depends on [control=['for'], data=['i']]
self._monomers = monomers
self.relabel_monomers()
self.relabel_atoms()
return |
def default_indexes(
    coords: Mapping[Any, Variable],
    dims: Iterable,
) -> 'OrderedDict[Any, pd.Index]':
    """Build the default per-dimension indexes for a Dataset/DataArray.
    Parameters
    ----------
    coords : Mapping[Any, xarray.Variable]
        Coordinate variables from which to draw default indexes.
    dims : iterable
        Iterable of dimension names.
    Returns
    -------
    Mapping from indexing keys (levels/dimension names) to indexes used for
    indexing along that dimension.
    """
    indexes = OrderedDict()
    for dim in dims:
        # Only dimensions that have a matching coordinate variable get an index.
        if dim in coords:
            indexes[dim] = coords[dim].to_index()
    return indexes
constant[Default indexes for a Dataset/DataArray.
Parameters
----------
coords : Mapping[Any, xarray.Variable]
Coordinate variables from which to draw default indexes.
dims : iterable
Iterable of dimension names.
Returns
-------
Mapping from indexing keys (levels/dimension names) to indexes used for
indexing along that dimension.
]
return[call[name[OrderedDict], parameter[<ast.GeneratorExp object at 0x7da207f03670>]]] | keyword[def] identifier[default_indexes] (
identifier[coords] : identifier[Mapping] [ identifier[Any] , identifier[Variable] ],
identifier[dims] : identifier[Iterable] ,
)-> literal[string] :
literal[string]
keyword[return] identifier[OrderedDict] (( identifier[key] , identifier[coords] [ identifier[key] ]. identifier[to_index] ())
keyword[for] identifier[key] keyword[in] identifier[dims] keyword[if] identifier[key] keyword[in] identifier[coords] ) | def default_indexes(coords: Mapping[Any, Variable], dims: Iterable) -> 'OrderedDict[Any, pd.Index]':
"""Default indexes for a Dataset/DataArray.
Parameters
----------
coords : Mapping[Any, xarray.Variable]
Coordinate variables from which to draw default indexes.
dims : iterable
Iterable of dimension names.
Returns
-------
Mapping from indexing keys (levels/dimension names) to indexes used for
indexing along that dimension.
"""
return OrderedDict(((key, coords[key].to_index()) for key in dims if key in coords)) |
def scan_line(self, line, regex):
    """Report whether *regex* matches anywhere in *line* (case-insensitive)."""
    match = re.search(regex, line, flags=re.IGNORECASE)
    return match is not None
constant[Checks if regex is in line, returns bool]
return[call[name[bool], parameter[call[name[re].search, parameter[name[regex], name[line]]]]]] | keyword[def] identifier[scan_line] ( identifier[self] , identifier[line] , identifier[regex] ):
literal[string]
keyword[return] identifier[bool] ( identifier[re] . identifier[search] ( identifier[regex] , identifier[line] , identifier[flags] = identifier[re] . identifier[IGNORECASE] )) | def scan_line(self, line, regex):
"""Checks if regex is in line, returns bool"""
return bool(re.search(regex, line, flags=re.IGNORECASE)) |
def is_element_in_database(element='', database='ENDF_VII'):
    """Check whether *element* exists in the given database folder.
    Parameters:
    ==========
    element: string. Name of the element. Not case sensitive
    database: string (default is 'ENDF_VII'). Name of folder that has the list of elements
    Returns:
    =======
    bool: True if element was found in the database
          False if element could not be found
    """
    # An empty name can never match; bail out before touching the database.
    if element == '':
        return False
    known_elements = get_list_element_from_database(database=database)
    return element in known_elements
constant[will try to find the element in the folder (database) specified
Parameters:
==========
element: string. Name of the element. Not case sensitive
database: string (default is 'ENDF_VII'). Name of folder that has the list of elements
Returns:
=======
bool: True if element was found in the database
False if element could not be found
]
if compare[name[element] equal[==] constant[]] begin[:]
return[constant[False]]
variable[list_entry_from_database] assign[=] call[name[get_list_element_from_database], parameter[]]
if compare[name[element] in name[list_entry_from_database]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[is_element_in_database] ( identifier[element] = literal[string] , identifier[database] = literal[string] ):
literal[string]
keyword[if] identifier[element] == literal[string] :
keyword[return] keyword[False]
identifier[list_entry_from_database] = identifier[get_list_element_from_database] ( identifier[database] = identifier[database] )
keyword[if] identifier[element] keyword[in] identifier[list_entry_from_database] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def is_element_in_database(element='', database='ENDF_VII'):
"""will try to find the element in the folder (database) specified
Parameters:
==========
element: string. Name of the element. Not case sensitive
database: string (default is 'ENDF_VII'). Name of folder that has the list of elements
Returns:
=======
bool: True if element was found in the database
False if element could not be found
"""
if element == '':
return False # depends on [control=['if'], data=[]]
list_entry_from_database = get_list_element_from_database(database=database)
if element in list_entry_from_database:
return True # depends on [control=['if'], data=[]]
return False |
def _sumterm_prime(lexer):
    """Return a sum term' expression, eliminates left recursion."""
    tok = next(lexer)
    # null production: push the lookahead token back for the caller
    if not isinstance(tok, OP_or):
        lexer.unpop_token(tok)
        return None
    # '|' XORTERM SUMTERM'
    left = _xorterm(lexer)
    rest = _sumterm_prime(lexer)
    return left if rest is None else ('or', left, rest)
constant[Return a sum term' expression, eliminates left recursion.]
variable[tok] assign[=] call[name[next], parameter[name[lexer]]]
if call[name[isinstance], parameter[name[tok], name[OP_or]]] begin[:]
variable[xorterm] assign[=] call[name[_xorterm], parameter[name[lexer]]]
variable[sumterm_prime] assign[=] call[name[_sumterm_prime], parameter[name[lexer]]]
if compare[name[sumterm_prime] is constant[None]] begin[:]
return[name[xorterm]] | keyword[def] identifier[_sumterm_prime] ( identifier[lexer] ):
literal[string]
identifier[tok] = identifier[next] ( identifier[lexer] )
keyword[if] identifier[isinstance] ( identifier[tok] , identifier[OP_or] ):
identifier[xorterm] = identifier[_xorterm] ( identifier[lexer] )
identifier[sumterm_prime] = identifier[_sumterm_prime] ( identifier[lexer] )
keyword[if] identifier[sumterm_prime] keyword[is] keyword[None] :
keyword[return] identifier[xorterm]
keyword[else] :
keyword[return] ( literal[string] , identifier[xorterm] , identifier[sumterm_prime] )
keyword[else] :
identifier[lexer] . identifier[unpop_token] ( identifier[tok] )
keyword[return] keyword[None] | def _sumterm_prime(lexer):
"""Return a sum term' expression, eliminates left recursion."""
tok = next(lexer)
# '|' XORTERM SUMTERM'
if isinstance(tok, OP_or):
xorterm = _xorterm(lexer)
sumterm_prime = _sumterm_prime(lexer)
if sumterm_prime is None:
return xorterm # depends on [control=['if'], data=[]]
else:
return ('or', xorterm, sumterm_prime) # depends on [control=['if'], data=[]]
else:
# null
lexer.unpop_token(tok)
return None |
def load(self, filename, ctx=None, allow_missing=False,
         ignore_extra=False, restore_prefix=''):
    """Load parameters from file.
    Parameters
    ----------
    filename : str
        Path to parameter file.
    ctx : Context or list of Context
        Context(s) initialize loaded parameters on.
    allow_missing : bool, default False
        Whether to silently skip loading parameters not represents in the file.
    ignore_extra : bool, default False
        Whether to silently ignore parameters from the file that are not
        present in this ParameterDict.
    restore_prefix : str, default ''
        prepend prefix to names of stored parameters before loading.
    """
    # Sanity check: every parameter in this dict must already carry the
    # prefix we are about to prepend to the stored names.
    if restore_prefix:
        for name in self.keys():
            assert name.startswith(restore_prefix), \
                "restore_prefix is '%s' but Parameters name '%s' does not start " \
                "with '%s'"%(restore_prefix, name, restore_prefix)
    lprefix = len(restore_prefix)
    # Strip the 'arg:'/'aux:' markers used by Module-style checkpoints so
    # names match those produced by save(); other keys pass through as-is.
    loaded = [(k[4:] if k.startswith('arg:') or k.startswith('aux:') else k, v) \
              for k, v in ndarray.load(filename).items()]
    # Keys in arg_dict are prefixed to line up with the names in self.
    arg_dict = {restore_prefix+k: v for k, v in loaded}
    # Unless the caller opted out, every parameter here must appear in the file.
    if not allow_missing:
        for name in self.keys():
            assert name in arg_dict, \
                "Parameter '%s' is missing in file '%s', which contains parameters: %s. " \
                "Please make sure source and target networks have the same prefix."%(
                    name[lprefix:], filename, _brief_print_list(arg_dict.keys()))
    for name in arg_dict:
        # Entries in the file with no matching parameter are either ignored
        # (ignore_extra=True) or treated as an error.
        if name not in self._params:
            assert ignore_extra, \
                "Parameter '%s' loaded from file '%s' is not present in ParameterDict, " \
                "choices are: %s. Set ignore_extra to True to ignore. " \
                "Please make sure source and target networks have the same prefix."%(
                    name[lprefix:], filename, _brief_print_list(self._params.keys()))
            continue
        # Initialize the matching Parameter with the loaded array on ctx.
        self[name]._load_init(arg_dict[name], ctx)
constant[Load parameters from file.
Parameters
----------
filename : str
Path to parameter file.
ctx : Context or list of Context
Context(s) initialize loaded parameters on.
allow_missing : bool, default False
Whether to silently skip loading parameters not represents in the file.
ignore_extra : bool, default False
Whether to silently ignore parameters from the file that are not
present in this ParameterDict.
restore_prefix : str, default ''
prepend prefix to names of stored parameters before loading.
]
if name[restore_prefix] begin[:]
for taget[name[name]] in starred[call[name[self].keys, parameter[]]] begin[:]
assert[call[name[name].startswith, parameter[name[restore_prefix]]]]
variable[lprefix] assign[=] call[name[len], parameter[name[restore_prefix]]]
variable[loaded] assign[=] <ast.ListComp object at 0x7da1b200bdc0>
variable[arg_dict] assign[=] <ast.DictComp object at 0x7da1b2008be0>
if <ast.UnaryOp object at 0x7da1b20084f0> begin[:]
for taget[name[name]] in starred[call[name[self].keys, parameter[]]] begin[:]
assert[compare[name[name] in name[arg_dict]]]
for taget[name[name]] in starred[name[arg_dict]] begin[:]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self]._params] begin[:]
assert[name[ignore_extra]]
continue
call[call[name[self]][name[name]]._load_init, parameter[call[name[arg_dict]][name[name]], name[ctx]]] | keyword[def] identifier[load] ( identifier[self] , identifier[filename] , identifier[ctx] = keyword[None] , identifier[allow_missing] = keyword[False] ,
identifier[ignore_extra] = keyword[False] , identifier[restore_prefix] = literal[string] ):
literal[string]
keyword[if] identifier[restore_prefix] :
keyword[for] identifier[name] keyword[in] identifier[self] . identifier[keys] ():
keyword[assert] identifier[name] . identifier[startswith] ( identifier[restore_prefix] ), literal[string] literal[string] %( identifier[restore_prefix] , identifier[name] , identifier[restore_prefix] )
identifier[lprefix] = identifier[len] ( identifier[restore_prefix] )
identifier[loaded] =[( identifier[k] [ literal[int] :] keyword[if] identifier[k] . identifier[startswith] ( literal[string] ) keyword[or] identifier[k] . identifier[startswith] ( literal[string] ) keyword[else] identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[ndarray] . identifier[load] ( identifier[filename] ). identifier[items] ()]
identifier[arg_dict] ={ identifier[restore_prefix] + identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[loaded] }
keyword[if] keyword[not] identifier[allow_missing] :
keyword[for] identifier[name] keyword[in] identifier[self] . identifier[keys] ():
keyword[assert] identifier[name] keyword[in] identifier[arg_dict] , literal[string] literal[string] %(
identifier[name] [ identifier[lprefix] :], identifier[filename] , identifier[_brief_print_list] ( identifier[arg_dict] . identifier[keys] ()))
keyword[for] identifier[name] keyword[in] identifier[arg_dict] :
keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[_params] :
keyword[assert] identifier[ignore_extra] , literal[string] literal[string] literal[string] %(
identifier[name] [ identifier[lprefix] :], identifier[filename] , identifier[_brief_print_list] ( identifier[self] . identifier[_params] . identifier[keys] ()))
keyword[continue]
identifier[self] [ identifier[name] ]. identifier[_load_init] ( identifier[arg_dict] [ identifier[name] ], identifier[ctx] ) | def load(self, filename, ctx=None, allow_missing=False, ignore_extra=False, restore_prefix=''):
"""Load parameters from file.
Parameters
----------
filename : str
Path to parameter file.
ctx : Context or list of Context
Context(s) initialize loaded parameters on.
allow_missing : bool, default False
Whether to silently skip loading parameters not represents in the file.
ignore_extra : bool, default False
Whether to silently ignore parameters from the file that are not
present in this ParameterDict.
restore_prefix : str, default ''
prepend prefix to names of stored parameters before loading.
"""
if restore_prefix:
for name in self.keys():
assert name.startswith(restore_prefix), "restore_prefix is '%s' but Parameters name '%s' does not start with '%s'" % (restore_prefix, name, restore_prefix) # depends on [control=['for'], data=['name']] # depends on [control=['if'], data=[]]
lprefix = len(restore_prefix)
loaded = [(k[4:] if k.startswith('arg:') or k.startswith('aux:') else k, v) for (k, v) in ndarray.load(filename).items()]
arg_dict = {restore_prefix + k: v for (k, v) in loaded}
if not allow_missing:
for name in self.keys():
assert name in arg_dict, "Parameter '%s' is missing in file '%s', which contains parameters: %s. Please make sure source and target networks have the same prefix." % (name[lprefix:], filename, _brief_print_list(arg_dict.keys())) # depends on [control=['for'], data=['name']] # depends on [control=['if'], data=[]]
for name in arg_dict:
if name not in self._params:
assert ignore_extra, "Parameter '%s' loaded from file '%s' is not present in ParameterDict, choices are: %s. Set ignore_extra to True to ignore. Please make sure source and target networks have the same prefix." % (name[lprefix:], filename, _brief_print_list(self._params.keys()))
continue # depends on [control=['if'], data=['name']]
self[name]._load_init(arg_dict[name], ctx) # depends on [control=['for'], data=['name']] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.