code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def _is_and_or_ternary(node):
    """Return True if node has the 'condition and true_value or false_value' form.

    None of condition, true_value or false_value may itself be a complex
    boolean expression.

    :param node: an astroid node to inspect.
    :return: bool
    """
    # Cheap shape checks first: a two-operand 'or' whose left side is an 'and'.
    if not (
        isinstance(node, astroid.BoolOp)
        and node.op == "or"
        and len(node.values) == 2
    ):
        return False
    condition_side = node.values[0]
    # Verify the operand count BEFORE indexing values[1] of the inner BoolOp
    # (the original indexed first and length-checked afterwards).
    return (
        isinstance(condition_side, astroid.BoolOp)
        and not isinstance(node.values[1], astroid.BoolOp)
        and condition_side.op == "and"
        and len(condition_side.values) == 2
        and not isinstance(condition_side.values[1], astroid.BoolOp)
    )
|
def function[_is_and_or_ternary, parameter[node]]:
constant[
Returns true if node is 'condition and true_value or false_value' form.
All of: condition, true_value and false_value should not be a complex boolean expression
]
return[<ast.BoolOp object at 0x7da1b020df60>]
|
keyword[def] identifier[_is_and_or_ternary] ( identifier[node] ):
literal[string]
keyword[return] (
identifier[isinstance] ( identifier[node] , identifier[astroid] . identifier[BoolOp] )
keyword[and] identifier[node] . identifier[op] == literal[string]
keyword[and] identifier[len] ( identifier[node] . identifier[values] )== literal[int]
keyword[and] identifier[isinstance] ( identifier[node] . identifier[values] [ literal[int] ], identifier[astroid] . identifier[BoolOp] )
keyword[and] keyword[not] identifier[isinstance] ( identifier[node] . identifier[values] [ literal[int] ], identifier[astroid] . identifier[BoolOp] )
keyword[and] identifier[node] . identifier[values] [ literal[int] ]. identifier[op] == literal[string]
keyword[and] keyword[not] identifier[isinstance] ( identifier[node] . identifier[values] [ literal[int] ]. identifier[values] [ literal[int] ], identifier[astroid] . identifier[BoolOp] )
keyword[and] identifier[len] ( identifier[node] . identifier[values] [ literal[int] ]. identifier[values] )== literal[int]
)
|
def _is_and_or_ternary(node):
"""
Returns true if node is 'condition and true_value or false_value' form.
All of: condition, true_value and false_value should not be a complex boolean expression
"""
return isinstance(node, astroid.BoolOp) and node.op == 'or' and (len(node.values) == 2) and isinstance(node.values[0], astroid.BoolOp) and (not isinstance(node.values[1], astroid.BoolOp)) and (node.values[0].op == 'and') and (not isinstance(node.values[0].values[1], astroid.BoolOp)) and (len(node.values[0].values) == 2)
|
def row(self, idx):
    """Return the DataFrameRow of the DataFrame at a given index.

    :param idx: the index of the row in the DataFrame.
    :return: a DataFrameRow
    """
    values = [column[idx] for column in self]
    return DataFrameRow(idx, values, self.colnames)
|
def function[row, parameter[self, idx]]:
constant[
Returns DataFrameRow of the DataFrame given its index.
:param idx: the index of the row in the DataFrame.
:return: returns a DataFrameRow
]
return[call[name[DataFrameRow], parameter[name[idx], <ast.ListComp object at 0x7da1afef8670>, name[self].colnames]]]
|
keyword[def] identifier[row] ( identifier[self] , identifier[idx] ):
literal[string]
keyword[return] identifier[DataFrameRow] ( identifier[idx] ,[ identifier[x] [ identifier[idx] ] keyword[for] identifier[x] keyword[in] identifier[self] ], identifier[self] . identifier[colnames] )
|
def row(self, idx):
"""
Returns DataFrameRow of the DataFrame given its index.
:param idx: the index of the row in the DataFrame.
:return: returns a DataFrameRow
"""
return DataFrameRow(idx, [x[idx] for x in self], self.colnames)
|
def _value_formatter(self, float_format=None, threshold=None):
    """Return a function to be applied on each value to format it.

    Parameters
    ----------
    float_format : callable, optional
        Called as ``float_format(value=v)`` to render a value; supersedes
        ``self.float_format`` when given.
    threshold : float, optional
        When given, any value whose absolute magnitude is not strictly
        greater than this is rendered as ``0.0`` (chopping).

    Returns
    -------
    callable
        One-argument formatter mapping a scalar to a string; NA values are
        rendered as ``self.na_rep``.
    """
    # the float_format parameter supersedes self.float_format
    if float_format is None:
        float_format = self.float_format
    # we are going to compose different functions, to first convert to
    # a string, then replace the decimal symbol, and finally chop according
    # to the threshold
    # when there is no float_format, we use str instead of '%g'
    # because str(0.0) = '0.0' while '%g' % 0.0 = '0'
    if float_format:
        def base_formatter(v):
            return float_format(value=v) if notna(v) else self.na_rep
    else:
        def base_formatter(v):
            return str(v) if notna(v) else self.na_rep
    if self.decimal != '.':
        def decimal_formatter(v):
            # replace only the FIRST '.' so any later dots are untouched
            return base_formatter(v).replace('.', self.decimal, 1)
    else:
        decimal_formatter = base_formatter
    if threshold is None:
        return decimal_formatter
    def formatter(value):
        if notna(value):
            # strict '>': values exactly at the threshold are chopped to 0.0
            if abs(value) > threshold:
                return decimal_formatter(value)
            else:
                return decimal_formatter(0.0)
        else:
            return self.na_rep
    return formatter
|
def function[_value_formatter, parameter[self, float_format, threshold]]:
constant[Returns a function to be applied on each value to format it
]
if compare[name[float_format] is constant[None]] begin[:]
variable[float_format] assign[=] name[self].float_format
if name[float_format] begin[:]
def function[base_formatter, parameter[v]]:
return[<ast.IfExp object at 0x7da1b1dd8370>]
if compare[name[self].decimal not_equal[!=] constant[.]] begin[:]
def function[decimal_formatter, parameter[v]]:
return[call[call[name[base_formatter], parameter[name[v]]].replace, parameter[constant[.], name[self].decimal, constant[1]]]]
if compare[name[threshold] is constant[None]] begin[:]
return[name[decimal_formatter]]
def function[formatter, parameter[value]]:
if call[name[notna], parameter[name[value]]] begin[:]
if compare[call[name[abs], parameter[name[value]]] greater[>] name[threshold]] begin[:]
return[call[name[decimal_formatter], parameter[name[value]]]]
return[name[formatter]]
|
keyword[def] identifier[_value_formatter] ( identifier[self] , identifier[float_format] = keyword[None] , identifier[threshold] = keyword[None] ):
literal[string]
keyword[if] identifier[float_format] keyword[is] keyword[None] :
identifier[float_format] = identifier[self] . identifier[float_format]
keyword[if] identifier[float_format] :
keyword[def] identifier[base_formatter] ( identifier[v] ):
keyword[return] identifier[float_format] ( identifier[value] = identifier[v] ) keyword[if] identifier[notna] ( identifier[v] ) keyword[else] identifier[self] . identifier[na_rep]
keyword[else] :
keyword[def] identifier[base_formatter] ( identifier[v] ):
keyword[return] identifier[str] ( identifier[v] ) keyword[if] identifier[notna] ( identifier[v] ) keyword[else] identifier[self] . identifier[na_rep]
keyword[if] identifier[self] . identifier[decimal] != literal[string] :
keyword[def] identifier[decimal_formatter] ( identifier[v] ):
keyword[return] identifier[base_formatter] ( identifier[v] ). identifier[replace] ( literal[string] , identifier[self] . identifier[decimal] , literal[int] )
keyword[else] :
identifier[decimal_formatter] = identifier[base_formatter]
keyword[if] identifier[threshold] keyword[is] keyword[None] :
keyword[return] identifier[decimal_formatter]
keyword[def] identifier[formatter] ( identifier[value] ):
keyword[if] identifier[notna] ( identifier[value] ):
keyword[if] identifier[abs] ( identifier[value] )> identifier[threshold] :
keyword[return] identifier[decimal_formatter] ( identifier[value] )
keyword[else] :
keyword[return] identifier[decimal_formatter] ( literal[int] )
keyword[else] :
keyword[return] identifier[self] . identifier[na_rep]
keyword[return] identifier[formatter]
|
def _value_formatter(self, float_format=None, threshold=None):
"""Returns a function to be applied on each value to format it
"""
# the float_format parameter supersedes self.float_format
if float_format is None:
float_format = self.float_format # depends on [control=['if'], data=['float_format']]
# we are going to compose different functions, to first convert to
# a string, then replace the decimal symbol, and finally chop according
# to the threshold
# when there is no float_format, we use str instead of '%g'
# because str(0.0) = '0.0' while '%g' % 0.0 = '0'
if float_format:
def base_formatter(v):
return float_format(value=v) if notna(v) else self.na_rep # depends on [control=['if'], data=[]]
else:
def base_formatter(v):
return str(v) if notna(v) else self.na_rep
if self.decimal != '.':
def decimal_formatter(v):
return base_formatter(v).replace('.', self.decimal, 1) # depends on [control=['if'], data=[]]
else:
decimal_formatter = base_formatter
if threshold is None:
return decimal_formatter # depends on [control=['if'], data=[]]
def formatter(value):
if notna(value):
if abs(value) > threshold:
return decimal_formatter(value) # depends on [control=['if'], data=[]]
else:
return decimal_formatter(0.0) # depends on [control=['if'], data=[]]
else:
return self.na_rep
return formatter
|
def seq_seguid(seq, normalize=True):
    """Return the seguid for sequence `seq`.

    This seguid is compatible with BioPython's seguid: the base64-encoded
    SHA-1 digest of the (optionally normalized) sequence, without padding.
    >>> seq_seguid('')
    '2jmj7l5rSw0yVb/vlWAYkK/YBwk'
    >>> seq_seguid('ACGT')
    'IQiZThf2zKn/I1KtqStlEdsHYDQ'
    >>> seq_seguid('acgt')
    'IQiZThf2zKn/I1KtqStlEdsHYDQ'
    >>> seq_seguid('acgt', normalize=False)
    'lII0AoG1/I8qKY271rgv5CFZtsU'
    """
    if normalize:
        seq = normalize_sequence(seq)
    digest = hashlib.sha1(seq.encode("ascii")).digest()
    return base64.b64encode(digest).decode("ascii").rstrip("=")
|
def function[seq_seguid, parameter[seq, normalize]]:
constant[returns seguid for sequence `seq`
This seguid is compatible with BioPython's seguid.
>>> seq_seguid('')
'2jmj7l5rSw0yVb/vlWAYkK/YBwk'
>>> seq_seguid('ACGT')
'IQiZThf2zKn/I1KtqStlEdsHYDQ'
>>> seq_seguid('acgt')
'IQiZThf2zKn/I1KtqStlEdsHYDQ'
>>> seq_seguid('acgt', normalize=False)
'lII0AoG1/I8qKY271rgv5CFZtsU'
]
variable[seq] assign[=] <ast.IfExp object at 0x7da2047e8130>
variable[bseq] assign[=] call[name[seq].encode, parameter[constant[ascii]]]
return[call[call[call[name[base64].b64encode, parameter[call[call[name[hashlib].sha1, parameter[name[bseq]]].digest, parameter[]]]].decode, parameter[constant[ascii]]].rstrip, parameter[constant[=]]]]
|
keyword[def] identifier[seq_seguid] ( identifier[seq] , identifier[normalize] = keyword[True] ):
literal[string]
identifier[seq] = identifier[normalize_sequence] ( identifier[seq] ) keyword[if] identifier[normalize] keyword[else] identifier[seq]
identifier[bseq] = identifier[seq] . identifier[encode] ( literal[string] )
keyword[return] identifier[base64] . identifier[b64encode] ( identifier[hashlib] . identifier[sha1] ( identifier[bseq] ). identifier[digest] ()). identifier[decode] ( literal[string] ). identifier[rstrip] (
literal[string] )
|
def seq_seguid(seq, normalize=True):
"""returns seguid for sequence `seq`
This seguid is compatible with BioPython's seguid.
>>> seq_seguid('')
'2jmj7l5rSw0yVb/vlWAYkK/YBwk'
>>> seq_seguid('ACGT')
'IQiZThf2zKn/I1KtqStlEdsHYDQ'
>>> seq_seguid('acgt')
'IQiZThf2zKn/I1KtqStlEdsHYDQ'
>>> seq_seguid('acgt', normalize=False)
'lII0AoG1/I8qKY271rgv5CFZtsU'
"""
seq = normalize_sequence(seq) if normalize else seq
bseq = seq.encode('ascii')
return base64.b64encode(hashlib.sha1(bseq).digest()).decode('ascii').rstrip('=')
|
def reset_parcov(self, arg=None):
    """Clear the cached parcov attribute.

    Parameters
    ----------
    arg : str or pyemu.Matrix
        value to assign to the parcov attribute.  If None, the private
        __parcov attribute is cleared but not reset.
    """
    self.logger.statement("resetting parcov")
    self.__parcov = None
    if arg is None:
        return
    self.parcov_arg = arg
|
def function[reset_parcov, parameter[self, arg]]:
constant[reset the parcov attribute to None
Parameters
----------
arg : str or pyemu.Matrix
the value to assign to the parcov attribute. If None,
the private __parcov attribute is cleared but not reset
]
call[name[self].logger.statement, parameter[constant[resetting parcov]]]
name[self].__parcov assign[=] constant[None]
if compare[name[arg] is_not constant[None]] begin[:]
name[self].parcov_arg assign[=] name[arg]
|
keyword[def] identifier[reset_parcov] ( identifier[self] , identifier[arg] = keyword[None] ):
literal[string]
identifier[self] . identifier[logger] . identifier[statement] ( literal[string] )
identifier[self] . identifier[__parcov] = keyword[None]
keyword[if] identifier[arg] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[parcov_arg] = identifier[arg]
|
def reset_parcov(self, arg=None):
"""reset the parcov attribute to None
Parameters
----------
arg : str or pyemu.Matrix
the value to assign to the parcov attribute. If None,
the private __parcov attribute is cleared but not reset
"""
self.logger.statement('resetting parcov')
self.__parcov = None
if arg is not None:
self.parcov_arg = arg # depends on [control=['if'], data=['arg']]
|
def _read_xlsx_table(path):
    """Read the active sheet of an XLSX file into a list of dictionaries."""
    active_sheet = pyxl.load_workbook(path).active
    return helpers.sheet_to_table(active_sheet)
|
def function[_read_xlsx_table, parameter[path]]:
constant[Lee la hoja activa de un archivo XLSX a una lista de diccionarios.]
variable[workbook] assign[=] call[name[pyxl].load_workbook, parameter[name[path]]]
variable[worksheet] assign[=] name[workbook].active
variable[table] assign[=] call[name[helpers].sheet_to_table, parameter[name[worksheet]]]
return[name[table]]
|
keyword[def] identifier[_read_xlsx_table] ( identifier[path] ):
literal[string]
identifier[workbook] = identifier[pyxl] . identifier[load_workbook] ( identifier[path] )
identifier[worksheet] = identifier[workbook] . identifier[active]
identifier[table] = identifier[helpers] . identifier[sheet_to_table] ( identifier[worksheet] )
keyword[return] identifier[table]
|
def _read_xlsx_table(path):
"""Lee la hoja activa de un archivo XLSX a una lista de diccionarios."""
workbook = pyxl.load_workbook(path)
worksheet = workbook.active
table = helpers.sheet_to_table(worksheet)
return table
|
def siamese_cosine_loss(left, right, y, scope="cosine_loss"):
    r"""Cosine-similarity loss for Siamese networks.

    Same as :func:`contrastive_loss` but with a cosine similarity measure:

    .. math::
        [\frac{l \cdot r}{\lVert l\rVert \lVert r\rVert} - (2y-1)]^2

    Args:
        left (tf.Tensor): left feature vectors of shape [Batch, N].
        right (tf.Tensor): right feature vectors of shape [Batch, N].
        y (tf.Tensor): binary labels of shape [Batch]. 1: similar, 0: not similar.

    Returns:
        tf.Tensor: cosine-loss as a scalar tensor.
    """
    def row_l2(t, eps=1e-12):
        """Per-row L2 norm of a 2D tensor (axis 1), stabilized by *eps*."""
        with tf.name_scope("l2_norm"):
            return tf.sqrt(tf.reduce_sum(tf.square(t), 1) + eps)

    with tf.name_scope(scope):
        # Map the {0, 1} labels onto {-1, +1} targets.
        target = 2 * tf.cast(y, tf.float32) - 1
        # Cosine similarity, guarded against a zero denominator.
        cosine = tf.reduce_sum(left * right, 1) / (row_l2(left) * row_l2(right) + 1e-10)
        batch_size = tf.cast(tf.shape(left)[0], tf.float32)
        return tf.nn.l2_loss(target - cosine) / batch_size
|
def function[siamese_cosine_loss, parameter[left, right, y, scope]]:
constant[Loss for Siamese networks (cosine version).
Same as :func:`contrastive_loss` but with different similarity measurement.
.. math::
[\frac{l \cdot r}{\lVert l\rVert \lVert r\rVert} - (2y-1)]^2
Args:
left (tf.Tensor): left feature vectors of shape [Batch, N].
right (tf.Tensor): right feature vectors of shape [Batch, N].
y (tf.Tensor): binary labels of shape [Batch]. 1: similar, 0: not similar.
Returns:
tf.Tensor: cosine-loss as a scalar tensor.
]
def function[l2_norm, parameter[t, eps]]:
constant[
Returns:
tf.Tensor: norm of 2D input tensor on axis 1
]
with call[name[tf].name_scope, parameter[constant[l2_norm]]] begin[:]
return[call[name[tf].sqrt, parameter[binary_operation[call[name[tf].reduce_sum, parameter[call[name[tf].square, parameter[name[t]]], constant[1]]] + name[eps]]]]]
with call[name[tf].name_scope, parameter[name[scope]]] begin[:]
variable[y] assign[=] binary_operation[binary_operation[constant[2] * call[name[tf].cast, parameter[name[y], name[tf].float32]]] - constant[1]]
variable[pred] assign[=] binary_operation[call[name[tf].reduce_sum, parameter[binary_operation[name[left] * name[right]], constant[1]]] / binary_operation[binary_operation[call[name[l2_norm], parameter[name[left]]] * call[name[l2_norm], parameter[name[right]]]] + constant[1e-10]]]
return[binary_operation[call[name[tf].nn.l2_loss, parameter[binary_operation[name[y] - name[pred]]]] / call[name[tf].cast, parameter[call[call[name[tf].shape, parameter[name[left]]]][constant[0]], name[tf].float32]]]]
|
keyword[def] identifier[siamese_cosine_loss] ( identifier[left] , identifier[right] , identifier[y] , identifier[scope] = literal[string] ):
literal[string]
keyword[def] identifier[l2_norm] ( identifier[t] , identifier[eps] = literal[int] ):
literal[string]
keyword[with] identifier[tf] . identifier[name_scope] ( literal[string] ):
keyword[return] identifier[tf] . identifier[sqrt] ( identifier[tf] . identifier[reduce_sum] ( identifier[tf] . identifier[square] ( identifier[t] ), literal[int] )+ identifier[eps] )
keyword[with] identifier[tf] . identifier[name_scope] ( identifier[scope] ):
identifier[y] = literal[int] * identifier[tf] . identifier[cast] ( identifier[y] , identifier[tf] . identifier[float32] )- literal[int]
identifier[pred] = identifier[tf] . identifier[reduce_sum] ( identifier[left] * identifier[right] , literal[int] )/( identifier[l2_norm] ( identifier[left] )* identifier[l2_norm] ( identifier[right] )+ literal[int] )
keyword[return] identifier[tf] . identifier[nn] . identifier[l2_loss] ( identifier[y] - identifier[pred] )/ identifier[tf] . identifier[cast] ( identifier[tf] . identifier[shape] ( identifier[left] )[ literal[int] ], identifier[tf] . identifier[float32] )
|
def siamese_cosine_loss(left, right, y, scope='cosine_loss'):
"""Loss for Siamese networks (cosine version).
Same as :func:`contrastive_loss` but with different similarity measurement.
.. math::
[\\frac{l \\cdot r}{\\lVert l\\rVert \\lVert r\\rVert} - (2y-1)]^2
Args:
left (tf.Tensor): left feature vectors of shape [Batch, N].
right (tf.Tensor): right feature vectors of shape [Batch, N].
y (tf.Tensor): binary labels of shape [Batch]. 1: similar, 0: not similar.
Returns:
tf.Tensor: cosine-loss as a scalar tensor.
"""
def l2_norm(t, eps=1e-12):
"""
Returns:
tf.Tensor: norm of 2D input tensor on axis 1
"""
with tf.name_scope('l2_norm'):
return tf.sqrt(tf.reduce_sum(tf.square(t), 1) + eps) # depends on [control=['with'], data=[]]
with tf.name_scope(scope):
y = 2 * tf.cast(y, tf.float32) - 1
pred = tf.reduce_sum(left * right, 1) / (l2_norm(left) * l2_norm(right) + 1e-10)
return tf.nn.l2_loss(y - pred) / tf.cast(tf.shape(left)[0], tf.float32) # depends on [control=['with'], data=[]]
|
def export(self, name, columns, points):
    """Write the points to the Prometheus exporter using Gauge.

    :param name: plugin/stats name; becomes part of the metric name.
    :param columns: iterable of stat keys, zipped with ``points``.
    :param points: iterable of stat values; only numeric values are exported.
    """
    logger.debug("Export {} stats to Prometheus exporter".format(name))
    # Remove non number stats and convert all to float (for Boolean)
    data = {k: float(v) for (k, v) in iteritems(dict(zip(columns, points))) if isinstance(v, Number)}
    # Write metrics to the Prometheus exporter
    for k, v in iteritems(data):
        # Prometheus metric name: prefix_<glances stats name>
        metric_name = self.prefix + self.METRIC_SEPARATOR + str(name) + self.METRIC_SEPARATOR + str(k)
        # Prometheus is very sensible to the metric name
        # See: https://prometheus.io/docs/practices/naming/
        # (replace every character Prometheus rejects with the separator)
        for c in ['.', '-', '/', ' ']:
            metric_name = metric_name.replace(c, self.METRIC_SEPARATOR)
        # Get the labels
        labels = self.parse_tags(self.labels)
        # Manage an internal dict between metric name and Gauge
        # (a Gauge may only be registered once per metric name)
        if metric_name not in self._metric_dict:
            self._metric_dict[metric_name] = Gauge(metric_name, k,
                                                   labelnames=listkeys(labels))
        # Write the value
        if hasattr(self._metric_dict[metric_name], 'labels'):
            # Add the labels (see issue #1255)
            self._metric_dict[metric_name].labels(**labels).set(v)
        else:
            self._metric_dict[metric_name].set(v)
|
def function[export, parameter[self, name, columns, points]]:
constant[Write the points to the Prometheus exporter using Gauge.]
call[name[logger].debug, parameter[call[constant[Export {} stats to Prometheus exporter].format, parameter[name[name]]]]]
variable[data] assign[=] <ast.DictComp object at 0x7da20c7c8790>
for taget[tuple[[<ast.Name object at 0x7da20c7c8460>, <ast.Name object at 0x7da20c7c8e50>]]] in starred[call[name[iteritems], parameter[name[data]]]] begin[:]
variable[metric_name] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[self].prefix + name[self].METRIC_SEPARATOR] + call[name[str], parameter[name[name]]]] + name[self].METRIC_SEPARATOR] + call[name[str], parameter[name[k]]]]
for taget[name[c]] in starred[list[[<ast.Constant object at 0x7da18f09de10>, <ast.Constant object at 0x7da18f09e5c0>, <ast.Constant object at 0x7da18f09f520>, <ast.Constant object at 0x7da18f09e860>]]] begin[:]
variable[metric_name] assign[=] call[name[metric_name].replace, parameter[name[c], name[self].METRIC_SEPARATOR]]
variable[labels] assign[=] call[name[self].parse_tags, parameter[name[self].labels]]
if compare[name[metric_name] <ast.NotIn object at 0x7da2590d7190> name[self]._metric_dict] begin[:]
call[name[self]._metric_dict][name[metric_name]] assign[=] call[name[Gauge], parameter[name[metric_name], name[k]]]
if call[name[hasattr], parameter[call[name[self]._metric_dict][name[metric_name]], constant[labels]]] begin[:]
call[call[call[name[self]._metric_dict][name[metric_name]].labels, parameter[]].set, parameter[name[v]]]
|
keyword[def] identifier[export] ( identifier[self] , identifier[name] , identifier[columns] , identifier[points] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[name] ))
identifier[data] ={ identifier[k] : identifier[float] ( identifier[v] ) keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[iteritems] ( identifier[dict] ( identifier[zip] ( identifier[columns] , identifier[points] ))) keyword[if] identifier[isinstance] ( identifier[v] , identifier[Number] )}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[data] ):
identifier[metric_name] = identifier[self] . identifier[prefix] + identifier[self] . identifier[METRIC_SEPARATOR] + identifier[str] ( identifier[name] )+ identifier[self] . identifier[METRIC_SEPARATOR] + identifier[str] ( identifier[k] )
keyword[for] identifier[c] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[metric_name] = identifier[metric_name] . identifier[replace] ( identifier[c] , identifier[self] . identifier[METRIC_SEPARATOR] )
identifier[labels] = identifier[self] . identifier[parse_tags] ( identifier[self] . identifier[labels] )
keyword[if] identifier[metric_name] keyword[not] keyword[in] identifier[self] . identifier[_metric_dict] :
identifier[self] . identifier[_metric_dict] [ identifier[metric_name] ]= identifier[Gauge] ( identifier[metric_name] , identifier[k] ,
identifier[labelnames] = identifier[listkeys] ( identifier[labels] ))
keyword[if] identifier[hasattr] ( identifier[self] . identifier[_metric_dict] [ identifier[metric_name] ], literal[string] ):
identifier[self] . identifier[_metric_dict] [ identifier[metric_name] ]. identifier[labels] (** identifier[labels] ). identifier[set] ( identifier[v] )
keyword[else] :
identifier[self] . identifier[_metric_dict] [ identifier[metric_name] ]. identifier[set] ( identifier[v] )
|
def export(self, name, columns, points):
"""Write the points to the Prometheus exporter using Gauge."""
logger.debug('Export {} stats to Prometheus exporter'.format(name))
# Remove non number stats and convert all to float (for Boolean)
data = {k: float(v) for (k, v) in iteritems(dict(zip(columns, points))) if isinstance(v, Number)}
# Write metrics to the Prometheus exporter
for (k, v) in iteritems(data):
# Prometheus metric name: prefix_<glances stats name>
metric_name = self.prefix + self.METRIC_SEPARATOR + str(name) + self.METRIC_SEPARATOR + str(k)
# Prometheus is very sensible to the metric name
# See: https://prometheus.io/docs/practices/naming/
for c in ['.', '-', '/', ' ']:
metric_name = metric_name.replace(c, self.METRIC_SEPARATOR) # depends on [control=['for'], data=['c']]
# Get the labels
labels = self.parse_tags(self.labels)
# Manage an internal dict between metric name and Gauge
if metric_name not in self._metric_dict:
self._metric_dict[metric_name] = Gauge(metric_name, k, labelnames=listkeys(labels)) # depends on [control=['if'], data=['metric_name']]
# Write the value
if hasattr(self._metric_dict[metric_name], 'labels'):
# Add the labels (see issue #1255)
self._metric_dict[metric_name].labels(**labels).set(v) # depends on [control=['if'], data=[]]
else:
self._metric_dict[metric_name].set(v) # depends on [control=['for'], data=[]]
|
def calc_columns_rows(n):
    """
    Calculate the number of columns and rows required to divide an image
    into ``n`` parts.

    Columns are ``ceil(sqrt(n))`` (as square a grid as possible); rows are
    however many are then needed to hold ``n`` parts.

    :param n: positive number of parts.
    :return: tuple of integers in the format (num_columns, num_rows)
    :raises ValueError: if ``n`` is less than 1 (the original code raised a
        bare ZeroDivisionError for ``n == 0``).
    """
    if n < 1:
        raise ValueError("n must be a positive integer, got {!r}".format(n))
    num_columns = int(ceil(sqrt(n)))
    # Integer ceiling division: exact even for n too large for float division.
    num_rows = -(-n // num_columns)
    return (num_columns, num_rows)
|
def function[calc_columns_rows, parameter[n]]:
constant[
Calculate the number of columns and rows required to divide an image
into ``n`` parts.
Return a tuple of integers in the format (num_columns, num_rows)
]
variable[num_columns] assign[=] call[name[int], parameter[call[name[ceil], parameter[call[name[sqrt], parameter[name[n]]]]]]]
variable[num_rows] assign[=] call[name[int], parameter[call[name[ceil], parameter[binary_operation[name[n] / call[name[float], parameter[name[num_columns]]]]]]]]
return[tuple[[<ast.Name object at 0x7da1b0656620>, <ast.Name object at 0x7da1b0656e00>]]]
|
keyword[def] identifier[calc_columns_rows] ( identifier[n] ):
literal[string]
identifier[num_columns] = identifier[int] ( identifier[ceil] ( identifier[sqrt] ( identifier[n] )))
identifier[num_rows] = identifier[int] ( identifier[ceil] ( identifier[n] / identifier[float] ( identifier[num_columns] )))
keyword[return] ( identifier[num_columns] , identifier[num_rows] )
|
def calc_columns_rows(n):
"""
Calculate the number of columns and rows required to divide an image
into ``n`` parts.
Return a tuple of integers in the format (num_columns, num_rows)
"""
num_columns = int(ceil(sqrt(n)))
num_rows = int(ceil(n / float(num_columns)))
return (num_columns, num_rows)
|
def whoami(self):
    """Return information about the current access token.

    Official docs:
        https://monzo.com/docs/#authenticating-requests

    :returns: access token details
    :rtype: dict
    """
    response = self._get_response(method='get', endpoint='/ping/whoami')
    return response.json()
|
def function[whoami, parameter[self]]:
constant[
Get information about the access token.
Official docs:
https://monzo.com/docs/#authenticating-requests
:returns: access token details
:rtype: dict
]
variable[endpoint] assign[=] constant[/ping/whoami]
variable[response] assign[=] call[name[self]._get_response, parameter[]]
return[call[name[response].json, parameter[]]]
|
keyword[def] identifier[whoami] ( identifier[self] ):
literal[string]
identifier[endpoint] = literal[string]
identifier[response] = identifier[self] . identifier[_get_response] (
identifier[method] = literal[string] , identifier[endpoint] = identifier[endpoint] ,
)
keyword[return] identifier[response] . identifier[json] ()
|
def whoami(self):
"""
Get information about the access token.
Official docs:
https://monzo.com/docs/#authenticating-requests
:returns: access token details
:rtype: dict
"""
endpoint = '/ping/whoami'
response = self._get_response(method='get', endpoint=endpoint)
return response.json()
|
def set_one(chainmap, thing_name, callobject):
    """Map every dotted-name suffix of *thing_name* to *callobject* in *chainmap*.

    For ``"a.b.c"`` the keys ``"c"``, ``"b.c"`` and ``"a.b.c"`` are all set,
    shortest suffix first, providing namespace-aware lookup.
    """
    parts = thing_name.split(".")
    for start in reversed(range(len(parts))):
        chainmap[".".join(parts[start:])] = callobject
|
def function[set_one, parameter[chainmap, thing_name, callobject]]:
constant[ Add a mapping with key thing_name for callobject in chainmap with
namespace handling.
]
variable[namespaces] assign[=] call[name[reversed], parameter[call[name[thing_name].split, parameter[constant[.]]]]]
variable[lstname] assign[=] list[[]]
for taget[name[name]] in starred[name[namespaces]] begin[:]
call[name[lstname].insert, parameter[constant[0], name[name]]]
variable[strname] assign[=] call[constant[.].join, parameter[name[lstname]]]
call[name[chainmap]][name[strname]] assign[=] name[callobject]
|
keyword[def] identifier[set_one] ( identifier[chainmap] , identifier[thing_name] , identifier[callobject] ):
literal[string]
identifier[namespaces] = identifier[reversed] ( identifier[thing_name] . identifier[split] ( literal[string] ))
identifier[lstname] =[]
keyword[for] identifier[name] keyword[in] identifier[namespaces] :
identifier[lstname] . identifier[insert] ( literal[int] , identifier[name] )
identifier[strname] = literal[string] . identifier[join] ( identifier[lstname] )
identifier[chainmap] [ identifier[strname] ]= identifier[callobject]
|
def set_one(chainmap, thing_name, callobject):
""" Add a mapping with key thing_name for callobject in chainmap with
namespace handling.
"""
namespaces = reversed(thing_name.split('.'))
lstname = []
for name in namespaces:
lstname.insert(0, name)
strname = '.'.join(lstname)
chainmap[strname] = callobject # depends on [control=['for'], data=['name']]
|
def _execute_pillar(pillar_name, run_type):
    '''
    Run one or more nagios plugins from pillar data and get the result of run_type
    The pillar have to be in this format:
    ------
    webserver:
        Ping_google:
            - check_icmp: 8.8.8.8
            - check_icmp: google.com
        Load:
            - check_load: -w 0.8 -c 1
        APT:
            - check_apt
    -------
    '''
    groups = __salt__['pillar.get'](pillar_name)
    data = {}
    for group in groups:
        checks = {}
        for command in groups[group]:
            # A dict entry carries the plugin name as key and its arguments
            # as value; a plain entry is a plugin run with no arguments.
            if isinstance(command, dict):
                plugin = next(six.iterkeys(command))
                args = command[plugin]
            else:
                plugin, args = command, ''
            checks[_format_dict_key(args, plugin)] = run_type(plugin, args)
        data[group] = checks
    return data
|
def function[_execute_pillar, parameter[pillar_name, run_type]]:
constant[
Run one or more nagios plugins from pillar data and get the result of run_type
The pillar have to be in this format:
------
webserver:
Ping_google:
- check_icmp: 8.8.8.8
- check_icmp: google.com
Load:
- check_load: -w 0.8 -c 1
APT:
- check_apt
-------
]
variable[groups] assign[=] call[call[name[__salt__]][constant[pillar.get]], parameter[name[pillar_name]]]
variable[data] assign[=] dictionary[[], []]
for taget[name[group]] in starred[name[groups]] begin[:]
call[name[data]][name[group]] assign[=] dictionary[[], []]
variable[commands] assign[=] call[name[groups]][name[group]]
for taget[name[command]] in starred[name[commands]] begin[:]
if call[name[isinstance], parameter[name[command], name[dict]]] begin[:]
variable[plugin] assign[=] call[name[next], parameter[call[name[six].iterkeys, parameter[name[command]]]]]
variable[args] assign[=] call[name[command]][name[plugin]]
variable[command_key] assign[=] call[name[_format_dict_key], parameter[name[args], name[plugin]]]
call[call[name[data]][name[group]]][name[command_key]] assign[=] call[name[run_type], parameter[name[plugin], name[args]]]
return[name[data]]
|
keyword[def] identifier[_execute_pillar] ( identifier[pillar_name] , identifier[run_type] ):
literal[string]
identifier[groups] = identifier[__salt__] [ literal[string] ]( identifier[pillar_name] )
identifier[data] ={}
keyword[for] identifier[group] keyword[in] identifier[groups] :
identifier[data] [ identifier[group] ]={}
identifier[commands] = identifier[groups] [ identifier[group] ]
keyword[for] identifier[command] keyword[in] identifier[commands] :
keyword[if] identifier[isinstance] ( identifier[command] , identifier[dict] ):
identifier[plugin] = identifier[next] ( identifier[six] . identifier[iterkeys] ( identifier[command] ))
identifier[args] = identifier[command] [ identifier[plugin] ]
keyword[else] :
identifier[plugin] = identifier[command]
identifier[args] = literal[string]
identifier[command_key] = identifier[_format_dict_key] ( identifier[args] , identifier[plugin] )
identifier[data] [ identifier[group] ][ identifier[command_key] ]= identifier[run_type] ( identifier[plugin] , identifier[args] )
keyword[return] identifier[data]
|
def _execute_pillar(pillar_name, run_type):
    """
    Run one or more nagios plugins from pillar data and get the result of run_type
    The pillar have to be in this format:
    ------
    webserver:
        Ping_google:
            - check_icmp: 8.8.8.8
            - check_icmp: google.com
        Load:
            - check_load: -w 0.8 -c 1
        APT:
            - check_apt
    -------
    """
    # Pillar layout: {group: [plugin_name | {plugin_name: args_string}, ...]}
    groups = __salt__['pillar.get'](pillar_name)
    # Result layout mirrors the pillar: {group: {command_key: run_type_result}}
    data = {}
    for group in groups:
        data[group] = {}
        commands = groups[group]
        for command in commands:
            # Check if is a dict to get the arguments
            # in command if not set the arguments to empty string
            if isinstance(command, dict):
                # Single-key dict: the key is the plugin, the value its args.
                plugin = next(six.iterkeys(command))
                args = command[plugin] # depends on [control=['if'], data=[]]
            else:
                # Bare string entry: plugin with no arguments.
                plugin = command
                args = ''
            # command_key disambiguates multiple invocations of the same
            # plugin with different args within one group.
            command_key = _format_dict_key(args, plugin)
            data[group][command_key] = run_type(plugin, args) # depends on [control=['for'], data=['command']] # depends on [control=['for'], data=['group']]
    return data
|
def system_call(command):
    """Run *command* through the shell and return its stdout as bytes.

    Would be better to use subprocess.check_output, but this works on 2.6,
    which is still the system Python on CentOS 7.

    :param command: shell command string to execute (passed to ``sh -c``).
    :return: raw bytes captured from the command's standard output.
    """
    p = subprocess.Popen([command], stdout=subprocess.PIPE, shell=True)
    # communicate() reads stdout to EOF *and* reaps the child process;
    # the previous p.stdout.read() never waited on the child, leaving a
    # zombie process behind for the life of the interpreter.
    out, _ = p.communicate()
    return out
|
def function[system_call, parameter[command]]:
constant[Run a command and return stdout.
Would be better to use subprocess.check_output, but this works on 2.6,
which is still the system Python on CentOS 7.]
variable[p] assign[=] call[name[subprocess].Popen, parameter[list[[<ast.Name object at 0x7da18c4cdff0>]]]]
return[call[name[p].stdout.read, parameter[]]]
|
keyword[def] identifier[system_call] ( identifier[command] ):
literal[string]
identifier[p] = identifier[subprocess] . identifier[Popen] ([ identifier[command] ], identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[shell] = keyword[True] )
keyword[return] identifier[p] . identifier[stdout] . identifier[read] ()
|
def system_call(command):
"""Run a command and return stdout.
Would be better to use subprocess.check_output, but this works on 2.6,
which is still the system Python on CentOS 7."""
p = subprocess.Popen([command], stdout=subprocess.PIPE, shell=True)
return p.stdout.read()
|
def query(self, method_verb, endpoint, authenticate=False,
          *args, **kwargs):
    """
    Queries exchange using given data. Defaults to unauthenticated query.
    :param method_verb: valid request type (PUT, GET, POST etc)
    :param endpoint: endpoint path for the resource to query, sans the url &
        API version (i.e. '/btcusd/ticker/').
    :param authenticate: Bool to determine whether or not a signature is
        required.
    :param args: Optional args for requests.request()
    :param kwargs: Optional Kwargs for self.sign() and requests.request()
    :return: request.response() obj
    """
    # Prefix the endpoint with the API version, if one is configured.
    if self.version:
        endpoint_path = join(self.version, endpoint)
    else:
        endpoint_path = endpoint
    url = urljoin(self.uri, endpoint_path)
    if authenticate: # sign off kwargs and url before sending request
        # sign() may rewrite both the URL and the request kwargs (e.g. to
        # inject auth headers/nonce), so both returned values are used below.
        url, request_kwargs = self.sign(url, endpoint, endpoint_path,
                                        method_verb, *args, **kwargs)
    else:
        request_kwargs = kwargs
    log.debug("Making request to: %s, kwargs: %s", url, request_kwargs)
    # Delegate the actual HTTP call; self.timeout bounds the request.
    r = self.api_request(method_verb, url, timeout=self.timeout,
                         **request_kwargs)
    log.debug("Made %s request made to %s, with headers %s and body %s. "
              "Status code %s", r.request.method,
              r.request.url, r.request.headers,
              r.request.body, r.status_code)
    return r
|
def function[query, parameter[self, method_verb, endpoint, authenticate]]:
constant[
Queries exchange using given data. Defaults to unauthenticated query.
:param method_verb: valid request type (PUT, GET, POST etc)
:param endpoint: endpoint path for the resource to query, sans the url &
API version (i.e. '/btcusd/ticker/').
:param authenticate: Bool to determine whether or not a signature is
required.
:param args: Optional args for requests.request()
:param kwargs: Optional Kwargs for self.sign() and requests.request()
:return: request.response() obj
]
if name[self].version begin[:]
variable[endpoint_path] assign[=] call[name[join], parameter[name[self].version, name[endpoint]]]
variable[url] assign[=] call[name[urljoin], parameter[name[self].uri, name[endpoint_path]]]
if name[authenticate] begin[:]
<ast.Tuple object at 0x7da18f58c460> assign[=] call[name[self].sign, parameter[name[url], name[endpoint], name[endpoint_path], name[method_verb], <ast.Starred object at 0x7da18f58e020>]]
call[name[log].debug, parameter[constant[Making request to: %s, kwargs: %s], name[url], name[request_kwargs]]]
variable[r] assign[=] call[name[self].api_request, parameter[name[method_verb], name[url]]]
call[name[log].debug, parameter[constant[Made %s request made to %s, with headers %s and body %s. Status code %s], name[r].request.method, name[r].request.url, name[r].request.headers, name[r].request.body, name[r].status_code]]
return[name[r]]
|
keyword[def] identifier[query] ( identifier[self] , identifier[method_verb] , identifier[endpoint] , identifier[authenticate] = keyword[False] ,
* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[self] . identifier[version] :
identifier[endpoint_path] = identifier[join] ( identifier[self] . identifier[version] , identifier[endpoint] )
keyword[else] :
identifier[endpoint_path] = identifier[endpoint]
identifier[url] = identifier[urljoin] ( identifier[self] . identifier[uri] , identifier[endpoint_path] )
keyword[if] identifier[authenticate] :
identifier[url] , identifier[request_kwargs] = identifier[self] . identifier[sign] ( identifier[url] , identifier[endpoint] , identifier[endpoint_path] ,
identifier[method_verb] ,* identifier[args] ,** identifier[kwargs] )
keyword[else] :
identifier[request_kwargs] = identifier[kwargs]
identifier[log] . identifier[debug] ( literal[string] , identifier[url] , identifier[request_kwargs] )
identifier[r] = identifier[self] . identifier[api_request] ( identifier[method_verb] , identifier[url] , identifier[timeout] = identifier[self] . identifier[timeout] ,
** identifier[request_kwargs] )
identifier[log] . identifier[debug] ( literal[string]
literal[string] , identifier[r] . identifier[request] . identifier[method] ,
identifier[r] . identifier[request] . identifier[url] , identifier[r] . identifier[request] . identifier[headers] ,
identifier[r] . identifier[request] . identifier[body] , identifier[r] . identifier[status_code] )
keyword[return] identifier[r]
|
def query(self, method_verb, endpoint, authenticate=False, *args, **kwargs):
"""
Queries exchange using given data. Defaults to unauthenticated query.
:param method_verb: valid request type (PUT, GET, POST etc)
:param endpoint: endpoint path for the resource to query, sans the url &
API version (i.e. '/btcusd/ticker/').
:param authenticate: Bool to determine whether or not a signature is
required.
:param args: Optional args for requests.request()
:param kwargs: Optional Kwargs for self.sign() and requests.request()
:return: request.response() obj
"""
if self.version:
endpoint_path = join(self.version, endpoint) # depends on [control=['if'], data=[]]
else:
endpoint_path = endpoint
url = urljoin(self.uri, endpoint_path)
if authenticate: # sign off kwargs and url before sending request
(url, request_kwargs) = self.sign(url, endpoint, endpoint_path, method_verb, *args, **kwargs) # depends on [control=['if'], data=[]]
else:
request_kwargs = kwargs
log.debug('Making request to: %s, kwargs: %s', url, request_kwargs)
r = self.api_request(method_verb, url, timeout=self.timeout, **request_kwargs)
log.debug('Made %s request made to %s, with headers %s and body %s. Status code %s', r.request.method, r.request.url, r.request.headers, r.request.body, r.status_code)
return r
|
def insert(self, cache_key, paths, overwrite=False):
    """Cache the output of a build.

    By default, checks cache.has(key) first, only proceeding to create and
    insert an artifact if it is not already in the cache (though `overwrite`
    can be used to skip the check and unconditionally insert).

    :param CacheKey cache_key: A CacheKey object.
    :param list<str> paths: List of absolute paths to generated dirs/files.
      These must be under the artifact_root.
    :param bool overwrite: Skip check for existing, insert even if already in cache.
    """
    # Refuse to cache anything that references files which don't exist.
    missing_files = [path for path in paths if not os.path.exists(path)]
    if missing_files:
        raise ArtifactCacheError('Tried to cache nonexistent files {0}'.format(missing_files))
    # Unless forced, bail out early when the artifact is already cached.
    if not overwrite and self.has(cache_key):
        logger.debug('Skipping insert of existing artifact: {0}'.format(cache_key))
        return False
    try:
        self.try_insert(cache_key, paths)
    except NonfatalArtifactCacheError as e:
        # Cache write failures are non-fatal by design: log and report False.
        logger.error('Error while writing to artifact cache: {0}'.format(e))
        return False
    return True
|
def function[insert, parameter[self, cache_key, paths, overwrite]]:
constant[Cache the output of a build.
By default, checks cache.has(key) first, only proceeding to create and insert an artifact
if it is not already in the cache (though `overwrite` can be used to skip the check and
unconditionally insert).
:param CacheKey cache_key: A CacheKey object.
:param list<str> paths: List of absolute paths to generated dirs/files.
These must be under the artifact_root.
:param bool overwrite: Skip check for existing, insert even if already in cache.
]
variable[missing_files] assign[=] <ast.ListComp object at 0x7da1b1e8d840>
if name[missing_files] begin[:]
<ast.Raise object at 0x7da1b1e8e290>
if <ast.UnaryOp object at 0x7da1b1e8e2f0> begin[:]
if call[name[self].has, parameter[name[cache_key]]] begin[:]
call[name[logger].debug, parameter[call[constant[Skipping insert of existing artifact: {0}].format, parameter[name[cache_key]]]]]
return[constant[False]]
<ast.Try object at 0x7da1b1e8d330>
|
keyword[def] identifier[insert] ( identifier[self] , identifier[cache_key] , identifier[paths] , identifier[overwrite] = keyword[False] ):
literal[string]
identifier[missing_files] =[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[paths] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[f] )]
keyword[if] identifier[missing_files] :
keyword[raise] identifier[ArtifactCacheError] ( literal[string] . identifier[format] ( identifier[missing_files] ))
keyword[if] keyword[not] identifier[overwrite] :
keyword[if] identifier[self] . identifier[has] ( identifier[cache_key] ):
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[cache_key] ))
keyword[return] keyword[False]
keyword[try] :
identifier[self] . identifier[try_insert] ( identifier[cache_key] , identifier[paths] )
keyword[return] keyword[True]
keyword[except] identifier[NonfatalArtifactCacheError] keyword[as] identifier[e] :
identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[e] ))
keyword[return] keyword[False]
|
def insert(self, cache_key, paths, overwrite=False):
"""Cache the output of a build.
By default, checks cache.has(key) first, only proceeding to create and insert an artifact
if it is not already in the cache (though `overwrite` can be used to skip the check and
unconditionally insert).
:param CacheKey cache_key: A CacheKey object.
:param list<str> paths: List of absolute paths to generated dirs/files.
These must be under the artifact_root.
:param bool overwrite: Skip check for existing, insert even if already in cache.
"""
missing_files = [f for f in paths if not os.path.exists(f)]
if missing_files:
raise ArtifactCacheError('Tried to cache nonexistent files {0}'.format(missing_files)) # depends on [control=['if'], data=[]]
if not overwrite:
if self.has(cache_key):
logger.debug('Skipping insert of existing artifact: {0}'.format(cache_key))
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
try:
self.try_insert(cache_key, paths)
return True # depends on [control=['try'], data=[]]
except NonfatalArtifactCacheError as e:
logger.error('Error while writing to artifact cache: {0}'.format(e))
return False # depends on [control=['except'], data=['e']]
|
def predict(self, X, y=None, output="margin", tree_limit=None):
    """ A consistent interface to make predictions from this model.
    Parameters
    ----------
    tree_limit : None (default) or int
        Limit the number of trees used by the model. By default None means no use the limit of the
        original model, and -1 means no limit.
    """
    # see if we have a default tree_limit in place.
    if tree_limit is None:
        tree_limit = -1 if self.tree_limit is None else self.tree_limit
    # convert dataframes
    if str(type(X)).endswith("pandas.core.series.Series'>"):
        X = X.values
    elif str(type(X)).endswith("pandas.core.frame.DataFrame'>"):
        X = X.values
    # Promote a single sample to a 1-row matrix; remember to squeeze back later.
    flat_output = False
    if len(X.shape) == 1:
        flat_output = True
        X = X.reshape(1, X.shape[0])
    if X.dtype != self.dtype:
        X = X.astype(self.dtype)
    # NaN mask — presumably used by the C extension to route missing values
    # down the default child branches (TODO confirm against _cext docs).
    # NOTE(review): np.bool is removed in NumPy >= 1.24; left as-is here.
    X_missing = np.isnan(X, dtype=np.bool)
    assert str(type(X)).endswith("'numpy.ndarray'>"), "Unknown instance type: " + str(type(X))
    assert len(X.shape) == 2, "Passed input data matrix X must have 1 or 2 dimensions!"
    # Clamp tree_limit to the number of trees actually stored (-1 means "all").
    if tree_limit < 0 or tree_limit > self.values.shape[0]:
        tree_limit = self.values.shape[0]
    # Explaining the loss requires labels matching the samples one-to-one.
    if output == "logloss":
        assert y is not None, "Both samples and labels must be provided when explaining the loss (i.e. `explainer.shap_values(X, y)`)!"
        assert X.shape[0] == len(y), "The number of labels (%d) does not match the number of samples to explain (%d)!" % (len(y), X.shape[0])
    transform = self.get_transform(output)
    # NOTE(review): the "True or" short-circuit forces the internal
    # C-extension path unconditionally; the "xgboost" branch below is
    # currently unreachable dead code — confirm whether this is intentional.
    if True or self.model_type == "internal":
        # `output` is rebound from the transform name to the result buffer,
        # filled in place by the C extension.
        output = np.zeros((X.shape[0], self.n_outputs))
        assert_import("cext")
        _cext.dense_tree_predict(
            self.children_left, self.children_right, self.children_default,
            self.features, self.thresholds, self.values,
            self.max_depth, tree_limit, self.base_offset, output_transform_codes[transform],
            X, X_missing, y, output
        )
    elif self.model_type == "xgboost":
        assert_import("xgboost")
        output = self.original_model.predict(X, output_margin=True, tree_limit=tree_limit)
    # drop dimensions we don't need
    if flat_output:
        if self.n_outputs == 1:
            return output.flatten()[0]
        else:
            return output.reshape(-1, self.n_outputs)
    else:
        if self.n_outputs == 1:
            return output.flatten()
        else:
            return output
|
def function[predict, parameter[self, X, y, output, tree_limit]]:
constant[ A consistent interface to make predictions from this model.
Parameters
----------
tree_limit : None (default) or int
Limit the number of trees used by the model. By default None means no use the limit of the
original model, and -1 means no limit.
]
if compare[name[tree_limit] is constant[None]] begin[:]
variable[tree_limit] assign[=] <ast.IfExp object at 0x7da20c6a9570>
if call[call[name[str], parameter[call[name[type], parameter[name[X]]]]].endswith, parameter[constant[pandas.core.series.Series'>]]] begin[:]
variable[X] assign[=] name[X].values
variable[flat_output] assign[=] constant[False]
if compare[call[name[len], parameter[name[X].shape]] equal[==] constant[1]] begin[:]
variable[flat_output] assign[=] constant[True]
variable[X] assign[=] call[name[X].reshape, parameter[constant[1], call[name[X].shape][constant[0]]]]
if compare[name[X].dtype not_equal[!=] name[self].dtype] begin[:]
variable[X] assign[=] call[name[X].astype, parameter[name[self].dtype]]
variable[X_missing] assign[=] call[name[np].isnan, parameter[name[X]]]
assert[call[call[name[str], parameter[call[name[type], parameter[name[X]]]]].endswith, parameter[constant['numpy.ndarray'>]]]]
assert[compare[call[name[len], parameter[name[X].shape]] equal[==] constant[2]]]
if <ast.BoolOp object at 0x7da1b1fa4c10> begin[:]
variable[tree_limit] assign[=] call[name[self].values.shape][constant[0]]
if compare[name[output] equal[==] constant[logloss]] begin[:]
assert[compare[name[y] is_not constant[None]]]
assert[compare[call[name[X].shape][constant[0]] equal[==] call[name[len], parameter[name[y]]]]]
variable[transform] assign[=] call[name[self].get_transform, parameter[name[output]]]
if <ast.BoolOp object at 0x7da1b1fa45b0> begin[:]
variable[output] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Subscript object at 0x7da1b1fa48b0>, <ast.Attribute object at 0x7da1b1fa5240>]]]]
call[name[assert_import], parameter[constant[cext]]]
call[name[_cext].dense_tree_predict, parameter[name[self].children_left, name[self].children_right, name[self].children_default, name[self].features, name[self].thresholds, name[self].values, name[self].max_depth, name[tree_limit], name[self].base_offset, call[name[output_transform_codes]][name[transform]], name[X], name[X_missing], name[y], name[output]]]
if name[flat_output] begin[:]
if compare[name[self].n_outputs equal[==] constant[1]] begin[:]
return[call[call[name[output].flatten, parameter[]]][constant[0]]]
|
keyword[def] identifier[predict] ( identifier[self] , identifier[X] , identifier[y] = keyword[None] , identifier[output] = literal[string] , identifier[tree_limit] = keyword[None] ):
literal[string]
keyword[if] identifier[tree_limit] keyword[is] keyword[None] :
identifier[tree_limit] =- literal[int] keyword[if] identifier[self] . identifier[tree_limit] keyword[is] keyword[None] keyword[else] identifier[self] . identifier[tree_limit]
keyword[if] identifier[str] ( identifier[type] ( identifier[X] )). identifier[endswith] ( literal[string] ):
identifier[X] = identifier[X] . identifier[values]
keyword[elif] identifier[str] ( identifier[type] ( identifier[X] )). identifier[endswith] ( literal[string] ):
identifier[X] = identifier[X] . identifier[values]
identifier[flat_output] = keyword[False]
keyword[if] identifier[len] ( identifier[X] . identifier[shape] )== literal[int] :
identifier[flat_output] = keyword[True]
identifier[X] = identifier[X] . identifier[reshape] ( literal[int] , identifier[X] . identifier[shape] [ literal[int] ])
keyword[if] identifier[X] . identifier[dtype] != identifier[self] . identifier[dtype] :
identifier[X] = identifier[X] . identifier[astype] ( identifier[self] . identifier[dtype] )
identifier[X_missing] = identifier[np] . identifier[isnan] ( identifier[X] , identifier[dtype] = identifier[np] . identifier[bool] )
keyword[assert] identifier[str] ( identifier[type] ( identifier[X] )). identifier[endswith] ( literal[string] ), literal[string] + identifier[str] ( identifier[type] ( identifier[X] ))
keyword[assert] identifier[len] ( identifier[X] . identifier[shape] )== literal[int] , literal[string]
keyword[if] identifier[tree_limit] < literal[int] keyword[or] identifier[tree_limit] > identifier[self] . identifier[values] . identifier[shape] [ literal[int] ]:
identifier[tree_limit] = identifier[self] . identifier[values] . identifier[shape] [ literal[int] ]
keyword[if] identifier[output] == literal[string] :
keyword[assert] identifier[y] keyword[is] keyword[not] keyword[None] , literal[string]
keyword[assert] identifier[X] . identifier[shape] [ literal[int] ]== identifier[len] ( identifier[y] ), literal[string] %( identifier[len] ( identifier[y] ), identifier[X] . identifier[shape] [ literal[int] ])
identifier[transform] = identifier[self] . identifier[get_transform] ( identifier[output] )
keyword[if] keyword[True] keyword[or] identifier[self] . identifier[model_type] == literal[string] :
identifier[output] = identifier[np] . identifier[zeros] (( identifier[X] . identifier[shape] [ literal[int] ], identifier[self] . identifier[n_outputs] ))
identifier[assert_import] ( literal[string] )
identifier[_cext] . identifier[dense_tree_predict] (
identifier[self] . identifier[children_left] , identifier[self] . identifier[children_right] , identifier[self] . identifier[children_default] ,
identifier[self] . identifier[features] , identifier[self] . identifier[thresholds] , identifier[self] . identifier[values] ,
identifier[self] . identifier[max_depth] , identifier[tree_limit] , identifier[self] . identifier[base_offset] , identifier[output_transform_codes] [ identifier[transform] ],
identifier[X] , identifier[X_missing] , identifier[y] , identifier[output]
)
keyword[elif] identifier[self] . identifier[model_type] == literal[string] :
identifier[assert_import] ( literal[string] )
identifier[output] = identifier[self] . identifier[original_model] . identifier[predict] ( identifier[X] , identifier[output_margin] = keyword[True] , identifier[tree_limit] = identifier[tree_limit] )
keyword[if] identifier[flat_output] :
keyword[if] identifier[self] . identifier[n_outputs] == literal[int] :
keyword[return] identifier[output] . identifier[flatten] ()[ literal[int] ]
keyword[else] :
keyword[return] identifier[output] . identifier[reshape] (- literal[int] , identifier[self] . identifier[n_outputs] )
keyword[else] :
keyword[if] identifier[self] . identifier[n_outputs] == literal[int] :
keyword[return] identifier[output] . identifier[flatten] ()
keyword[else] :
keyword[return] identifier[output]
|
def predict(self, X, y=None, output='margin', tree_limit=None):
""" A consistent interface to make predictions from this model.
Parameters
----------
tree_limit : None (default) or int
Limit the number of trees used by the model. By default None means no use the limit of the
original model, and -1 means no limit.
"""
# see if we have a default tree_limit in place.
if tree_limit is None:
tree_limit = -1 if self.tree_limit is None else self.tree_limit # depends on [control=['if'], data=['tree_limit']]
# convert dataframes
if str(type(X)).endswith("pandas.core.series.Series'>"):
X = X.values # depends on [control=['if'], data=[]]
elif str(type(X)).endswith("pandas.core.frame.DataFrame'>"):
X = X.values # depends on [control=['if'], data=[]]
flat_output = False
if len(X.shape) == 1:
flat_output = True
X = X.reshape(1, X.shape[0]) # depends on [control=['if'], data=[]]
if X.dtype != self.dtype:
X = X.astype(self.dtype) # depends on [control=['if'], data=[]]
X_missing = np.isnan(X, dtype=np.bool)
assert str(type(X)).endswith("'numpy.ndarray'>"), 'Unknown instance type: ' + str(type(X))
assert len(X.shape) == 2, 'Passed input data matrix X must have 1 or 2 dimensions!'
if tree_limit < 0 or tree_limit > self.values.shape[0]:
tree_limit = self.values.shape[0] # depends on [control=['if'], data=[]]
if output == 'logloss':
assert y is not None, 'Both samples and labels must be provided when explaining the loss (i.e. `explainer.shap_values(X, y)`)!'
assert X.shape[0] == len(y), 'The number of labels (%d) does not match the number of samples to explain (%d)!' % (len(y), X.shape[0]) # depends on [control=['if'], data=[]]
transform = self.get_transform(output)
if True or self.model_type == 'internal':
output = np.zeros((X.shape[0], self.n_outputs))
assert_import('cext')
_cext.dense_tree_predict(self.children_left, self.children_right, self.children_default, self.features, self.thresholds, self.values, self.max_depth, tree_limit, self.base_offset, output_transform_codes[transform], X, X_missing, y, output) # depends on [control=['if'], data=[]]
elif self.model_type == 'xgboost':
assert_import('xgboost')
output = self.original_model.predict(X, output_margin=True, tree_limit=tree_limit) # depends on [control=['if'], data=[]]
# drop dimensions we don't need
if flat_output:
if self.n_outputs == 1:
return output.flatten()[0] # depends on [control=['if'], data=[]]
else:
return output.reshape(-1, self.n_outputs) # depends on [control=['if'], data=[]]
elif self.n_outputs == 1:
return output.flatten() # depends on [control=['if'], data=[]]
else:
return output
|
def _populate_spelling_error(word,
                             suggestions,
                             contents,
                             line_offset,
                             column_offset,
                             message_start):
    """Create a LinterFailure for word.

    This function takes suggestions from :suggestions: and uses it to
    populate the message and candidate replacement. The replacement will
    be a line in :contents:, as determined by :line_offset: and
    :column_offset:.
    """
    line = contents[line_offset]
    replacement = None
    suggestions_text = ""
    if len(suggestions):
        # Splice the best suggestion over the misspelled word in the line.
        word_end = column_offset + len(word)
        replacement = line[:column_offset] + suggestions[0] + line[word_end:]
        # Offer every suggestion, space-separated, in the failure message.
        suggestions_text = ", perhaps you meant " + " ".join(suggestions)
    # Line numbers in failures are 1-based, hence the +1.
    return LinterFailure(message_start + suggestions_text,
                         line_offset + 1,
                         replacement)
|
def function[_populate_spelling_error, parameter[word, suggestions, contents, line_offset, column_offset, message_start]]:
constant[Create a LinterFailure for word.
This function takes suggestions from :suggestions: and uses it to
populate the message and candidate replacement. The replacement will
be a line in :contents:, as determined by :line_offset: and
:column_offset:.
]
variable[error_line] assign[=] call[name[contents]][name[line_offset]]
if call[name[len], parameter[name[suggestions]]] begin[:]
variable[char_word_offset] assign[=] binary_operation[name[column_offset] + call[name[len], parameter[name[word]]]]
variable[replacement] assign[=] binary_operation[binary_operation[call[name[error_line]][<ast.Slice object at 0x7da1b157bd90>] + call[name[suggestions]][constant[0]]] + call[name[error_line]][<ast.Slice object at 0x7da1b15790f0>]]
if call[name[len], parameter[name[suggestions]]] begin[:]
variable[suggestions_text] assign[=] binary_operation[constant[, perhaps you meant ] + call[constant[ ].join, parameter[name[suggestions]]]]
variable[format_desc] assign[=] binary_operation[name[message_start] + name[suggestions_text]]
return[call[name[LinterFailure], parameter[name[format_desc], binary_operation[name[line_offset] + constant[1]], name[replacement]]]]
|
keyword[def] identifier[_populate_spelling_error] ( identifier[word] ,
identifier[suggestions] ,
identifier[contents] ,
identifier[line_offset] ,
identifier[column_offset] ,
identifier[message_start] ):
literal[string]
identifier[error_line] = identifier[contents] [ identifier[line_offset] ]
keyword[if] identifier[len] ( identifier[suggestions] ):
identifier[char_word_offset] =( identifier[column_offset] + identifier[len] ( identifier[word] ))
identifier[replacement] =( identifier[error_line] [: identifier[column_offset] ]+
identifier[suggestions] [ literal[int] ]+
identifier[error_line] [ identifier[char_word_offset] :])
keyword[else] :
identifier[replacement] = keyword[None]
keyword[if] identifier[len] ( identifier[suggestions] ):
identifier[suggestions_text] =( literal[string] +
literal[string] . identifier[join] ( identifier[suggestions] ))
keyword[else] :
identifier[suggestions_text] = literal[string]
identifier[format_desc] = identifier[message_start] + identifier[suggestions_text]
keyword[return] identifier[LinterFailure] ( identifier[format_desc] ,
identifier[line_offset] + literal[int] ,
identifier[replacement] )
|
def _populate_spelling_error(word, suggestions, contents, line_offset, column_offset, message_start):
"""Create a LinterFailure for word.
This function takes suggestions from :suggestions: and uses it to
populate the message and candidate replacement. The replacement will
be a line in :contents:, as determined by :line_offset: and
:column_offset:.
"""
error_line = contents[line_offset]
if len(suggestions):
char_word_offset = column_offset + len(word)
replacement = error_line[:column_offset] + suggestions[0] + error_line[char_word_offset:] # depends on [control=['if'], data=[]]
else:
replacement = None
if len(suggestions):
suggestions_text = ', perhaps you meant ' + ' '.join(suggestions) # depends on [control=['if'], data=[]]
else:
suggestions_text = ''
format_desc = message_start + suggestions_text
return LinterFailure(format_desc, line_offset + 1, replacement)
|
def terminal(self, text):
    """terminal = '"' . (printable - '"') + . '"'
             | "'" . (printable - "'") + . "'" ;
    """
    self._attempting(text)
    # A terminal is one or more printable characters wrapped in matching
    # double or single quotes; the quote character itself is excluded from
    # the body.  Whitespace inside the quotes is significant, hence
    # ignore_whitespace=False on both alternatives.
    return alternation([
        concatenation([
            '"',
            one_or_more(
                exclusion(self.printable, '"')
            ),
            '"'
        ], ignore_whitespace=False),
        concatenation([
            "'",
            one_or_more(
                exclusion(self.printable,"'")
            ),
            "'"
        ], ignore_whitespace=False)
    # Collapse the matched parse tree into a single terminal token.
    ])(text).compressed(TokenType.terminal)
|
def function[terminal, parameter[self, text]]:
constant[terminal = '"' . (printable - '"') + . '"'
| "'" . (printable - "'") + . "'" ;
]
call[name[self]._attempting, parameter[name[text]]]
return[call[call[call[name[alternation], parameter[list[[<ast.Call object at 0x7da1b01abdc0>, <ast.Call object at 0x7da1b0243c10>]]]], parameter[name[text]]].compressed, parameter[name[TokenType].terminal]]]
|
keyword[def] identifier[terminal] ( identifier[self] , identifier[text] ):
literal[string]
identifier[self] . identifier[_attempting] ( identifier[text] )
keyword[return] identifier[alternation] ([
identifier[concatenation] ([
literal[string] ,
identifier[one_or_more] (
identifier[exclusion] ( identifier[self] . identifier[printable] , literal[string] )
),
literal[string]
], identifier[ignore_whitespace] = keyword[False] ),
identifier[concatenation] ([
literal[string] ,
identifier[one_or_more] (
identifier[exclusion] ( identifier[self] . identifier[printable] , literal[string] )
),
literal[string]
], identifier[ignore_whitespace] = keyword[False] )
])( identifier[text] ). identifier[compressed] ( identifier[TokenType] . identifier[terminal] )
|
def terminal(self, text):
"""terminal = '"' . (printable - '"') + . '"'
| "'" . (printable - "'") + . "'" ;
"""
self._attempting(text)
return alternation([concatenation(['"', one_or_more(exclusion(self.printable, '"')), '"'], ignore_whitespace=False), concatenation(["'", one_or_more(exclusion(self.printable, "'")), "'"], ignore_whitespace=False)])(text).compressed(TokenType.terminal)
|
def subs(self, path):
    """
    Search the strings in a config file for a substitutable value, e.g.
    "morphologies_dir": "$COMPONENT_DIR/morphologies",

    :param path: a config value; numbers are returned unchanged, strings
        have the configured substitutions applied.
    :return: the value with all substitutions applied.
    """
    # Numeric values have nothing to substitute; return them untouched.
    # isinstance (rather than type() ==) also accepts bools and numeric
    # subclasses, which would otherwise crash on the string ops below.
    if isinstance(path, (int, float)):
        return path
    # Prefix substitutions: applied at most once, only at the start.
    for key in self.init_substitutes:
        if path.startswith(key):
            path = path.replace(key, self.init_substitutes[key], 1)
    # General substitutions: applied everywhere in the string.
    for key in self.substitutes:
        if key in path:
            path = path.replace(key, self.substitutes[key])
    return path
|
def function[subs, parameter[self, path]]:
constant[
Search the strings in a config file for a substitutable value, e.g.
"morphologies_dir": "$COMPONENT_DIR/morphologies",
]
if <ast.BoolOp object at 0x7da1b19cbc70> begin[:]
return[name[path]]
for taget[name[s]] in starred[name[self].init_substitutes] begin[:]
if call[name[path].startswith, parameter[name[s]]] begin[:]
variable[path] assign[=] call[name[path].replace, parameter[name[s], call[name[self].init_substitutes][name[s]], constant[1]]]
for taget[name[s]] in starred[name[self].substitutes] begin[:]
if compare[name[s] in name[path]] begin[:]
variable[path] assign[=] call[name[path].replace, parameter[name[s], call[name[self].substitutes][name[s]]]]
return[name[path]]
|
keyword[def] identifier[subs] ( identifier[self] , identifier[path] ):
literal[string]
keyword[if] identifier[type] ( identifier[path] )== identifier[int] keyword[or] identifier[type] ( identifier[path] )== identifier[float] :
keyword[return] identifier[path]
keyword[for] identifier[s] keyword[in] identifier[self] . identifier[init_substitutes] :
keyword[if] identifier[path] . identifier[startswith] ( identifier[s] ):
identifier[path] = identifier[path] . identifier[replace] ( identifier[s] , identifier[self] . identifier[init_substitutes] [ identifier[s] ], literal[int] )
keyword[for] identifier[s] keyword[in] identifier[self] . identifier[substitutes] :
keyword[if] identifier[s] keyword[in] identifier[path] :
identifier[path] = identifier[path] . identifier[replace] ( identifier[s] , identifier[self] . identifier[substitutes] [ identifier[s] ])
keyword[return] identifier[path]
|
def subs(self, path):
"""
Search the strings in a config file for a substitutable value, e.g.
"morphologies_dir": "$COMPONENT_DIR/morphologies",
"""
#print_v('Checking for: \n %s, \n %s \n in %s'%(self.substitutes,self.init_substitutes,path))
if type(path) == int or type(path) == float:
return path # depends on [control=['if'], data=[]]
for s in self.init_substitutes:
if path.startswith(s):
path = path.replace(s, self.init_substitutes[s], 1) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['s']]
#print_v(' So far: %s'%path)
for s in self.substitutes:
if s in path:
path = path.replace(s, self.substitutes[s]) # depends on [control=['if'], data=['s', 'path']] # depends on [control=['for'], data=['s']]
#print_v(' Returning: %s'%path)
return path
|
def _bq_cast(string_field, bq_type):
"""
Helper method that casts a BigQuery row to the appropriate data types.
This is useful because BigQuery returns all fields as strings.
"""
if string_field is None:
return None
elif bq_type == 'INTEGER':
return int(string_field)
elif bq_type == 'FLOAT' or bq_type == 'TIMESTAMP':
return float(string_field)
elif bq_type == 'BOOLEAN':
if string_field not in ['true', 'false']:
raise ValueError("{} must have value 'true' or 'false'".format(
string_field))
return string_field == 'true'
else:
return string_field
|
def function[_bq_cast, parameter[string_field, bq_type]]:
constant[
Helper method that casts a BigQuery row to the appropriate data types.
This is useful because BigQuery returns all fields as strings.
]
if compare[name[string_field] is constant[None]] begin[:]
return[constant[None]]
|
keyword[def] identifier[_bq_cast] ( identifier[string_field] , identifier[bq_type] ):
literal[string]
keyword[if] identifier[string_field] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[elif] identifier[bq_type] == literal[string] :
keyword[return] identifier[int] ( identifier[string_field] )
keyword[elif] identifier[bq_type] == literal[string] keyword[or] identifier[bq_type] == literal[string] :
keyword[return] identifier[float] ( identifier[string_field] )
keyword[elif] identifier[bq_type] == literal[string] :
keyword[if] identifier[string_field] keyword[not] keyword[in] [ literal[string] , literal[string] ]:
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] (
identifier[string_field] ))
keyword[return] identifier[string_field] == literal[string]
keyword[else] :
keyword[return] identifier[string_field]
|
def _bq_cast(string_field, bq_type):
"""
Helper method that casts a BigQuery row to the appropriate data types.
This is useful because BigQuery returns all fields as strings.
"""
if string_field is None:
return None # depends on [control=['if'], data=[]]
elif bq_type == 'INTEGER':
return int(string_field) # depends on [control=['if'], data=[]]
elif bq_type == 'FLOAT' or bq_type == 'TIMESTAMP':
return float(string_field) # depends on [control=['if'], data=[]]
elif bq_type == 'BOOLEAN':
if string_field not in ['true', 'false']:
raise ValueError("{} must have value 'true' or 'false'".format(string_field)) # depends on [control=['if'], data=['string_field']]
return string_field == 'true' # depends on [control=['if'], data=[]]
else:
return string_field
|
async def house_status_monitor_enable(pyvlx):
    """Enable house status monitor.

    :param pyvlx: connection object used to issue the API call.
    :raises PyVLXException: if the gateway does not report success.
    """
    status_monitor_enable = HouseStatusMonitorEnable(pyvlx=pyvlx)
    await status_monitor_enable.do_api_call()
    if not status_monitor_enable.success:
        # Grammar fix: message previously read "Unable enable ...".
        raise PyVLXException("Unable to enable house status monitor.")
|
<ast.AsyncFunctionDef object at 0x7da2044c2e90>
|
keyword[async] keyword[def] identifier[house_status_monitor_enable] ( identifier[pyvlx] ):
literal[string]
identifier[status_monitor_enable] = identifier[HouseStatusMonitorEnable] ( identifier[pyvlx] = identifier[pyvlx] )
keyword[await] identifier[status_monitor_enable] . identifier[do_api_call] ()
keyword[if] keyword[not] identifier[status_monitor_enable] . identifier[success] :
keyword[raise] identifier[PyVLXException] ( literal[string] )
|
async def house_status_monitor_enable(pyvlx):
"""Enable house status monitor."""
status_monitor_enable = HouseStatusMonitorEnable(pyvlx=pyvlx)
await status_monitor_enable.do_api_call()
if not status_monitor_enable.success:
raise PyVLXException('Unable enable house status monitor.') # depends on [control=['if'], data=[]]
|
def build_def_use(graph, lparams):
    """
    Builds the Def-Use and Use-Def (DU/UD) chains of the variables of the
    method.
    """
    analysis = reach_def_analysis(graph, lparams)
    UD = defaultdict(list)
    for node in graph.rpo:
        for loc, ins in node.get_loc_with_ins():
            for var in ins.get_used_vars():
                # var not in analysis.def_to_loc: test that the register
                # exists. It is possible that it is not the case, when a
                # variable is of a type which is stored on multiple registers
                # e.g: a 'double' stored in v3 is also present in v4, so a call
                # to foo(v3), will in fact call foo(v3, v4).
                if var not in analysis.def_to_loc:
                    continue
                local_defs = analysis.defs[node].get(var, set())
                # Latest definition of `var` within this node that precedes
                # the use at `loc` (only non-negative locations qualify,
                # matching the original -1 sentinel semantics).
                prior_def = max(
                    (v for v in local_defs if -1 < v < loc), default=-1)
                if prior_def >= 0:
                    UD[var, loc].append(prior_def)
                else:
                    # No earlier local definition: fall back to the
                    # definitions reaching the entry of this node.
                    reaching = analysis.def_to_loc[var].intersection(
                        analysis.R[node])
                    UD[var, loc].extend(reaching)
    # Invert the Use-Def map to obtain the Def-Use chains.
    DU = defaultdict(list)
    for (var, use_loc), def_locs in UD.items():
        for def_loc in def_locs:
            DU[var, def_loc].append(use_loc)
    return UD, DU
|
def function[build_def_use, parameter[graph, lparams]]:
constant[
Builds the Def-Use and Use-Def (DU/UD) chains of the variables of the
method.
]
variable[analysis] assign[=] call[name[reach_def_analysis], parameter[name[graph], name[lparams]]]
variable[UD] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[name[node]] in starred[name[graph].rpo] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b26ad570>, <ast.Name object at 0x7da1b26ae200>]]] in starred[call[name[node].get_loc_with_ins, parameter[]]] begin[:]
for taget[name[var]] in starred[call[name[ins].get_used_vars, parameter[]]] begin[:]
if compare[name[var] <ast.NotIn object at 0x7da2590d7190> name[analysis].def_to_loc] begin[:]
continue
variable[ldefs] assign[=] call[name[analysis].defs][name[node]]
variable[prior_def] assign[=] <ast.UnaryOp object at 0x7da1b26ae920>
for taget[name[v]] in starred[call[name[ldefs].get, parameter[name[var], call[name[set], parameter[]]]]] begin[:]
if compare[name[prior_def] less[<] name[v]] begin[:]
variable[prior_def] assign[=] name[v]
if compare[name[prior_def] greater_or_equal[>=] constant[0]] begin[:]
call[call[name[UD]][tuple[[<ast.Name object at 0x7da20c6e75b0>, <ast.Name object at 0x7da20c6e5000>]]].append, parameter[name[prior_def]]]
variable[DU] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[tuple[[<ast.Name object at 0x7da20c6e4640>, <ast.Name object at 0x7da20c6e6c50>]]] in starred[call[name[UD].items, parameter[]]] begin[:]
<ast.Tuple object at 0x7da20c6e4be0> assign[=] name[var_loc]
for taget[name[def_loc]] in starred[name[defs_loc]] begin[:]
call[call[name[DU]][tuple[[<ast.Name object at 0x7da20c6e4760>, <ast.Name object at 0x7da20c6e7520>]]].append, parameter[name[loc]]]
return[tuple[[<ast.Name object at 0x7da20c6e5990>, <ast.Name object at 0x7da20c6e6620>]]]
|
keyword[def] identifier[build_def_use] ( identifier[graph] , identifier[lparams] ):
literal[string]
identifier[analysis] = identifier[reach_def_analysis] ( identifier[graph] , identifier[lparams] )
identifier[UD] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[node] keyword[in] identifier[graph] . identifier[rpo] :
keyword[for] identifier[i] , identifier[ins] keyword[in] identifier[node] . identifier[get_loc_with_ins] ():
keyword[for] identifier[var] keyword[in] identifier[ins] . identifier[get_used_vars] ():
keyword[if] identifier[var] keyword[not] keyword[in] identifier[analysis] . identifier[def_to_loc] :
keyword[continue]
identifier[ldefs] = identifier[analysis] . identifier[defs] [ identifier[node] ]
identifier[prior_def] =- literal[int]
keyword[for] identifier[v] keyword[in] identifier[ldefs] . identifier[get] ( identifier[var] , identifier[set] ()):
keyword[if] identifier[prior_def] < identifier[v] < identifier[i] :
identifier[prior_def] = identifier[v]
keyword[if] identifier[prior_def] >= literal[int] :
identifier[UD] [ identifier[var] , identifier[i] ]. identifier[append] ( identifier[prior_def] )
keyword[else] :
identifier[intersect] = identifier[analysis] . identifier[def_to_loc] [ identifier[var] ]. identifier[intersection] (
identifier[analysis] . identifier[R] [ identifier[node] ])
identifier[UD] [ identifier[var] , identifier[i] ]. identifier[extend] ( identifier[intersect] )
identifier[DU] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[var_loc] , identifier[defs_loc] keyword[in] identifier[UD] . identifier[items] ():
identifier[var] , identifier[loc] = identifier[var_loc]
keyword[for] identifier[def_loc] keyword[in] identifier[defs_loc] :
identifier[DU] [ identifier[var] , identifier[def_loc] ]. identifier[append] ( identifier[loc] )
keyword[return] identifier[UD] , identifier[DU]
|
def build_def_use(graph, lparams):
"""
Builds the Def-Use and Use-Def (DU/UD) chains of the variables of the
method.
"""
analysis = reach_def_analysis(graph, lparams)
UD = defaultdict(list)
for node in graph.rpo:
for (i, ins) in node.get_loc_with_ins():
for var in ins.get_used_vars():
# var not in analysis.def_to_loc: test that the register
# exists. It is possible that it is not the case, when a
# variable is of a type which is stored on multiple registers
# e.g: a 'double' stored in v3 is also present in v4, so a call
# to foo(v3), will in fact call foo(v3, v4).
if var not in analysis.def_to_loc:
continue # depends on [control=['if'], data=[]]
ldefs = analysis.defs[node]
prior_def = -1
for v in ldefs.get(var, set()):
if prior_def < v < i:
prior_def = v # depends on [control=['if'], data=['prior_def', 'v']] # depends on [control=['for'], data=['v']]
if prior_def >= 0:
UD[var, i].append(prior_def) # depends on [control=['if'], data=['prior_def']]
else:
intersect = analysis.def_to_loc[var].intersection(analysis.R[node])
UD[var, i].extend(intersect) # depends on [control=['for'], data=['var']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['node']]
DU = defaultdict(list)
for (var_loc, defs_loc) in UD.items():
(var, loc) = var_loc
for def_loc in defs_loc:
DU[var, def_loc].append(loc) # depends on [control=['for'], data=['def_loc']] # depends on [control=['for'], data=[]]
return (UD, DU)
|
def _handle_inotify_event(self, wd):
    """Handle a series of events coming-in from inotify.

    Generator: reads raw bytes from ``wd``, appends them to an internal
    buffer, and yields one ``(header, type_names, path, filename)`` tuple
    per complete event found in the buffer.  Incomplete trailing data is
    retained in the buffer for the next invocation.
    """
    # NOTE(review): despite the name, `wd` is passed to os.read() and so
    # appears to be the inotify file descriptor, not a watch descriptor
    # — confirm against the caller.
    b = os.read(wd, 1024)
    if not b:
        return
    self.__buffer += b
    while 1:
        length = len(self.__buffer)
        if length < _STRUCT_HEADER_LENGTH:
            _LOGGER.debug("Not enough bytes for a header.")
            return
        # We have, at least, a whole-header in the buffer.
        peek_slice = self.__buffer[:_STRUCT_HEADER_LENGTH]
        header_raw = struct.unpack(
            _HEADER_STRUCT_FORMAT,
            peek_slice)
        header = _INOTIFY_EVENT(*header_raw)
        type_names = self._get_event_names(header.mask)
        _LOGGER.debug("Events received in stream: {}".format(type_names))
        # header.len is the size of the NUL-padded filename that follows
        # the fixed-size header.
        event_length = (_STRUCT_HEADER_LENGTH + header.len)
        if length < event_length:
            # Filename bytes not fully received yet; wait for more data.
            return
        filename = self.__buffer[_STRUCT_HEADER_LENGTH:event_length]
        # Our filename is 16-byte aligned and right-padded with NULs.
        filename_bytes = filename.rstrip(b'\0')
        # Consume this event from the front of the buffer.
        self.__buffer = self.__buffer[event_length:]
        # Map the watch descriptor back to its watched path; events for
        # watches no longer registered are silently dropped.
        path = self.__watches_r.get(header.wd)
        if path is not None:
            filename_unicode = filename_bytes.decode('utf8')
            yield (header, type_names, path, filename_unicode)
        buffer_length = len(self.__buffer)
        if buffer_length < _STRUCT_HEADER_LENGTH:
            break
|
def function[_handle_inotify_event, parameter[self, wd]]:
constant[Handle a series of events coming-in from inotify.]
variable[b] assign[=] call[name[os].read, parameter[name[wd], constant[1024]]]
if <ast.UnaryOp object at 0x7da1b0980490> begin[:]
return[None]
<ast.AugAssign object at 0x7da1b0981090>
while constant[1] begin[:]
variable[length] assign[=] call[name[len], parameter[name[self].__buffer]]
if compare[name[length] less[<] name[_STRUCT_HEADER_LENGTH]] begin[:]
call[name[_LOGGER].debug, parameter[constant[Not enough bytes for a header.]]]
return[None]
variable[peek_slice] assign[=] call[name[self].__buffer][<ast.Slice object at 0x7da204960e80>]
variable[header_raw] assign[=] call[name[struct].unpack, parameter[name[_HEADER_STRUCT_FORMAT], name[peek_slice]]]
variable[header] assign[=] call[name[_INOTIFY_EVENT], parameter[<ast.Starred object at 0x7da18eb54970>]]
variable[type_names] assign[=] call[name[self]._get_event_names, parameter[name[header].mask]]
call[name[_LOGGER].debug, parameter[call[constant[Events received in stream: {}].format, parameter[name[type_names]]]]]
variable[event_length] assign[=] binary_operation[name[_STRUCT_HEADER_LENGTH] + name[header].len]
if compare[name[length] less[<] name[event_length]] begin[:]
return[None]
variable[filename] assign[=] call[name[self].__buffer][<ast.Slice object at 0x7da1b0926110>]
variable[filename_bytes] assign[=] call[name[filename].rstrip, parameter[constant[b'\x00']]]
name[self].__buffer assign[=] call[name[self].__buffer][<ast.Slice object at 0x7da1b086f040>]
variable[path] assign[=] call[name[self].__watches_r.get, parameter[name[header].wd]]
if compare[name[path] is_not constant[None]] begin[:]
variable[filename_unicode] assign[=] call[name[filename_bytes].decode, parameter[constant[utf8]]]
<ast.Yield object at 0x7da1b086fd90>
variable[buffer_length] assign[=] call[name[len], parameter[name[self].__buffer]]
if compare[name[buffer_length] less[<] name[_STRUCT_HEADER_LENGTH]] begin[:]
break
|
keyword[def] identifier[_handle_inotify_event] ( identifier[self] , identifier[wd] ):
literal[string]
identifier[b] = identifier[os] . identifier[read] ( identifier[wd] , literal[int] )
keyword[if] keyword[not] identifier[b] :
keyword[return]
identifier[self] . identifier[__buffer] += identifier[b]
keyword[while] literal[int] :
identifier[length] = identifier[len] ( identifier[self] . identifier[__buffer] )
keyword[if] identifier[length] < identifier[_STRUCT_HEADER_LENGTH] :
identifier[_LOGGER] . identifier[debug] ( literal[string] )
keyword[return]
identifier[peek_slice] = identifier[self] . identifier[__buffer] [: identifier[_STRUCT_HEADER_LENGTH] ]
identifier[header_raw] = identifier[struct] . identifier[unpack] (
identifier[_HEADER_STRUCT_FORMAT] ,
identifier[peek_slice] )
identifier[header] = identifier[_INOTIFY_EVENT] (* identifier[header_raw] )
identifier[type_names] = identifier[self] . identifier[_get_event_names] ( identifier[header] . identifier[mask] )
identifier[_LOGGER] . identifier[debug] ( literal[string] . identifier[format] ( identifier[type_names] ))
identifier[event_length] =( identifier[_STRUCT_HEADER_LENGTH] + identifier[header] . identifier[len] )
keyword[if] identifier[length] < identifier[event_length] :
keyword[return]
identifier[filename] = identifier[self] . identifier[__buffer] [ identifier[_STRUCT_HEADER_LENGTH] : identifier[event_length] ]
identifier[filename_bytes] = identifier[filename] . identifier[rstrip] ( literal[string] )
identifier[self] . identifier[__buffer] = identifier[self] . identifier[__buffer] [ identifier[event_length] :]
identifier[path] = identifier[self] . identifier[__watches_r] . identifier[get] ( identifier[header] . identifier[wd] )
keyword[if] identifier[path] keyword[is] keyword[not] keyword[None] :
identifier[filename_unicode] = identifier[filename_bytes] . identifier[decode] ( literal[string] )
keyword[yield] ( identifier[header] , identifier[type_names] , identifier[path] , identifier[filename_unicode] )
identifier[buffer_length] = identifier[len] ( identifier[self] . identifier[__buffer] )
keyword[if] identifier[buffer_length] < identifier[_STRUCT_HEADER_LENGTH] :
keyword[break]
|
def _handle_inotify_event(self, wd):
"""Handle a series of events coming-in from inotify."""
b = os.read(wd, 1024)
if not b:
return # depends on [control=['if'], data=[]]
self.__buffer += b
while 1:
length = len(self.__buffer)
if length < _STRUCT_HEADER_LENGTH:
_LOGGER.debug('Not enough bytes for a header.')
return # depends on [control=['if'], data=[]]
# We have, at least, a whole-header in the buffer.
peek_slice = self.__buffer[:_STRUCT_HEADER_LENGTH]
header_raw = struct.unpack(_HEADER_STRUCT_FORMAT, peek_slice)
header = _INOTIFY_EVENT(*header_raw)
type_names = self._get_event_names(header.mask)
_LOGGER.debug('Events received in stream: {}'.format(type_names))
event_length = _STRUCT_HEADER_LENGTH + header.len
if length < event_length:
return # depends on [control=['if'], data=[]]
filename = self.__buffer[_STRUCT_HEADER_LENGTH:event_length]
# Our filename is 16-byte aligned and right-padded with NULs.
filename_bytes = filename.rstrip(b'\x00')
self.__buffer = self.__buffer[event_length:]
path = self.__watches_r.get(header.wd)
if path is not None:
filename_unicode = filename_bytes.decode('utf8')
yield (header, type_names, path, filename_unicode) # depends on [control=['if'], data=['path']]
buffer_length = len(self.__buffer)
if buffer_length < _STRUCT_HEADER_LENGTH:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
|
def combine_mv_and_lv(mv, lv):
    """Combine MV and LV grid topology in PyPSA format
    """
    combined = {}
    # Concatenate each component type present at the LV level with its
    # MV counterpart.
    for component in lv.keys():
        combined[component] = pd.concat([mv[component], lv[component]], axis=0)
    # Transformers are taken from the MV level only.
    combined['Transformer'] = mv['Transformer']
    return combined
|
def function[combine_mv_and_lv, parameter[mv, lv]]:
constant[Combine MV and LV grid topology in PyPSA format
]
variable[combined] assign[=] <ast.DictComp object at 0x7da1b0369090>
call[name[combined]][constant[Transformer]] assign[=] call[name[mv]][constant[Transformer]]
return[name[combined]]
|
keyword[def] identifier[combine_mv_and_lv] ( identifier[mv] , identifier[lv] ):
literal[string]
identifier[combined] ={
identifier[c] : identifier[pd] . identifier[concat] ([ identifier[mv] [ identifier[c] ], identifier[lv] [ identifier[c] ]], identifier[axis] = literal[int] ) keyword[for] identifier[c] keyword[in] identifier[list] ( identifier[lv] . identifier[keys] ())
}
identifier[combined] [ literal[string] ]= identifier[mv] [ literal[string] ]
keyword[return] identifier[combined]
|
def combine_mv_and_lv(mv, lv):
"""Combine MV and LV grid topology in PyPSA format
"""
combined = {c: pd.concat([mv[c], lv[c]], axis=0) for c in list(lv.keys())}
combined['Transformer'] = mv['Transformer']
return combined
|
def time_emd(emd_type, data):
    """Time an EMD command with the given data as arguments"""
    # Map the symbolic EMD name to the callable implementing it.
    dispatch = {
        'cause': _CAUSE_EMD,
        'effect': pyphi.subsystem.effect_emd,
        'hamming': pyphi.utils.hamming_emd,
    }
    emd = dispatch[emd_type]

    def statement():
        # Apply the selected EMD to every pair in the benchmark data.
        for d1, d2 in data:
            emd(d1, d2)

    # Take the minimum over the repetitions to reduce timing noise.
    return min(timeit.repeat(statement, number=NUMBER, repeat=REPEAT))
|
def function[time_emd, parameter[emd_type, data]]:
constant[Time an EMD command with the given data as arguments]
variable[emd] assign[=] call[dictionary[[<ast.Constant object at 0x7da18dc073d0>, <ast.Constant object at 0x7da18dc058a0>, <ast.Constant object at 0x7da18dc05120>], [<ast.Name object at 0x7da18dc07a90>, <ast.Attribute object at 0x7da18dc06560>, <ast.Attribute object at 0x7da18dc054e0>]]][name[emd_type]]
def function[statement, parameter[]]:
for taget[tuple[[<ast.Name object at 0x7da18dc06530>, <ast.Name object at 0x7da18dc04eb0>]]] in starred[name[data]] begin[:]
call[name[emd], parameter[name[d1], name[d2]]]
variable[results] assign[=] call[name[timeit].repeat, parameter[name[statement]]]
return[call[name[min], parameter[name[results]]]]
|
keyword[def] identifier[time_emd] ( identifier[emd_type] , identifier[data] ):
literal[string]
identifier[emd] ={
literal[string] : identifier[_CAUSE_EMD] ,
literal[string] : identifier[pyphi] . identifier[subsystem] . identifier[effect_emd] ,
literal[string] : identifier[pyphi] . identifier[utils] . identifier[hamming_emd]
}[ identifier[emd_type] ]
keyword[def] identifier[statement] ():
keyword[for] ( identifier[d1] , identifier[d2] ) keyword[in] identifier[data] :
identifier[emd] ( identifier[d1] , identifier[d2] )
identifier[results] = identifier[timeit] . identifier[repeat] ( identifier[statement] , identifier[number] = identifier[NUMBER] , identifier[repeat] = identifier[REPEAT] )
keyword[return] identifier[min] ( identifier[results] )
|
def time_emd(emd_type, data):
"""Time an EMD command with the given data as arguments"""
emd = {'cause': _CAUSE_EMD, 'effect': pyphi.subsystem.effect_emd, 'hamming': pyphi.utils.hamming_emd}[emd_type]
def statement():
for (d1, d2) in data:
emd(d1, d2) # depends on [control=['for'], data=[]]
results = timeit.repeat(statement, number=NUMBER, repeat=REPEAT)
return min(results)
|
def exit(self):
    """
    Cleanup pid file at exit.

    Logs a shutdown warning, removes the daemon's pid file and terminates
    the process with exit status 0 (raises ``SystemExit``).
    """
    self.logger.warning("Stopping daemon.")
    try:
        os.remove(self.pid)
    except OSError:
        # A missing or unremovable pid file must not keep the daemon
        # alive — previously this raised and sys.exit() was never reached.
        self.logger.warning("Could not remove pid file %s.", self.pid)
    sys.exit(0)
|
def function[exit, parameter[self]]:
constant[
Cleanup pid file at exit.
]
call[name[self].logger.warning, parameter[constant[Stopping daemon.]]]
call[name[os].remove, parameter[name[self].pid]]
call[name[sys].exit, parameter[constant[0]]]
|
keyword[def] identifier[exit] ( identifier[self] ):
literal[string]
identifier[self] . identifier[logger] . identifier[warning] ( literal[string] )
identifier[os] . identifier[remove] ( identifier[self] . identifier[pid] )
identifier[sys] . identifier[exit] ( literal[int] )
|
def exit(self):
"""
Cleanup pid file at exit.
"""
self.logger.warning('Stopping daemon.')
os.remove(self.pid)
sys.exit(0)
|
def setup_formats(self):
    """
    Inspects its methods to see what it can convert from and to

    Methods named ``from_X`` register X as an input format (conversion
    from X to the common format); methods named ``to_X`` register X as an
    output format (conversion from the common format to X).
    """
    for method_name in self.get_methods():
        # Strip only the leading prefix.  The previous re.sub() call was
        # unanchored and replaced every occurrence, so e.g. "to_to_x"
        # registered as "x" instead of "to_x".
        if method_name.startswith("from_"):
            self.input_formats.append(method_name[len("from_"):])
        elif method_name.startswith("to_"):
            self.output_formats.append(method_name[len("to_"):])
|
def function[setup_formats, parameter[self]]:
constant[
Inspects its methods to see what it can convert from and to
]
variable[methods] assign[=] call[name[self].get_methods, parameter[]]
for taget[name[m]] in starred[name[methods]] begin[:]
if call[name[m].startswith, parameter[constant[from_]]] begin[:]
call[name[self].input_formats.append, parameter[call[name[re].sub, parameter[constant[from_], constant[], name[m]]]]]
|
keyword[def] identifier[setup_formats] ( identifier[self] ):
literal[string]
identifier[methods] = identifier[self] . identifier[get_methods] ()
keyword[for] identifier[m] keyword[in] identifier[methods] :
keyword[if] identifier[m] . identifier[startswith] ( literal[string] ):
identifier[self] . identifier[input_formats] . identifier[append] ( identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[m] ))
keyword[elif] identifier[m] . identifier[startswith] ( literal[string] ):
identifier[self] . identifier[output_formats] . identifier[append] ( identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[m] ))
|
def setup_formats(self):
"""
Inspects its methods to see what it can convert from and to
"""
methods = self.get_methods()
for m in methods:
#Methods named "from_X" will be assumed to convert from format X to the common format
if m.startswith('from_'):
self.input_formats.append(re.sub('from_', '', m)) # depends on [control=['if'], data=[]]
#Methods named "to_X" will be assumed to convert from the common format to X
elif m.startswith('to_'):
self.output_formats.append(re.sub('to_', '', m)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']]
|
def search_users(self, user_name):
    """Searches for users via provisioning API.
    If you get back an error 999, then the provisioning API is not enabled.
    :param user_name: name of user to be searched for
    :returns: list of usernames that contain user_name as substring
    :raises: HTTPResponseError in case an HTTP error status was returned
    """
    from urllib.parse import quote

    action_path = 'users'
    if user_name:
        # URL-encode the search term so characters such as '&', '#' or
        # spaces cannot corrupt the query string (previously the raw
        # value was interpolated verbatim).
        action_path += '?search={}'.format(quote(user_name))
    res = self._make_ocs_request(
        'GET',
        self.OCS_SERVICE_CLOUD,
        action_path
    )
    if res.status_code == 200:
        tree = ET.fromstring(res.content)
        users = [x.text for x in tree.findall('data/users/element')]
        return users
    raise HTTPResponseError(res)
|
def function[search_users, parameter[self, user_name]]:
constant[Searches for users via provisioning API.
If you get back an error 999, then the provisioning API is not enabled.
:param user_name: name of user to be searched for
:returns: list of usernames that contain user_name as substring
:raises: HTTPResponseError in case an HTTP error status was returned
]
variable[action_path] assign[=] constant[users]
if name[user_name] begin[:]
<ast.AugAssign object at 0x7da1b0fcff40>
variable[res] assign[=] call[name[self]._make_ocs_request, parameter[constant[GET], name[self].OCS_SERVICE_CLOUD, name[action_path]]]
if compare[name[res].status_code equal[==] constant[200]] begin[:]
variable[tree] assign[=] call[name[ET].fromstring, parameter[name[res].content]]
variable[users] assign[=] <ast.ListComp object at 0x7da1b0fcdc60>
return[name[users]]
<ast.Raise object at 0x7da18fe92020>
|
keyword[def] identifier[search_users] ( identifier[self] , identifier[user_name] ):
literal[string]
identifier[action_path] = literal[string]
keyword[if] identifier[user_name] :
identifier[action_path] += literal[string] . identifier[format] ( identifier[user_name] )
identifier[res] = identifier[self] . identifier[_make_ocs_request] (
literal[string] ,
identifier[self] . identifier[OCS_SERVICE_CLOUD] ,
identifier[action_path]
)
keyword[if] identifier[res] . identifier[status_code] == literal[int] :
identifier[tree] = identifier[ET] . identifier[fromstring] ( identifier[res] . identifier[content] )
identifier[users] =[ identifier[x] . identifier[text] keyword[for] identifier[x] keyword[in] identifier[tree] . identifier[findall] ( literal[string] )]
keyword[return] identifier[users]
keyword[raise] identifier[HTTPResponseError] ( identifier[res] )
|
def search_users(self, user_name):
"""Searches for users via provisioning API.
If you get back an error 999, then the provisioning API is not enabled.
:param user_name: name of user to be searched for
:returns: list of usernames that contain user_name as substring
:raises: HTTPResponseError in case an HTTP error status was returned
"""
action_path = 'users'
if user_name:
action_path += '?search={}'.format(user_name) # depends on [control=['if'], data=[]]
res = self._make_ocs_request('GET', self.OCS_SERVICE_CLOUD, action_path)
if res.status_code == 200:
tree = ET.fromstring(res.content)
users = [x.text for x in tree.findall('data/users/element')]
return users # depends on [control=['if'], data=[]]
raise HTTPResponseError(res)
|
def on_event(self, evt, is_final):
    """ this is invoked from in response to COM PumpWaitingMessages - different thread """
    for msg in XmlHelper.message_iter(evt):
        # Single security element in historical request
        node = msg.GetElement('securityData')
        if not node.HasElement('securityError'):
            self.on_security_data_node(node)
            continue
        # Record the per-security error instead of processing the data.
        secid = XmlHelper.get_child_value(node, 'security')
        error = XmlHelper.as_security_error(node.GetElement('securityError'), secid)
        self.security_errors.append(error)
|
def function[on_event, parameter[self, evt, is_final]]:
constant[ this is invoked from in response to COM PumpWaitingMessages - different thread ]
for taget[name[msg]] in starred[call[name[XmlHelper].message_iter, parameter[name[evt]]]] begin[:]
variable[node] assign[=] call[name[msg].GetElement, parameter[constant[securityData]]]
if call[name[node].HasElement, parameter[constant[securityError]]] begin[:]
variable[secid] assign[=] call[name[XmlHelper].get_child_value, parameter[name[node], constant[security]]]
call[name[self].security_errors.append, parameter[call[name[XmlHelper].as_security_error, parameter[call[name[node].GetElement, parameter[constant[securityError]]], name[secid]]]]]
|
keyword[def] identifier[on_event] ( identifier[self] , identifier[evt] , identifier[is_final] ):
literal[string]
keyword[for] identifier[msg] keyword[in] identifier[XmlHelper] . identifier[message_iter] ( identifier[evt] ):
identifier[node] = identifier[msg] . identifier[GetElement] ( literal[string] )
keyword[if] identifier[node] . identifier[HasElement] ( literal[string] ):
identifier[secid] = identifier[XmlHelper] . identifier[get_child_value] ( identifier[node] , literal[string] )
identifier[self] . identifier[security_errors] . identifier[append] ( identifier[XmlHelper] . identifier[as_security_error] ( identifier[node] . identifier[GetElement] ( literal[string] ), identifier[secid] ))
keyword[else] :
identifier[self] . identifier[on_security_data_node] ( identifier[node] )
|
def on_event(self, evt, is_final):
""" this is invoked from in response to COM PumpWaitingMessages - different thread """
for msg in XmlHelper.message_iter(evt):
# Single security element in historical request
node = msg.GetElement('securityData')
if node.HasElement('securityError'):
secid = XmlHelper.get_child_value(node, 'security')
self.security_errors.append(XmlHelper.as_security_error(node.GetElement('securityError'), secid)) # depends on [control=['if'], data=[]]
else:
self.on_security_data_node(node) # depends on [control=['for'], data=['msg']]
|
def sample(self, sample_indices=None, num_samples=1):
    """ returns samples according to the KDE
    Parameters
    ----------
    sample_indices: list of ints
        Indices into the training data used as centers for the samples
    num_samples: int
        if sample_indices is None, this specifies how many samples
        are drawn.
    """
    if sample_indices is None:
        sample_indices = np.random.choice(self.data.shape[0], size=num_samples)
    samples = self.data[sample_indices]
    # Bug fix: draw one independent perturbation per actual sample.
    # Previously the noise vector had length `num_samples`, which either
    # broadcast the same noise onto every sample or raised a shape error
    # whenever explicit `sample_indices` of a different length were given.
    delta = np.random.normal(size=len(samples)) * self.bw
    samples += delta
    # Re-draw the perturbation for any sample pushed outside [0, 1].
    oob_idx = np.argwhere(np.logical_or(samples > 1, samples < 0)).flatten()
    while len(oob_idx) > 0:
        samples[oob_idx] -= delta[oob_idx]  # revert move
        delta[oob_idx] = np.random.normal(size=len(oob_idx)) * self.bw
        samples[oob_idx] += delta[oob_idx]
        oob_idx = oob_idx[np.argwhere(np.logical_or(samples[oob_idx] > 1,
                                                    samples[oob_idx] < 0))].flatten()
    return samples
|
def function[sample, parameter[self, sample_indices, num_samples]]:
constant[ returns samples according to the KDE
Parameters
----------
sample_inices: list of ints
Indices into the training data used as centers for the samples
num_samples: int
if samples_indices is None, this specifies how many samples
are drawn.
]
if compare[name[sample_indices] is constant[None]] begin[:]
variable[sample_indices] assign[=] call[name[np].random.choice, parameter[call[name[self].data.shape][constant[0]]]]
variable[samples] assign[=] call[name[self].data][name[sample_indices]]
variable[delta] assign[=] binary_operation[call[name[np].random.normal, parameter[]] * name[self].bw]
<ast.AugAssign object at 0x7da1b1713250>
variable[oob_idx] assign[=] call[call[name[np].argwhere, parameter[call[name[np].logical_or, parameter[compare[name[samples] greater[>] constant[1]], compare[name[samples] less[<] constant[0]]]]]].flatten, parameter[]]
while compare[call[name[len], parameter[name[oob_idx]]] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b1712b60>
call[name[delta]][name[oob_idx]] assign[=] binary_operation[call[name[np].random.normal, parameter[]] * name[self].bw]
<ast.AugAssign object at 0x7da1b1712bc0>
variable[oob_idx] assign[=] call[call[name[oob_idx]][call[name[np].argwhere, parameter[call[name[np].logical_or, parameter[compare[call[name[samples]][name[oob_idx]] greater[>] constant[1]], compare[call[name[samples]][name[oob_idx]] less[<] constant[0]]]]]]].flatten, parameter[]]
return[name[samples]]
|
keyword[def] identifier[sample] ( identifier[self] , identifier[sample_indices] = keyword[None] , identifier[num_samples] = literal[int] ):
literal[string]
keyword[if] identifier[sample_indices] keyword[is] keyword[None] :
identifier[sample_indices] = identifier[np] . identifier[random] . identifier[choice] ( identifier[self] . identifier[data] . identifier[shape] [ literal[int] ], identifier[size] = identifier[num_samples] )
identifier[samples] = identifier[self] . identifier[data] [ identifier[sample_indices] ]
identifier[delta] = identifier[np] . identifier[random] . identifier[normal] ( identifier[size] = identifier[num_samples] )* identifier[self] . identifier[bw]
identifier[samples] += identifier[delta]
identifier[oob_idx] = identifier[np] . identifier[argwhere] ( identifier[np] . identifier[logical_or] ( identifier[samples] > literal[int] , identifier[samples] < literal[int] )). identifier[flatten] ()
keyword[while] identifier[len] ( identifier[oob_idx] )> literal[int] :
identifier[samples] [ identifier[oob_idx] ]-= identifier[delta] [ identifier[oob_idx] ]
identifier[delta] [ identifier[oob_idx] ]= identifier[np] . identifier[random] . identifier[normal] ( identifier[size] = identifier[len] ( identifier[oob_idx] ))* identifier[self] . identifier[bw]
identifier[samples] [ identifier[oob_idx] ]+= identifier[delta] [ identifier[oob_idx] ]
identifier[oob_idx] = identifier[oob_idx] [ identifier[np] . identifier[argwhere] ( identifier[np] . identifier[logical_or] ( identifier[samples] [ identifier[oob_idx] ]> literal[int] , identifier[samples] [ identifier[oob_idx] ]< literal[int] ))]. identifier[flatten] ()
keyword[return] ( identifier[samples] )
|
def sample(self, sample_indices=None, num_samples=1):
    """Draw perturbed samples from the KDE.

    Each sample is a training point plus Gaussian noise scaled by the
    bandwidth; out-of-range values are re-perturbed until they land in
    [0, 1] (assumes the training data lives in the unit interval).

    Parameters
    ----------
    sample_indices: list of ints
        Indices into the training data used as centers for the samples.
        If given, one sample is drawn per index and ``num_samples`` is
        ignored.
    num_samples: int
        if samples_indices is None, this specifies how many samples
        are drawn.

    Returns
    -------
    np.ndarray
        One sample per center, all values in [0, 1].
    """
    if sample_indices is None:
        sample_indices = np.random.choice(self.data.shape[0], size=num_samples)
    sample_indices = np.asarray(sample_indices)
    # Size the perturbation vector by the number of centers actually used.
    # (Previously it was always of length ``num_samples``, which silently
    # reused a single delta -- or raised -- for explicitly passed indices.)
    num_samples = sample_indices.shape[0]
    samples = self.data[sample_indices]

    delta = np.random.normal(size=num_samples) * self.bw
    samples += delta
    oob_idx = np.argwhere(np.logical_or(samples > 1, samples < 0)).flatten()
    while len(oob_idx) > 0:
        samples[oob_idx] -= delta[oob_idx]  # revert the offending moves
        delta[oob_idx] = np.random.normal(size=len(oob_idx)) * self.bw
        samples[oob_idx] += delta[oob_idx]
        # keep only the indices that are still outside [0, 1]
        oob_idx = oob_idx[np.argwhere(np.logical_or(samples[oob_idx] > 1,
                                                    samples[oob_idx] < 0))].flatten()
    return samples
|
def shutdown(self):
    """ Shuts down the daemon process (idempotent: subsequent calls are no-ops).
    """
    if self._exited:
        return
    self._exited = True
    runner = self._task_runner
    # ask the task runner to terminate via SIGTERM
    if runner.is_alive():
        runner.terminate()
    # while the command server is still running, block until the task
    # runner completes so it has time to use the command server to clean
    # up root plugins
    if self._command_server.is_alive() and runner.is_alive():
        runner.join()
    _shutdown_pipe(self._pipe)
    self._task.stop()
|
def function[shutdown, parameter[self]]:
constant[ Shuts down the daemon process.
]
if <ast.UnaryOp object at 0x7da18bc73eb0> begin[:]
name[self]._exited assign[=] constant[True]
if call[name[self]._task_runner.is_alive, parameter[]] begin[:]
call[name[self]._task_runner.terminate, parameter[]]
if call[name[self]._command_server.is_alive, parameter[]] begin[:]
if call[name[self]._task_runner.is_alive, parameter[]] begin[:]
call[name[self]._task_runner.join, parameter[]]
call[name[_shutdown_pipe], parameter[name[self]._pipe]]
call[name[self]._task.stop, parameter[]]
|
keyword[def] identifier[shutdown] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_exited] :
identifier[self] . identifier[_exited] = keyword[True]
keyword[if] identifier[self] . identifier[_task_runner] . identifier[is_alive] ():
identifier[self] . identifier[_task_runner] . identifier[terminate] ()
keyword[if] identifier[self] . identifier[_command_server] . identifier[is_alive] ():
keyword[if] identifier[self] . identifier[_task_runner] . identifier[is_alive] ():
identifier[self] . identifier[_task_runner] . identifier[join] ()
identifier[_shutdown_pipe] ( identifier[self] . identifier[_pipe] )
identifier[self] . identifier[_task] . identifier[stop] ()
|
def shutdown(self):
""" Shuts down the daemon process.
"""
if not self._exited:
self._exited = True
# signal task runner to terminate via SIGTERM
if self._task_runner.is_alive():
self._task_runner.terminate()
# if command server is running, then block until
# task runner completes so it has time to use
# the command server to clean up root plugins
if self._command_server.is_alive():
if self._task_runner.is_alive():
self._task_runner.join() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
_shutdown_pipe(self._pipe)
self._task.stop() # depends on [control=['if'], data=[]]
|
def capitalize_unicode_name(s):
    """
    Turns a string such as 'capital delta' into the shortened,
    capitalized version, in this case simply 'Delta'. Used as a
    transform in sanitize_identifier.

    Strings without 'capital', and strings where nothing follows
    'capital', are returned unchanged.
    """
    index = s.find('capital')
    if index == -1:
        return s
    tail = s[index:].replace('capital', '').strip()
    if not tail:
        # nothing follows 'capital' (e.g. s == 'capital'); previously
        # tail[0] raised IndexError here
        return s
    return s[:index] + tail[0].upper() + tail[1:]
|
def function[capitalize_unicode_name, parameter[s]]:
constant[
Turns a string such as 'capital delta' into the shortened,
capitalized version, in this case simply 'Delta'. Used as a
transform in sanitize_identifier.
]
variable[index] assign[=] call[name[s].find, parameter[constant[capital]]]
if compare[name[index] equal[==] <ast.UnaryOp object at 0x7da2044c1810>] begin[:]
return[name[s]]
variable[tail] assign[=] call[call[call[name[s]][<ast.Slice object at 0x7da18fe912a0>].replace, parameter[constant[capital], constant[]]].strip, parameter[]]
variable[tail] assign[=] binary_operation[call[call[name[tail]][constant[0]].upper, parameter[]] + call[name[tail]][<ast.Slice object at 0x7da18dc05ff0>]]
return[binary_operation[call[name[s]][<ast.Slice object at 0x7da18dc04a30>] + name[tail]]]
|
keyword[def] identifier[capitalize_unicode_name] ( identifier[s] ):
literal[string]
identifier[index] = identifier[s] . identifier[find] ( literal[string] )
keyword[if] identifier[index] ==- literal[int] : keyword[return] identifier[s]
identifier[tail] = identifier[s] [ identifier[index] :]. identifier[replace] ( literal[string] , literal[string] ). identifier[strip] ()
identifier[tail] = identifier[tail] [ literal[int] ]. identifier[upper] ()+ identifier[tail] [ literal[int] :]
keyword[return] identifier[s] [: identifier[index] ]+ identifier[tail]
|
def capitalize_unicode_name(s):
"""
Turns a string such as 'capital delta' into the shortened,
capitalized version, in this case simply 'Delta'. Used as a
transform in sanitize_identifier.
"""
index = s.find('capital')
if index == -1:
return s # depends on [control=['if'], data=[]]
tail = s[index:].replace('capital', '').strip()
tail = tail[0].upper() + tail[1:]
return s[:index] + tail
|
def _scrub_participant_table(path_to_data):
    """Scrub PII from the given participant table.

    Replaces each row's ``worker_id`` with the participant's ``id`` and
    rewrites ``unique_id`` as ``"<id>:<assignment_id>"``, writing to a
    sidecar file that then replaces the original CSV.

    :param path_to_data: directory containing ``participant.csv``
    """
    path = os.path.join(path_to_data, "participant.csv")
    tmp_path = "{}.0".format(path)
    # use distinct handle names so the ``input`` builtin is not shadowed
    with open_for_csv(path, "r") as infile, open(tmp_path, "w") as outfile:
        reader = csv.reader(infile)
        writer = csv.writer(outfile)
        headers = next(reader)
        writer.writerow(headers)
        id_col = headers.index("id")
        worker_col = headers.index("worker_id")
        unique_col = headers.index("unique_id")
        assignment_col = headers.index("assignment_id")
        for row in reader:
            row[worker_col] = row[id_col]
            row[unique_col] = "{}:{}".format(row[id_col], row[assignment_col])
            writer.writerow(row)
    os.rename(tmp_path, path)
|
def function[_scrub_participant_table, parameter[path_to_data]]:
constant[Scrub PII from the given participant table.]
variable[path] assign[=] call[name[os].path.join, parameter[name[path_to_data], constant[participant.csv]]]
with call[name[open_for_csv], parameter[name[path], constant[r]]] begin[:]
variable[reader] assign[=] call[name[csv].reader, parameter[name[input]]]
variable[writer] assign[=] call[name[csv].writer, parameter[name[output]]]
variable[headers] assign[=] call[name[next], parameter[name[reader]]]
call[name[writer].writerow, parameter[name[headers]]]
for taget[tuple[[<ast.Name object at 0x7da1b040e710>, <ast.Name object at 0x7da1b040d000>]]] in starred[call[name[enumerate], parameter[name[reader]]]] begin[:]
call[name[row]][call[name[headers].index, parameter[constant[worker_id]]]] assign[=] call[name[row]][call[name[headers].index, parameter[constant[id]]]]
call[name[row]][call[name[headers].index, parameter[constant[unique_id]]]] assign[=] call[constant[{}:{}].format, parameter[call[name[row]][call[name[headers].index, parameter[constant[id]]]], call[name[row]][call[name[headers].index, parameter[constant[assignment_id]]]]]]
call[name[writer].writerow, parameter[name[row]]]
call[name[os].rename, parameter[call[constant[{}.0].format, parameter[name[path]]], name[path]]]
|
keyword[def] identifier[_scrub_participant_table] ( identifier[path_to_data] ):
literal[string]
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path_to_data] , literal[string] )
keyword[with] identifier[open_for_csv] ( identifier[path] , literal[string] ) keyword[as] identifier[input] , identifier[open] ( literal[string] . identifier[format] ( identifier[path] ), literal[string] ) keyword[as] identifier[output] :
identifier[reader] = identifier[csv] . identifier[reader] ( identifier[input] )
identifier[writer] = identifier[csv] . identifier[writer] ( identifier[output] )
identifier[headers] = identifier[next] ( identifier[reader] )
identifier[writer] . identifier[writerow] ( identifier[headers] )
keyword[for] identifier[i] , identifier[row] keyword[in] identifier[enumerate] ( identifier[reader] ):
identifier[row] [ identifier[headers] . identifier[index] ( literal[string] )]= identifier[row] [ identifier[headers] . identifier[index] ( literal[string] )]
identifier[row] [ identifier[headers] . identifier[index] ( literal[string] )]= literal[string] . identifier[format] (
identifier[row] [ identifier[headers] . identifier[index] ( literal[string] )], identifier[row] [ identifier[headers] . identifier[index] ( literal[string] )]
)
identifier[writer] . identifier[writerow] ( identifier[row] )
identifier[os] . identifier[rename] ( literal[string] . identifier[format] ( identifier[path] ), identifier[path] )
|
def _scrub_participant_table(path_to_data):
"""Scrub PII from the given participant table."""
path = os.path.join(path_to_data, 'participant.csv')
with open_for_csv(path, 'r') as input, open('{}.0'.format(path), 'w') as output:
reader = csv.reader(input)
writer = csv.writer(output)
headers = next(reader)
writer.writerow(headers)
for (i, row) in enumerate(reader):
row[headers.index('worker_id')] = row[headers.index('id')]
row[headers.index('unique_id')] = '{}:{}'.format(row[headers.index('id')], row[headers.index('assignment_id')])
writer.writerow(row) # depends on [control=['for'], data=[]]
os.rename('{}.0'.format(path), path) # depends on [control=['with'], data=['input']]
|
def _change_height(self, ax, new_value):
"""Make bars in horizontal bar chart thinner"""
for patch in ax.patches:
current_height = patch.get_height()
diff = current_height - new_value
# we change the bar height
patch.set_height(new_value)
# we recenter the bar
patch.set_y(patch.get_y() + diff * .5)
|
def function[_change_height, parameter[self, ax, new_value]]:
constant[Make bars in horizontal bar chart thinner]
for taget[name[patch]] in starred[name[ax].patches] begin[:]
variable[current_height] assign[=] call[name[patch].get_height, parameter[]]
variable[diff] assign[=] binary_operation[name[current_height] - name[new_value]]
call[name[patch].set_height, parameter[name[new_value]]]
call[name[patch].set_y, parameter[binary_operation[call[name[patch].get_y, parameter[]] + binary_operation[name[diff] * constant[0.5]]]]]
|
keyword[def] identifier[_change_height] ( identifier[self] , identifier[ax] , identifier[new_value] ):
literal[string]
keyword[for] identifier[patch] keyword[in] identifier[ax] . identifier[patches] :
identifier[current_height] = identifier[patch] . identifier[get_height] ()
identifier[diff] = identifier[current_height] - identifier[new_value]
identifier[patch] . identifier[set_height] ( identifier[new_value] )
identifier[patch] . identifier[set_y] ( identifier[patch] . identifier[get_y] ()+ identifier[diff] * literal[int] )
|
def _change_height(self, ax, new_value):
"""Make bars in horizontal bar chart thinner"""
for patch in ax.patches:
current_height = patch.get_height()
diff = current_height - new_value
# we change the bar height
patch.set_height(new_value)
# we recenter the bar
patch.set_y(patch.get_y() + diff * 0.5) # depends on [control=['for'], data=['patch']]
|
def log_accept(self, block_id, vtxindex, opcode, op_data):
    """
    Log an accepted operation
    """
    serialized = json.dumps(op_data, sort_keys=True)
    log.debug("ACCEPT op {} at ({}, {}) ({})".format(opcode, block_id, vtxindex, serialized))
|
def function[log_accept, parameter[self, block_id, vtxindex, opcode, op_data]]:
constant[
Log an accepted operation
]
call[name[log].debug, parameter[call[constant[ACCEPT op {} at ({}, {}) ({})].format, parameter[name[opcode], name[block_id], name[vtxindex], call[name[json].dumps, parameter[name[op_data]]]]]]]
|
keyword[def] identifier[log_accept] ( identifier[self] , identifier[block_id] , identifier[vtxindex] , identifier[opcode] , identifier[op_data] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[opcode] , identifier[block_id] , identifier[vtxindex] , identifier[json] . identifier[dumps] ( identifier[op_data] , identifier[sort_keys] = keyword[True] )))
|
def log_accept(self, block_id, vtxindex, opcode, op_data):
"""
Log an accepted operation
"""
log.debug('ACCEPT op {} at ({}, {}) ({})'.format(opcode, block_id, vtxindex, json.dumps(op_data, sort_keys=True)))
|
def sparsify_rows(x, quantile=0.01):
    '''
    Return a row-sparse matrix approximating the input `x`.

    For every row, the smallest-magnitude entries accounting for at most
    the given ``quantile`` of the row's total magnitude are discarded.

    Parameters
    ----------
    x : np.ndarray [ndim <= 2]
        The input matrix to sparsify.  A 1-d input is treated as a single
        row vector, so ``x_sparse.shape == (1, len(x))``.
    quantile : float in [0, 1.0)
        Percentage of magnitude to discard in each row of `x`

    Returns
    -------
    x_sparse : `scipy.sparse.csr_matrix` [shape=x.shape]
        Row-sparsified approximation of `x`

    Raises
    ------
    ParameterError
        If `x.ndim > 2`, or if `quantile` lies outside `[0, 1.0)`

    Notes
    -----
    This function caches at level 40.
    '''
    if x.ndim > 2:
        raise ParameterError('Input must have 2 or fewer dimensions. '
                             'Provided x.shape={}.'.format(x.shape))
    if x.ndim == 1:
        x = x.reshape((1, -1))

    if not 0.0 <= quantile < 1:
        raise ParameterError('Invalid quantile {:.2f}'.format(quantile))

    sparse_out = scipy.sparse.lil_matrix(x.shape, dtype=x.dtype)

    magnitudes = np.abs(x)
    row_totals = np.sum(magnitudes, axis=1, keepdims=True)

    # Per row: sort magnitudes ascending and accumulate the normalized sum
    sorted_mags = np.sort(magnitudes, axis=1)
    cum_frac = np.cumsum(sorted_mags / row_totals, axis=1)

    # First column where the cumulative fraction reaches the quantile
    cutoff_cols = np.argmin(cum_frac < quantile, axis=1)

    for row, col in enumerate(cutoff_cols):
        # keep every entry at least as large as the per-row threshold
        keep = np.where(magnitudes[row] >= sorted_mags[row, col])
        sparse_out[row, keep] = x[row, keep]

    return sparse_out.tocsr()
|
def function[sparsify_rows, parameter[x, quantile]]:
constant[
Return a row-sparse matrix approximating the input `x`.
Parameters
----------
x : np.ndarray [ndim <= 2]
The input matrix to sparsify.
quantile : float in [0, 1.0)
Percentage of magnitude to discard in each row of `x`
Returns
-------
x_sparse : `scipy.sparse.csr_matrix` [shape=x.shape]
Row-sparsified approximation of `x`
If `x.ndim == 1`, then `x` is interpreted as a row vector,
and `x_sparse.shape == (1, len(x))`.
Raises
------
ParameterError
If `x.ndim > 2`
If `quantile` lies outside `[0, 1.0)`
Notes
-----
This function caches at level 40.
Examples
--------
>>> # Construct a Hann window to sparsify
>>> x = scipy.signal.hann(32)
>>> x
array([ 0. , 0.01 , 0.041, 0.09 , 0.156, 0.236, 0.326,
0.424, 0.525, 0.625, 0.72 , 0.806, 0.879, 0.937,
0.977, 0.997, 0.997, 0.977, 0.937, 0.879, 0.806,
0.72 , 0.625, 0.525, 0.424, 0.326, 0.236, 0.156,
0.09 , 0.041, 0.01 , 0. ])
>>> # Discard the bottom percentile
>>> x_sparse = librosa.util.sparsify_rows(x, quantile=0.01)
>>> x_sparse
<1x32 sparse matrix of type '<type 'numpy.float64'>'
with 26 stored elements in Compressed Sparse Row format>
>>> x_sparse.todense()
matrix([[ 0. , 0. , 0. , 0.09 , 0.156, 0.236, 0.326,
0.424, 0.525, 0.625, 0.72 , 0.806, 0.879, 0.937,
0.977, 0.997, 0.997, 0.977, 0.937, 0.879, 0.806,
0.72 , 0.625, 0.525, 0.424, 0.326, 0.236, 0.156,
0.09 , 0. , 0. , 0. ]])
>>> # Discard up to the bottom 10th percentile
>>> x_sparse = librosa.util.sparsify_rows(x, quantile=0.1)
>>> x_sparse
<1x32 sparse matrix of type '<type 'numpy.float64'>'
with 20 stored elements in Compressed Sparse Row format>
>>> x_sparse.todense()
matrix([[ 0. , 0. , 0. , 0. , 0. , 0. , 0.326,
0.424, 0.525, 0.625, 0.72 , 0.806, 0.879, 0.937,
0.977, 0.997, 0.997, 0.977, 0.937, 0.879, 0.806,
0.72 , 0.625, 0.525, 0.424, 0.326, 0. , 0. ,
0. , 0. , 0. , 0. ]])
]
if compare[name[x].ndim equal[==] constant[1]] begin[:]
variable[x] assign[=] call[name[x].reshape, parameter[tuple[[<ast.Constant object at 0x7da18f09cd60>, <ast.UnaryOp object at 0x7da18f09fc70>]]]]
if <ast.UnaryOp object at 0x7da2047e9690> begin[:]
<ast.Raise object at 0x7da2047eb010>
variable[x_sparse] assign[=] call[name[scipy].sparse.lil_matrix, parameter[name[x].shape]]
variable[mags] assign[=] call[name[np].abs, parameter[name[x]]]
variable[norms] assign[=] call[name[np].sum, parameter[name[mags]]]
variable[mag_sort] assign[=] call[name[np].sort, parameter[name[mags]]]
variable[cumulative_mag] assign[=] call[name[np].cumsum, parameter[binary_operation[name[mag_sort] / name[norms]]]]
variable[threshold_idx] assign[=] call[name[np].argmin, parameter[compare[name[cumulative_mag] less[<] name[quantile]]]]
for taget[tuple[[<ast.Name object at 0x7da2047eb070>, <ast.Name object at 0x7da2047ea230>]]] in starred[call[name[enumerate], parameter[name[threshold_idx]]]] begin[:]
variable[idx] assign[=] call[name[np].where, parameter[compare[call[name[mags]][name[i]] greater_or_equal[>=] call[name[mag_sort]][tuple[[<ast.Name object at 0x7da18ede6530>, <ast.Name object at 0x7da18ede5360>]]]]]]
call[name[x_sparse]][tuple[[<ast.Name object at 0x7da18ede7a30>, <ast.Name object at 0x7da18ede46d0>]]] assign[=] call[name[x]][tuple[[<ast.Name object at 0x7da18ede4910>, <ast.Name object at 0x7da18ede72e0>]]]
return[call[name[x_sparse].tocsr, parameter[]]]
|
keyword[def] identifier[sparsify_rows] ( identifier[x] , identifier[quantile] = literal[int] ):
literal[string]
keyword[if] identifier[x] . identifier[ndim] == literal[int] :
identifier[x] = identifier[x] . identifier[reshape] (( literal[int] ,- literal[int] ))
keyword[elif] identifier[x] . identifier[ndim] > literal[int] :
keyword[raise] identifier[ParameterError] ( literal[string]
literal[string] . identifier[format] ( identifier[x] . identifier[shape] ))
keyword[if] keyword[not] literal[int] <= identifier[quantile] < literal[int] :
keyword[raise] identifier[ParameterError] ( literal[string] . identifier[format] ( identifier[quantile] ))
identifier[x_sparse] = identifier[scipy] . identifier[sparse] . identifier[lil_matrix] ( identifier[x] . identifier[shape] , identifier[dtype] = identifier[x] . identifier[dtype] )
identifier[mags] = identifier[np] . identifier[abs] ( identifier[x] )
identifier[norms] = identifier[np] . identifier[sum] ( identifier[mags] , identifier[axis] = literal[int] , identifier[keepdims] = keyword[True] )
identifier[mag_sort] = identifier[np] . identifier[sort] ( identifier[mags] , identifier[axis] = literal[int] )
identifier[cumulative_mag] = identifier[np] . identifier[cumsum] ( identifier[mag_sort] / identifier[norms] , identifier[axis] = literal[int] )
identifier[threshold_idx] = identifier[np] . identifier[argmin] ( identifier[cumulative_mag] < identifier[quantile] , identifier[axis] = literal[int] )
keyword[for] identifier[i] , identifier[j] keyword[in] identifier[enumerate] ( identifier[threshold_idx] ):
identifier[idx] = identifier[np] . identifier[where] ( identifier[mags] [ identifier[i] ]>= identifier[mag_sort] [ identifier[i] , identifier[j] ])
identifier[x_sparse] [ identifier[i] , identifier[idx] ]= identifier[x] [ identifier[i] , identifier[idx] ]
keyword[return] identifier[x_sparse] . identifier[tocsr] ()
|
def sparsify_rows(x, quantile=0.01):
"""
Return a row-sparse matrix approximating the input `x`.
Parameters
----------
x : np.ndarray [ndim <= 2]
The input matrix to sparsify.
quantile : float in [0, 1.0)
Percentage of magnitude to discard in each row of `x`
Returns
-------
x_sparse : `scipy.sparse.csr_matrix` [shape=x.shape]
Row-sparsified approximation of `x`
If `x.ndim == 1`, then `x` is interpreted as a row vector,
and `x_sparse.shape == (1, len(x))`.
Raises
------
ParameterError
If `x.ndim > 2`
If `quantile` lies outside `[0, 1.0)`
Notes
-----
This function caches at level 40.
Examples
--------
>>> # Construct a Hann window to sparsify
>>> x = scipy.signal.hann(32)
>>> x
array([ 0. , 0.01 , 0.041, 0.09 , 0.156, 0.236, 0.326,
0.424, 0.525, 0.625, 0.72 , 0.806, 0.879, 0.937,
0.977, 0.997, 0.997, 0.977, 0.937, 0.879, 0.806,
0.72 , 0.625, 0.525, 0.424, 0.326, 0.236, 0.156,
0.09 , 0.041, 0.01 , 0. ])
>>> # Discard the bottom percentile
>>> x_sparse = librosa.util.sparsify_rows(x, quantile=0.01)
>>> x_sparse
<1x32 sparse matrix of type '<type 'numpy.float64'>'
with 26 stored elements in Compressed Sparse Row format>
>>> x_sparse.todense()
matrix([[ 0. , 0. , 0. , 0.09 , 0.156, 0.236, 0.326,
0.424, 0.525, 0.625, 0.72 , 0.806, 0.879, 0.937,
0.977, 0.997, 0.997, 0.977, 0.937, 0.879, 0.806,
0.72 , 0.625, 0.525, 0.424, 0.326, 0.236, 0.156,
0.09 , 0. , 0. , 0. ]])
>>> # Discard up to the bottom 10th percentile
>>> x_sparse = librosa.util.sparsify_rows(x, quantile=0.1)
>>> x_sparse
<1x32 sparse matrix of type '<type 'numpy.float64'>'
with 20 stored elements in Compressed Sparse Row format>
>>> x_sparse.todense()
matrix([[ 0. , 0. , 0. , 0. , 0. , 0. , 0.326,
0.424, 0.525, 0.625, 0.72 , 0.806, 0.879, 0.937,
0.977, 0.997, 0.997, 0.977, 0.937, 0.879, 0.806,
0.72 , 0.625, 0.525, 0.424, 0.326, 0. , 0. ,
0. , 0. , 0. , 0. ]])
"""
if x.ndim == 1:
x = x.reshape((1, -1)) # depends on [control=['if'], data=[]]
elif x.ndim > 2:
raise ParameterError('Input must have 2 or fewer dimensions. Provided x.shape={}.'.format(x.shape)) # depends on [control=['if'], data=[]]
if not 0.0 <= quantile < 1:
raise ParameterError('Invalid quantile {:.2f}'.format(quantile)) # depends on [control=['if'], data=[]]
x_sparse = scipy.sparse.lil_matrix(x.shape, dtype=x.dtype)
mags = np.abs(x)
norms = np.sum(mags, axis=1, keepdims=True)
mag_sort = np.sort(mags, axis=1)
cumulative_mag = np.cumsum(mag_sort / norms, axis=1)
threshold_idx = np.argmin(cumulative_mag < quantile, axis=1)
for (i, j) in enumerate(threshold_idx):
idx = np.where(mags[i] >= mag_sort[i, j])
x_sparse[i, idx] = x[i, idx] # depends on [control=['for'], data=[]]
return x_sparse.tocsr()
|
def _local_update(self, rdict):
"""Call this with a response dictionary to update instance attrs.
If the response has only valid keys, stash meta_data, replace __dict__,
and reassign meta_data.
:param rdict: response attributes derived from server JSON
"""
sanitized = self._check_keys(rdict)
temp_meta = self._meta_data
self.__dict__ = sanitized
self._meta_data = temp_meta
|
def function[_local_update, parameter[self, rdict]]:
constant[Call this with a response dictionary to update instance attrs.
If the response has only valid keys, stash meta_data, replace __dict__,
and reassign meta_data.
:param rdict: response attributes derived from server JSON
]
variable[sanitized] assign[=] call[name[self]._check_keys, parameter[name[rdict]]]
variable[temp_meta] assign[=] name[self]._meta_data
name[self].__dict__ assign[=] name[sanitized]
name[self]._meta_data assign[=] name[temp_meta]
|
keyword[def] identifier[_local_update] ( identifier[self] , identifier[rdict] ):
literal[string]
identifier[sanitized] = identifier[self] . identifier[_check_keys] ( identifier[rdict] )
identifier[temp_meta] = identifier[self] . identifier[_meta_data]
identifier[self] . identifier[__dict__] = identifier[sanitized]
identifier[self] . identifier[_meta_data] = identifier[temp_meta]
|
def _local_update(self, rdict):
"""Call this with a response dictionary to update instance attrs.
If the response has only valid keys, stash meta_data, replace __dict__,
and reassign meta_data.
:param rdict: response attributes derived from server JSON
"""
sanitized = self._check_keys(rdict)
temp_meta = self._meta_data
self.__dict__ = sanitized
self._meta_data = temp_meta
|
def edit(self,
         billing_email=None,
         company=None,
         email=None,
         location=None,
         name=None):
    """Edit this organization.

    Sends a PATCH with only the non-None fields; no request is made when
    every field is None.

    :param str billing_email: (optional) Billing email address (private)
    :param str company: (optional)
    :param str email: (optional) Public email address
    :param str location: (optional)
    :param str name: (optional)
    :returns: bool
    """
    data = {
        'billing_email': billing_email,
        'company': company,
        'email': email,
        'location': location,
        'name': name,
    }
    self._remove_none(data)
    if not data:
        return False
    json = self._json(self._patch(self._api, data=dumps(data)), 200)
    if not json:
        return False
    self._update_(json)
    return True
|
def function[edit, parameter[self, billing_email, company, email, location, name]]:
constant[Edit this organization.
:param str billing_email: (optional) Billing email address (private)
:param str company: (optional)
:param str email: (optional) Public email address
:param str location: (optional)
:param str name: (optional)
:returns: bool
]
variable[json] assign[=] constant[None]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b0f2b670>, <ast.Constant object at 0x7da1b0f2a980>, <ast.Constant object at 0x7da1b0f2a770>, <ast.Constant object at 0x7da1b0f2a0e0>, <ast.Constant object at 0x7da1b0f2b100>], [<ast.Name object at 0x7da1b0f2b700>, <ast.Name object at 0x7da1b0f2bb50>, <ast.Name object at 0x7da1b0f29f60>, <ast.Name object at 0x7da1b0f297e0>, <ast.Name object at 0x7da1b0f2a800>]]
call[name[self]._remove_none, parameter[name[data]]]
if name[data] begin[:]
variable[json] assign[=] call[name[self]._json, parameter[call[name[self]._patch, parameter[name[self]._api]], constant[200]]]
if name[json] begin[:]
call[name[self]._update_, parameter[name[json]]]
return[constant[True]]
return[constant[False]]
|
keyword[def] identifier[edit] ( identifier[self] ,
identifier[billing_email] = keyword[None] ,
identifier[company] = keyword[None] ,
identifier[email] = keyword[None] ,
identifier[location] = keyword[None] ,
identifier[name] = keyword[None] ):
literal[string]
identifier[json] = keyword[None]
identifier[data] ={ literal[string] : identifier[billing_email] , literal[string] : identifier[company] ,
literal[string] : identifier[email] , literal[string] : identifier[location] , literal[string] : identifier[name] }
identifier[self] . identifier[_remove_none] ( identifier[data] )
keyword[if] identifier[data] :
identifier[json] = identifier[self] . identifier[_json] ( identifier[self] . identifier[_patch] ( identifier[self] . identifier[_api] , identifier[data] = identifier[dumps] ( identifier[data] )), literal[int] )
keyword[if] identifier[json] :
identifier[self] . identifier[_update_] ( identifier[json] )
keyword[return] keyword[True]
keyword[return] keyword[False]
|
def edit(self, billing_email=None, company=None, email=None, location=None, name=None):
"""Edit this organization.
:param str billing_email: (optional) Billing email address (private)
:param str company: (optional)
:param str email: (optional) Public email address
:param str location: (optional)
:param str name: (optional)
:returns: bool
"""
json = None
data = {'billing_email': billing_email, 'company': company, 'email': email, 'location': location, 'name': name}
self._remove_none(data)
if data:
json = self._json(self._patch(self._api, data=dumps(data)), 200) # depends on [control=['if'], data=[]]
if json:
self._update_(json)
return True # depends on [control=['if'], data=[]]
return False
|
def _skip_children(self, check_name, results):
"""
Recursively skip the children of check_name (presumably because check_name
did not pass).
"""
for name, description in self.child_map[check_name]:
if results[name] is None:
results[name] = CheckResult(name=name, description=_(description),
passed=None,
dependency=check_name,
cause={"rationale": _("can't check until a frown turns upside down")})
self._skip_children(name, results)
|
def function[_skip_children, parameter[self, check_name, results]]:
constant[
Recursively skip the children of check_name (presumably because check_name
did not pass).
]
for taget[tuple[[<ast.Name object at 0x7da1b16b0790>, <ast.Name object at 0x7da1b16b1d50>]]] in starred[call[name[self].child_map][name[check_name]]] begin[:]
if compare[call[name[results]][name[name]] is constant[None]] begin[:]
call[name[results]][name[name]] assign[=] call[name[CheckResult], parameter[]]
call[name[self]._skip_children, parameter[name[name], name[results]]]
|
keyword[def] identifier[_skip_children] ( identifier[self] , identifier[check_name] , identifier[results] ):
literal[string]
keyword[for] identifier[name] , identifier[description] keyword[in] identifier[self] . identifier[child_map] [ identifier[check_name] ]:
keyword[if] identifier[results] [ identifier[name] ] keyword[is] keyword[None] :
identifier[results] [ identifier[name] ]= identifier[CheckResult] ( identifier[name] = identifier[name] , identifier[description] = identifier[_] ( identifier[description] ),
identifier[passed] = keyword[None] ,
identifier[dependency] = identifier[check_name] ,
identifier[cause] ={ literal[string] : identifier[_] ( literal[string] )})
identifier[self] . identifier[_skip_children] ( identifier[name] , identifier[results] )
|
def _skip_children(self, check_name, results):
"""
Recursively skip the children of check_name (presumably because check_name
did not pass).
"""
for (name, description) in self.child_map[check_name]:
if results[name] is None:
results[name] = CheckResult(name=name, description=_(description), passed=None, dependency=check_name, cause={'rationale': _("can't check until a frown turns upside down")})
self._skip_children(name, results) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
|
def play(state):
    """ Play sound for a given state.
    :param state: a State value.

    Unrecognized states play nothing.
    """
    # state -> sound file lookup table; checked in order with ==
    state_sounds = (
        (SoundService.State.welcome, "pad_glow_welcome1.wav"),
        (SoundService.State.goodbye, "pad_glow_power_off.wav"),
        (SoundService.State.hotword_detected, "pad_soft_on.wav"),
        (SoundService.State.asr_text_captured, "pad_soft_off.wav"),
        (SoundService.State.error, "music_marimba_error_chord_2x.wav"),
    )
    for candidate, filename in state_sounds:
        if state == candidate:
            AudioPlayer.play_async("{}/{}".format(ABS_SOUND_DIR, filename))
            break
|
def function[play, parameter[state]]:
constant[ Play sound for a given state.
:param state: a State value.
]
variable[filename] assign[=] constant[None]
if compare[name[state] equal[==] name[SoundService].State.welcome] begin[:]
variable[filename] assign[=] constant[pad_glow_welcome1.wav]
if compare[name[filename] is_not constant[None]] begin[:]
call[name[AudioPlayer].play_async, parameter[call[constant[{}/{}].format, parameter[name[ABS_SOUND_DIR], name[filename]]]]]
|
keyword[def] identifier[play] ( identifier[state] ):
literal[string]
identifier[filename] = keyword[None]
keyword[if] identifier[state] == identifier[SoundService] . identifier[State] . identifier[welcome] :
identifier[filename] = literal[string]
keyword[elif] identifier[state] == identifier[SoundService] . identifier[State] . identifier[goodbye] :
identifier[filename] = literal[string]
keyword[elif] identifier[state] == identifier[SoundService] . identifier[State] . identifier[hotword_detected] :
identifier[filename] = literal[string]
keyword[elif] identifier[state] == identifier[SoundService] . identifier[State] . identifier[asr_text_captured] :
identifier[filename] = literal[string]
keyword[elif] identifier[state] == identifier[SoundService] . identifier[State] . identifier[error] :
identifier[filename] = literal[string]
keyword[if] identifier[filename] keyword[is] keyword[not] keyword[None] :
identifier[AudioPlayer] . identifier[play_async] ( literal[string] . identifier[format] ( identifier[ABS_SOUND_DIR] , identifier[filename] ))
|
def play(state):
""" Play sound for a given state.
:param state: a State value.
"""
filename = None
if state == SoundService.State.welcome:
filename = 'pad_glow_welcome1.wav' # depends on [control=['if'], data=[]]
elif state == SoundService.State.goodbye:
filename = 'pad_glow_power_off.wav' # depends on [control=['if'], data=[]]
elif state == SoundService.State.hotword_detected:
filename = 'pad_soft_on.wav' # depends on [control=['if'], data=[]]
elif state == SoundService.State.asr_text_captured:
filename = 'pad_soft_off.wav' # depends on [control=['if'], data=[]]
elif state == SoundService.State.error:
filename = 'music_marimba_error_chord_2x.wav' # depends on [control=['if'], data=[]]
if filename is not None:
AudioPlayer.play_async('{}/{}'.format(ABS_SOUND_DIR, filename)) # depends on [control=['if'], data=['filename']]
|
def send(self, data):
    """Send a command string to the GC100 socket as ASCII bytes.

    :param data: command string to transmit.
    """
    # Lazy %-style args: the message is only rendered if DEBUG is enabled,
    # instead of always paying for the concatenation.
    _LOGGER.debug("send: %s", data)
    self.socket.send(data.encode('ascii'))
    # sleep needed to prevent flooding the GC100 with sends
    sleep(.01)
|
def function[send, parameter[self, data]]:
constant[Send data to socket.]
call[name[_LOGGER].debug, parameter[binary_operation[constant[send: ] + name[data]]]]
call[name[self].socket.send, parameter[call[name[data].encode, parameter[constant[ascii]]]]]
call[name[sleep], parameter[constant[0.01]]]
|
keyword[def] identifier[send] ( identifier[self] , identifier[data] ):
literal[string]
identifier[_LOGGER] . identifier[debug] ( literal[string] + identifier[data] )
identifier[self] . identifier[socket] . identifier[send] ( identifier[data] . identifier[encode] ( literal[string] ))
identifier[sleep] ( literal[int] )
|
def send(self, data):
"""Send data to socket.""" # send message
_LOGGER.debug('send: ' + data)
self.socket.send(data.encode('ascii')) # sleep needed to prevent flooding the GC100 with sends
sleep(0.01)
|
def _write(self, session, openFile, replaceParamFile):
"""
Replace Param File Write to File Method
"""
# Retrieve TargetParameter objects
targets = self.targetParameters
# Write lines
openFile.write('%s\n' % self.numParameters)
for target in targets:
openFile.write('%s %s\n' % (target.targetVariable, target.varFormat))
|
def function[_write, parameter[self, session, openFile, replaceParamFile]]:
constant[
Replace Param File Write to File Method
]
variable[targets] assign[=] name[self].targetParameters
call[name[openFile].write, parameter[binary_operation[constant[%s
] <ast.Mod object at 0x7da2590d6920> name[self].numParameters]]]
for taget[name[target]] in starred[name[targets]] begin[:]
call[name[openFile].write, parameter[binary_operation[constant[%s %s
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6e7670>, <ast.Attribute object at 0x7da20c6e72b0>]]]]]
|
keyword[def] identifier[_write] ( identifier[self] , identifier[session] , identifier[openFile] , identifier[replaceParamFile] ):
literal[string]
identifier[targets] = identifier[self] . identifier[targetParameters]
identifier[openFile] . identifier[write] ( literal[string] % identifier[self] . identifier[numParameters] )
keyword[for] identifier[target] keyword[in] identifier[targets] :
identifier[openFile] . identifier[write] ( literal[string] %( identifier[target] . identifier[targetVariable] , identifier[target] . identifier[varFormat] ))
|
def _write(self, session, openFile, replaceParamFile):
"""
Replace Param File Write to File Method
"""
# Retrieve TargetParameter objects
targets = self.targetParameters
# Write lines
openFile.write('%s\n' % self.numParameters)
for target in targets:
openFile.write('%s %s\n' % (target.targetVariable, target.varFormat)) # depends on [control=['for'], data=['target']]
|
def hybrid_forward(self, F, *states):  # pylint: disable=arguments-differ
    """
    Parameters
    ----------
    states : list
        the stack outputs from RNN, which consists of output from each time step (TNC).

    Returns
    --------
    loss : NDArray
        loss tensor with shape (batch_size,). Dimensions other than batch_axis are averaged out.
    """
    # pylint: disable=unused-argument
    if self._beta == 0 or not states:
        # Regularization disabled, or nothing to penalize: zero loss.
        return F.zeros(1)
    # Penalize large changes between consecutive time steps of the last
    # stacked output (temporal activation regularization), scaled by beta.
    penalties = []
    for hidden in states[-1:]:
        step_diff = hidden[1:] - hidden[:-1]
        penalties.append(self._beta * step_diff.__pow__(2).mean())
    return F.add_n(*penalties)
|
def function[hybrid_forward, parameter[self, F]]:
constant[
Parameters
----------
states : list
the stack outputs from RNN, which consists of output from each time step (TNC).
Returns
--------
loss : NDArray
loss tensor with shape (batch_size,). Dimensions other than batch_axis are averaged out.
]
if compare[name[self]._beta not_equal[!=] constant[0]] begin[:]
if name[states] begin[:]
variable[means] assign[=] <ast.ListComp object at 0x7da1b2122530>
return[call[name[F].add_n, parameter[<ast.Starred object at 0x7da1b2120f10>]]]
return[call[name[F].zeros, parameter[constant[1]]]]
|
keyword[def] identifier[hybrid_forward] ( identifier[self] , identifier[F] ,* identifier[states] ):
literal[string]
keyword[if] identifier[self] . identifier[_beta] != literal[int] :
keyword[if] identifier[states] :
identifier[means] =[ identifier[self] . identifier[_beta] *( identifier[state] [ literal[int] :]- identifier[state] [:- literal[int] ]). identifier[__pow__] ( literal[int] ). identifier[mean] ()
keyword[for] identifier[state] keyword[in] identifier[states] [- literal[int] :]]
keyword[return] identifier[F] . identifier[add_n] (* identifier[means] )
keyword[else] :
keyword[return] identifier[F] . identifier[zeros] ( literal[int] )
keyword[return] identifier[F] . identifier[zeros] ( literal[int] )
|
def hybrid_forward(self, F, *states): # pylint: disable=arguments-differ
'\n Parameters\n ----------\n states : list\n the stack outputs from RNN, which consists of output from each time step (TNC).\n\n Returns\n --------\n loss : NDArray\n loss tensor with shape (batch_size,). Dimensions other than batch_axis are averaged out.\n '
# pylint: disable=unused-argument
if self._beta != 0:
if states:
means = [self._beta * (state[1:] - state[:-1]).__pow__(2).mean() for state in states[-1:]]
return F.add_n(*means) # depends on [control=['if'], data=[]]
else:
return F.zeros(1) # depends on [control=['if'], data=[]]
return F.zeros(1)
|
def start_file_logger(filename, rank, name='parsl', level=logging.DEBUG, format_string=None):
    """Add a file log handler and bind it to the module-level ``logger``.

    Args:
        - filename (string): Name of the file to write logs to
        - rank (int): Rank number, interpolated into each log record
        - name (string): Logger name
        - level (logging.LEVEL): Set the logging level.
        - format_string (string): Set the format string

    Returns:
        - None
    """
    log_dir = os.path.dirname(filename)
    # Guard: os.makedirs('') raises when filename has no directory part.
    if log_dir:
        try:
            # Keyword args instead of the opaque positional (511, True):
            # 511 == 0o777 was the mode, True was exist_ok.
            os.makedirs(log_dir, mode=0o777, exist_ok=True)
        except Exception as e:
            print("Caught exception with trying to make log dirs: {}".format(e))

    if format_string is None:
        # Only {0} is a str.format placeholder; %(...)s fields are for logging.
        format_string = "%(asctime)s %(name)s:%(lineno)d Rank:{0} [%(levelname)s] %(message)s".format(
            rank)
    global logger
    logger = logging.getLogger(name)
    # Logger passes everything through; the handler enforces the requested level.
    logger.setLevel(logging.DEBUG)
    handler = logging.FileHandler(filename)
    handler.setLevel(level)
    formatter = logging.Formatter(format_string, datefmt='%Y-%m-%d %H:%M:%S')
    handler.setFormatter(formatter)
    logger.addHandler(handler)
|
def function[start_file_logger, parameter[filename, rank, name, level, format_string]]:
constant[Add a stream log handler.
Args:
- filename (string): Name of the file to write logs to
- name (string): Logger name
- level (logging.LEVEL): Set the logging level.
- format_string (string): Set the format string
Returns:
- None
]
<ast.Try object at 0x7da1b014f520>
if compare[name[format_string] is constant[None]] begin[:]
variable[format_string] assign[=] call[constant[%(asctime)s %(name)s:%(lineno)d Rank:{0} [%(levelname)s] %(message)s].format, parameter[name[rank]]]
<ast.Global object at 0x7da1b014f370>
variable[logger] assign[=] call[name[logging].getLogger, parameter[name[name]]]
call[name[logger].setLevel, parameter[name[logging].DEBUG]]
variable[handler] assign[=] call[name[logging].FileHandler, parameter[name[filename]]]
call[name[handler].setLevel, parameter[name[level]]]
variable[formatter] assign[=] call[name[logging].Formatter, parameter[name[format_string]]]
call[name[handler].setFormatter, parameter[name[formatter]]]
call[name[logger].addHandler, parameter[name[handler]]]
|
keyword[def] identifier[start_file_logger] ( identifier[filename] , identifier[rank] , identifier[name] = literal[string] , identifier[level] = identifier[logging] . identifier[DEBUG] , identifier[format_string] = keyword[None] ):
literal[string]
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[filename] ), literal[int] , keyword[True] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[print] ( literal[string] . identifier[format] ( identifier[e] ))
keyword[if] identifier[format_string] keyword[is] keyword[None] :
identifier[format_string] = literal[string] . identifier[format] (
identifier[rank] )
keyword[global] identifier[logger]
identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[name] )
identifier[logger] . identifier[setLevel] ( identifier[logging] . identifier[DEBUG] )
identifier[handler] = identifier[logging] . identifier[FileHandler] ( identifier[filename] )
identifier[handler] . identifier[setLevel] ( identifier[level] )
identifier[formatter] = identifier[logging] . identifier[Formatter] ( identifier[format_string] , identifier[datefmt] = literal[string] )
identifier[handler] . identifier[setFormatter] ( identifier[formatter] )
identifier[logger] . identifier[addHandler] ( identifier[handler] )
|
def start_file_logger(filename, rank, name='parsl', level=logging.DEBUG, format_string=None):
"""Add a stream log handler.
Args:
- filename (string): Name of the file to write logs to
- name (string): Logger name
- level (logging.LEVEL): Set the logging level.
- format_string (string): Set the format string
Returns:
- None
"""
try:
os.makedirs(os.path.dirname(filename), 511, True) # depends on [control=['try'], data=[]]
except Exception as e:
print('Caught exception with trying to make log dirs: {}'.format(e)) # depends on [control=['except'], data=['e']]
if format_string is None:
format_string = '%(asctime)s %(name)s:%(lineno)d Rank:{0} [%(levelname)s] %(message)s'.format(rank) # depends on [control=['if'], data=['format_string']]
global logger
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
handler = logging.FileHandler(filename)
handler.setLevel(level)
formatter = logging.Formatter(format_string, datefmt='%Y-%m-%d %H:%M:%S')
handler.setFormatter(formatter)
logger.addHandler(handler)
|
def mtotdev(data, rate=1.0, data_type="phase", taus=None):
    """ PRELIMINARY - REQUIRES FURTHER TESTING.
    Modified Total deviation.
    Better confidence at long averages for modified Allan
    FIXME: bias-correction http://www.wriley.com/CI2.pdf page 6
    The variance is scaled up (divided by this number) based on the
    noise-type identified.
    WPM 0.94
    FPM 0.83
    WFM 0.73
    FFM 0.70
    RWFM 0.69

    Parameters
    ----------
    data: np.array
        Input data. Provide either phase or frequency (fractional,
        adimensional).
    rate: float
        The sampling rate for data, in Hz. Defaults to 1.0
    data_type: {'phase', 'freq'}
        Data type, i.e. phase or frequency. Defaults to "phase".
    taus: np.array
        Array of tau values, in seconds, for which to compute statistic.
        Optionally set taus=["all"|"octave"|"decade"] for automatic
        tau-list generation.

    NIST SP 1065 eqn (27) page 25
    """
    phase = input_to_phase(data, rate, data_type)
    # Cap averaging factor at N/3 so each estimate has enough samples.
    (phase, ms, taus_used) = tau_generator(phase, rate, taus,
                                           maximum_m=float(len(phase)) / 3.0)
    devs = np.zeros_like(taus_used)
    deverrs = np.zeros_like(taus_used)
    ns = np.zeros_like(taus_used)
    # One mtotdev estimate per averaging factor.
    for idx, mj in enumerate(ms):
        dev, err, n = calc_mtotdev_phase(phase, rate, mj)
        devs[idx] = dev
        deverrs[idx] = err
        ns[idx] = n
    return remove_small_ns(taus_used, devs, deverrs, ns)
|
def function[mtotdev, parameter[data, rate, data_type, taus]]:
constant[ PRELIMINARY - REQUIRES FURTHER TESTING.
Modified Total deviation.
Better confidence at long averages for modified Allan
FIXME: bias-correction http://www.wriley.com/CI2.pdf page 6
The variance is scaled up (divided by this number) based on the
noise-type identified.
WPM 0.94
FPM 0.83
WFM 0.73
FFM 0.70
RWFM 0.69
Parameters
----------
data: np.array
Input data. Provide either phase or frequency (fractional,
adimensional).
rate: float
The sampling rate for data, in Hz. Defaults to 1.0
data_type: {'phase', 'freq'}
Data type, i.e. phase or frequency. Defaults to "phase".
taus: np.array
Array of tau values, in seconds, for which to compute statistic.
Optionally set taus=["all"|"octave"|"decade"] for automatic
tau-list generation.
NIST SP 1065 eqn (27) page 25
]
variable[phase] assign[=] call[name[input_to_phase], parameter[name[data], name[rate], name[data_type]]]
<ast.Tuple object at 0x7da1b16bf820> assign[=] call[name[tau_generator], parameter[name[phase], name[rate], name[taus]]]
variable[devs] assign[=] call[name[np].zeros_like, parameter[name[taus_used]]]
variable[deverrs] assign[=] call[name[np].zeros_like, parameter[name[taus_used]]]
variable[ns] assign[=] call[name[np].zeros_like, parameter[name[taus_used]]]
for taget[tuple[[<ast.Name object at 0x7da1b16bc370>, <ast.Name object at 0x7da1b16bf0d0>]]] in starred[call[name[enumerate], parameter[name[ms]]]] begin[:]
<ast.Tuple object at 0x7da1b16be6b0> assign[=] call[name[calc_mtotdev_phase], parameter[name[phase], name[rate], name[mj]]]
return[call[name[remove_small_ns], parameter[name[taus_used], name[devs], name[deverrs], name[ns]]]]
|
keyword[def] identifier[mtotdev] ( identifier[data] , identifier[rate] = literal[int] , identifier[data_type] = literal[string] , identifier[taus] = keyword[None] ):
literal[string]
identifier[phase] = identifier[input_to_phase] ( identifier[data] , identifier[rate] , identifier[data_type] )
( identifier[phase] , identifier[ms] , identifier[taus_used] )= identifier[tau_generator] ( identifier[phase] , identifier[rate] , identifier[taus] ,
identifier[maximum_m] = identifier[float] ( identifier[len] ( identifier[phase] ))/ literal[int] )
identifier[devs] = identifier[np] . identifier[zeros_like] ( identifier[taus_used] )
identifier[deverrs] = identifier[np] . identifier[zeros_like] ( identifier[taus_used] )
identifier[ns] = identifier[np] . identifier[zeros_like] ( identifier[taus_used] )
keyword[for] identifier[idx] , identifier[mj] keyword[in] identifier[enumerate] ( identifier[ms] ):
identifier[devs] [ identifier[idx] ], identifier[deverrs] [ identifier[idx] ], identifier[ns] [ identifier[idx] ]= identifier[calc_mtotdev_phase] ( identifier[phase] , identifier[rate] , identifier[mj] )
keyword[return] identifier[remove_small_ns] ( identifier[taus_used] , identifier[devs] , identifier[deverrs] , identifier[ns] )
|
def mtotdev(data, rate=1.0, data_type='phase', taus=None):
""" PRELIMINARY - REQUIRES FURTHER TESTING.
Modified Total deviation.
Better confidence at long averages for modified Allan
FIXME: bias-correction http://www.wriley.com/CI2.pdf page 6
The variance is scaled up (divided by this number) based on the
noise-type identified.
WPM 0.94
FPM 0.83
WFM 0.73
FFM 0.70
RWFM 0.69
Parameters
----------
data: np.array
Input data. Provide either phase or frequency (fractional,
adimensional).
rate: float
The sampling rate for data, in Hz. Defaults to 1.0
data_type: {'phase', 'freq'}
Data type, i.e. phase or frequency. Defaults to "phase".
taus: np.array
Array of tau values, in seconds, for which to compute statistic.
Optionally set taus=["all"|"octave"|"decade"] for automatic
tau-list generation.
NIST SP 1065 eqn (27) page 25
"""
phase = input_to_phase(data, rate, data_type)
(phase, ms, taus_used) = tau_generator(phase, rate, taus, maximum_m=float(len(phase)) / 3.0)
devs = np.zeros_like(taus_used)
deverrs = np.zeros_like(taus_used)
ns = np.zeros_like(taus_used)
for (idx, mj) in enumerate(ms):
(devs[idx], deverrs[idx], ns[idx]) = calc_mtotdev_phase(phase, rate, mj) # depends on [control=['for'], data=[]]
return remove_small_ns(taus_used, devs, deverrs, ns)
|
def clean_translated_locales(configuration, langs=None):
    """
    Strips out the warning from all translated po files
    about being an English source file.
    """
    # Default to every translated locale known to the configuration.
    locales = langs if langs else configuration.translated_locales
    for locale_code in locales:
        clean_locale(configuration, locale_code)
|
def function[clean_translated_locales, parameter[configuration, langs]]:
constant[
Strips out the warning from all translated po files
about being an English source file.
]
if <ast.UnaryOp object at 0x7da20e9b2b60> begin[:]
variable[langs] assign[=] name[configuration].translated_locales
for taget[name[locale]] in starred[name[langs]] begin[:]
call[name[clean_locale], parameter[name[configuration], name[locale]]]
|
keyword[def] identifier[clean_translated_locales] ( identifier[configuration] , identifier[langs] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[langs] :
identifier[langs] = identifier[configuration] . identifier[translated_locales]
keyword[for] identifier[locale] keyword[in] identifier[langs] :
identifier[clean_locale] ( identifier[configuration] , identifier[locale] )
|
def clean_translated_locales(configuration, langs=None):
"""
Strips out the warning from all translated po files
about being an English source file.
"""
if not langs:
langs = configuration.translated_locales # depends on [control=['if'], data=[]]
for locale in langs:
clean_locale(configuration, locale) # depends on [control=['for'], data=['locale']]
|
def get_cli(cls) -> click.Group:
    """Add a :mod:`click` main function to use as a command line interface."""
    # Build the base CLI group, then attach the Flask-specific subcommands.
    group = super().get_cli()
    cls._cli_add_flask(group)
    return group
|
def function[get_cli, parameter[cls]]:
constant[Add a :mod:`click` main function to use as a command line interface.]
variable[main] assign[=] call[call[name[super], parameter[]].get_cli, parameter[]]
call[name[cls]._cli_add_flask, parameter[name[main]]]
return[name[main]]
|
keyword[def] identifier[get_cli] ( identifier[cls] )-> identifier[click] . identifier[Group] :
literal[string]
identifier[main] = identifier[super] (). identifier[get_cli] ()
identifier[cls] . identifier[_cli_add_flask] ( identifier[main] )
keyword[return] identifier[main]
|
def get_cli(cls) -> click.Group:
"""Add a :mod:`click` main function to use as a command line interface."""
main = super().get_cli()
cls._cli_add_flask(main)
return main
|
def filter_slow_requests(slowness):
    """Filter :class:`.Line` objects by their response time.

    :param slowness: minimum time, in milliseconds, a server needs to answer
      a request. If the server takes more time than that the log line is
      accepted.
    :type slowness: string
    :returns: a function that filters by the server response time.
    :rtype: function
    """
    # Parse once at factory time instead of on every filtered line; this also
    # surfaces an invalid threshold immediately rather than on first use.
    slowness_int = int(slowness)

    def filter_func(log_line):
        return slowness_int <= log_line.time_wait_response
    return filter_func
|
def function[filter_slow_requests, parameter[slowness]]:
constant[Filter :class:`.Line` objects by their response time.
:param slowness: minimum time, in milliseconds, a server needs to answer
a request. If the server takes more time than that the log line is
accepted.
:type slowness: string
:returns: a function that filters by the server response time.
:rtype: function
]
def function[filter_func, parameter[log_line]]:
variable[slowness_int] assign[=] call[name[int], parameter[name[slowness]]]
return[compare[name[slowness_int] less_or_equal[<=] name[log_line].time_wait_response]]
return[name[filter_func]]
|
keyword[def] identifier[filter_slow_requests] ( identifier[slowness] ):
literal[string]
keyword[def] identifier[filter_func] ( identifier[log_line] ):
identifier[slowness_int] = identifier[int] ( identifier[slowness] )
keyword[return] identifier[slowness_int] <= identifier[log_line] . identifier[time_wait_response]
keyword[return] identifier[filter_func]
|
def filter_slow_requests(slowness):
"""Filter :class:`.Line` objects by their response time.
:param slowness: minimum time, in milliseconds, a server needs to answer
a request. If the server takes more time than that the log line is
accepted.
:type slowness: string
:returns: a function that filters by the server response time.
:rtype: function
"""
def filter_func(log_line):
slowness_int = int(slowness)
return slowness_int <= log_line.time_wait_response
return filter_func
|
def output_to_bar(self, message, comma=True):
    """
    Outputs data to stdout, without buffering.

    message: A string containing the data to be output.
    comma: Whether or not a comma should be placed at the end of the output.
    """
    # Single write of message + optional comma + newline, then force a flush
    # so the bar sees the update immediately.
    suffix = ',\n' if comma else '\n'
    sys.stdout.write(message + suffix)
    sys.stdout.flush()
|
def function[output_to_bar, parameter[self, message, comma]]:
constant[
Outputs data to stdout, without buffering.
message: A string containing the data to be output.
comma: Whether or not a comma should be placed at the end of the output.
]
if name[comma] begin[:]
<ast.AugAssign object at 0x7da18ede7c10>
call[name[sys].stdout.write, parameter[binary_operation[name[message] + constant[
]]]]
call[name[sys].stdout.flush, parameter[]]
|
keyword[def] identifier[output_to_bar] ( identifier[self] , identifier[message] , identifier[comma] = keyword[True] ):
literal[string]
keyword[if] identifier[comma] :
identifier[message] += literal[string]
identifier[sys] . identifier[stdout] . identifier[write] ( identifier[message] + literal[string] )
identifier[sys] . identifier[stdout] . identifier[flush] ()
|
def output_to_bar(self, message, comma=True):
"""
Outputs data to stdout, without buffering.
message: A string containing the data to be output.
comma: Whether or not a comma should be placed at the end of the output.
"""
if comma:
message += ',' # depends on [control=['if'], data=[]]
sys.stdout.write(message + '\n')
sys.stdout.flush()
|
def _VarintEncoder():
"""Return an encoder for a basic varint value (does not include tag)."""
def EncodeVarint(write, value):
bits = value & 0x7f
value >>= 7
while value:
write(six.int2byte(0x80|bits))
bits = value & 0x7f
value >>= 7
return write(six.int2byte(bits))
return EncodeVarint
|
def function[_VarintEncoder, parameter[]]:
constant[Return an encoder for a basic varint value (does not include tag).]
def function[EncodeVarint, parameter[write, value]]:
variable[bits] assign[=] binary_operation[name[value] <ast.BitAnd object at 0x7da2590d6b60> constant[127]]
<ast.AugAssign object at 0x7da1b2096bf0>
while name[value] begin[:]
call[name[write], parameter[call[name[six].int2byte, parameter[binary_operation[constant[128] <ast.BitOr object at 0x7da2590d6aa0> name[bits]]]]]]
variable[bits] assign[=] binary_operation[name[value] <ast.BitAnd object at 0x7da2590d6b60> constant[127]]
<ast.AugAssign object at 0x7da1b2096920>
return[call[name[write], parameter[call[name[six].int2byte, parameter[name[bits]]]]]]
return[name[EncodeVarint]]
|
keyword[def] identifier[_VarintEncoder] ():
literal[string]
keyword[def] identifier[EncodeVarint] ( identifier[write] , identifier[value] ):
identifier[bits] = identifier[value] & literal[int]
identifier[value] >>= literal[int]
keyword[while] identifier[value] :
identifier[write] ( identifier[six] . identifier[int2byte] ( literal[int] | identifier[bits] ))
identifier[bits] = identifier[value] & literal[int]
identifier[value] >>= literal[int]
keyword[return] identifier[write] ( identifier[six] . identifier[int2byte] ( identifier[bits] ))
keyword[return] identifier[EncodeVarint]
|
def _VarintEncoder():
"""Return an encoder for a basic varint value (does not include tag)."""
def EncodeVarint(write, value):
bits = value & 127
value >>= 7
while value:
write(six.int2byte(128 | bits))
bits = value & 127
value >>= 7 # depends on [control=['while'], data=[]]
return write(six.int2byte(bits))
return EncodeVarint
|
def write_cyc(fn, this, conv=1.0):
    """ Write the lattice information to a cyc.dat file (i.e., tblmd input file)

    Parameters
    ----------
    fn : str
        Output file name (opened via paropen).
    this : object
        Object providing get_cell(); its cell vectors are written.
    conv : float
        Unit-conversion factor applied to every cell component.
    """
    lattice = this.get_cell()

    def _write_vectors(f):
        # One row per Cartesian component; columns are the three cell vectors.
        for k in range(3):
            f.write(" %20.10f %20.10f %20.10f\n"
                    % (lattice[0][k]*conv, lattice[1][k]*conv, lattice[2][k]*conv))

    def _write_zero_tensor(f):
        # 3x3 zero stress tensor.
        for _ in range(3):
            f.write("   0 0 0\n")

    f = paropen(fn, "w")
    try:
        f.write("<------- Simulation box definition\n")
        f.write("<------- Barostat (on = 1, off = 0)\n")
        f.write("  0\n")
        f.write("<------- Box vectors (start)\n")
        _write_vectors(f)
        f.write("<------- Box vectors (end)\n")
        _write_vectors(f)
        f.write("<------- Mass and gamma of the box (used in connection with the barostat)\n")
        f.write("  240 0.005\n")
        f.write("<------- Stress tensor (start)\n")
        _write_zero_tensor(f)
        f.write("<------- Stress tensor (end)\n")
        _write_zero_tensor(f)
    finally:
        # Close even if a write fails, so the handle is always released.
        f.close()
|
def function[write_cyc, parameter[fn, this, conv]]:
constant[ Write the lattice information to a cyc.dat file (i.e., tblmd input file)
]
variable[lattice] assign[=] call[name[this].get_cell, parameter[]]
variable[f] assign[=] call[name[paropen], parameter[name[fn], constant[w]]]
call[name[f].write, parameter[constant[<------- Simulation box definition
]]]
call[name[f].write, parameter[constant[<------- Barostat (on = 1, off = 0)
]]]
call[name[f].write, parameter[constant[ 0
]]]
call[name[f].write, parameter[constant[<------- Box vectors (start)
]]]
call[name[f].write, parameter[binary_operation[constant[ %20.10f %20.10f %20.10f
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b1254790>, <ast.BinOp object at 0x7da1b1256920>, <ast.BinOp object at 0x7da1b12b81f0>]]]]]
call[name[f].write, parameter[binary_operation[constant[ %20.10f %20.10f %20.10f
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b12babc0>, <ast.BinOp object at 0x7da1b12b9e70>, <ast.BinOp object at 0x7da1b12bb580>]]]]]
call[name[f].write, parameter[binary_operation[constant[ %20.10f %20.10f %20.10f
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b12b8d60>, <ast.BinOp object at 0x7da1b12b9600>, <ast.BinOp object at 0x7da1b12bbaf0>]]]]]
call[name[f].write, parameter[constant[<------- Box vectors (end)
]]]
call[name[f].write, parameter[binary_operation[constant[ %20.10f %20.10f %20.10f
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b12bbc10>, <ast.BinOp object at 0x7da1b12b93c0>, <ast.BinOp object at 0x7da1b12b80d0>]]]]]
call[name[f].write, parameter[binary_operation[constant[ %20.10f %20.10f %20.10f
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b12b8be0>, <ast.BinOp object at 0x7da1b12b9060>, <ast.BinOp object at 0x7da1b12b9a20>]]]]]
call[name[f].write, parameter[binary_operation[constant[ %20.10f %20.10f %20.10f
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b12b9c60>, <ast.BinOp object at 0x7da1b12ba050>, <ast.BinOp object at 0x7da1b12b87c0>]]]]]
call[name[f].write, parameter[constant[<------- Mass and gamma of the box (used in connection with the barostat)
]]]
call[name[f].write, parameter[constant[ 240 0.005
]]]
call[name[f].write, parameter[constant[<------- Stress tensor (start)
]]]
call[name[f].write, parameter[constant[ 0 0 0
]]]
call[name[f].write, parameter[constant[ 0 0 0
]]]
call[name[f].write, parameter[constant[ 0 0 0
]]]
call[name[f].write, parameter[constant[<------- Stress tensor (end)
]]]
call[name[f].write, parameter[constant[ 0 0 0
]]]
call[name[f].write, parameter[constant[ 0 0 0
]]]
call[name[f].write, parameter[constant[ 0 0 0
]]]
call[name[f].close, parameter[]]
|
keyword[def] identifier[write_cyc] ( identifier[fn] , identifier[this] , identifier[conv] = literal[int] ):
literal[string]
identifier[lattice] = identifier[this] . identifier[get_cell] ()
identifier[f] = identifier[paropen] ( identifier[fn] , literal[string] )
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] %( identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] , identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] , identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] ))
identifier[f] . identifier[write] ( literal[string] %( identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] , identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] , identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] ))
identifier[f] . identifier[write] ( literal[string] %( identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] , identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] , identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] ))
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] %( identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] , identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] , identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] ))
identifier[f] . identifier[write] ( literal[string] %( identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] , identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] , identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] ))
identifier[f] . identifier[write] ( literal[string] %( identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] , identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] , identifier[lattice] [ literal[int] ][ literal[int] ]* identifier[conv] ))
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[close] ()
|
def write_cyc(fn, this, conv=1.0):
""" Write the lattice information to a cyc.dat file (i.e., tblmd input file)
"""
lattice = this.get_cell()
f = paropen(fn, 'w')
f.write('<------- Simulation box definition\n')
f.write('<------- Barostat (on = 1, off = 0)\n')
f.write(' 0\n')
f.write('<------- Box vectors (start)\n')
f.write(' %20.10f %20.10f %20.10f\n' % (lattice[0][0] * conv, lattice[1][0] * conv, lattice[2][0] * conv))
f.write(' %20.10f %20.10f %20.10f\n' % (lattice[0][1] * conv, lattice[1][1] * conv, lattice[2][1] * conv))
f.write(' %20.10f %20.10f %20.10f\n' % (lattice[0][2] * conv, lattice[1][2] * conv, lattice[2][2] * conv))
f.write('<------- Box vectors (end)\n')
f.write(' %20.10f %20.10f %20.10f\n' % (lattice[0][0] * conv, lattice[1][0] * conv, lattice[2][0] * conv))
f.write(' %20.10f %20.10f %20.10f\n' % (lattice[0][1] * conv, lattice[1][1] * conv, lattice[2][1] * conv))
f.write(' %20.10f %20.10f %20.10f\n' % (lattice[0][2] * conv, lattice[1][2] * conv, lattice[2][2] * conv))
f.write('<------- Mass and gamma of the box (used in connection with the barostat)\n')
f.write(' 240 0.005\n')
f.write('<------- Stress tensor (start)\n')
f.write(' 0 0 0\n')
f.write(' 0 0 0\n')
f.write(' 0 0 0\n')
f.write('<------- Stress tensor (end)\n')
f.write(' 0 0 0\n')
f.write(' 0 0 0\n')
f.write(' 0 0 0\n')
f.close()
|
def install(name=None, refresh=False, fromrepo=None,
            pkgs=None, sources=None, **kwargs):
    '''
    Install the passed package via xbps-install.

    name
        The name of the package to be installed.
    refresh
        Whether or not to refresh the package database before installing.
    fromrepo
        Specify a package repository (url) to install from.

    Multiple Package Installation Options:

    pkgs
        A list of packages to install from a software repository. Must be
        passed as a python list.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install pkgs='["foo","bar"]'

    sources
        A list of packages to install. Must be passed as a list of dicts,
        with the keys being package names, and the values being the source URI
        or local path to the package.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install sources='[{"foo": "salt://foo.deb"},{"bar": "salt://bar.deb"}]'

    Returns a dict mapping each changed package name to its old/new versions::

        {'<package>': {'old': '<old-version>',
                       'new': '<new-version>'}}

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.install <package name>
    '''
    # XXX sources is accepted for API compatibility but not yet used here.
    try:
        pkg_params, pkg_type = __salt__['pkg_resource.parse_targets'](
            name, pkgs, sources, **kwargs
        )
    except MinionError as exc:
        raise CommandExecutionError(exc)

    if not pkg_params:
        return {}
    if pkg_type != 'repository':
        # xbps can only install from configured repositories.
        log.error('xbps: pkg_type "%s" not supported.', pkg_type)
        return {}

    args = ['xbps-install']
    if refresh:
        args.append('-S')  # sync/update the repository database first
    if fromrepo:
        args.append('--repository={0}'.format(fromrepo))
    args.append('-y')  # answer yes to all prompts
    args.extend(pkg_params)

    before = list_pkgs()
    __salt__['cmd.run'](args, output_loglevel='trace')
    # Invalidate the cached package list so list_pkgs() re-queries xbps.
    __context__.pop('pkg.list_pkgs', None)
    after = list_pkgs()
    _rehash()
    return salt.utils.data.compare_dicts(before, after)
|
def function[install, parameter[name, refresh, fromrepo, pkgs, sources]]:
constant[
Install the passed package
name
The name of the package to be installed.
refresh
Whether or not to refresh the package database before installing.
fromrepo
Specify a package repository (url) to install from.
Multiple Package Installation Options:
pkgs
A list of packages to install from a software repository. Must be
passed as a python list.
CLI Example:
.. code-block:: bash
salt '*' pkg.install pkgs='["foo","bar"]'
sources
A list of packages to install. Must be passed as a list of dicts,
with the keys being package names, and the values being the source URI
or local path to the package.
CLI Example:
.. code-block:: bash
salt '*' pkg.install sources='[{"foo": "salt://foo.deb"},{"bar": "salt://bar.deb"}]'
Return a dict containing the new package names and versions::
{'<package>': {'old': '<old-version>',
'new': '<new-version>'}}
CLI Example:
.. code-block:: bash
salt '*' pkg.install <package name>
]
<ast.Try object at 0x7da1b210a650>
if <ast.UnaryOp object at 0x7da1b210a2c0> begin[:]
return[dictionary[[], []]]
if compare[name[pkg_type] not_equal[!=] constant[repository]] begin[:]
call[name[log].error, parameter[constant[xbps: pkg_type "%s" not supported.], name[pkg_type]]]
return[dictionary[[], []]]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b21092a0>]]
if name[refresh] begin[:]
call[name[cmd].append, parameter[constant[-S]]]
if name[fromrepo] begin[:]
call[name[cmd].append, parameter[call[constant[--repository={0}].format, parameter[name[fromrepo]]]]]
call[name[cmd].append, parameter[constant[-y]]]
call[name[cmd].extend, parameter[name[pkg_params]]]
variable[old] assign[=] call[name[list_pkgs], parameter[]]
call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]]
call[name[__context__].pop, parameter[constant[pkg.list_pkgs], constant[None]]]
variable[new] assign[=] call[name[list_pkgs], parameter[]]
call[name[_rehash], parameter[]]
return[call[name[salt].utils.data.compare_dicts, parameter[name[old], name[new]]]]
|
keyword[def] identifier[install] ( identifier[name] = keyword[None] , identifier[refresh] = keyword[False] , identifier[fromrepo] = keyword[None] ,
identifier[pkgs] = keyword[None] , identifier[sources] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[pkg_params] , identifier[pkg_type] = identifier[__salt__] [ literal[string] ](
identifier[name] , identifier[pkgs] , identifier[sources] ,** identifier[kwargs]
)
keyword[except] identifier[MinionError] keyword[as] identifier[exc] :
keyword[raise] identifier[CommandExecutionError] ( identifier[exc] )
keyword[if] keyword[not] identifier[pkg_params] :
keyword[return] {}
keyword[if] identifier[pkg_type] != literal[string] :
identifier[log] . identifier[error] ( literal[string] , identifier[pkg_type] )
keyword[return] {}
identifier[cmd] =[ literal[string] ]
keyword[if] identifier[refresh] :
identifier[cmd] . identifier[append] ( literal[string] )
keyword[if] identifier[fromrepo] :
identifier[cmd] . identifier[append] ( literal[string] . identifier[format] ( identifier[fromrepo] ))
identifier[cmd] . identifier[append] ( literal[string] )
identifier[cmd] . identifier[extend] ( identifier[pkg_params] )
identifier[old] = identifier[list_pkgs] ()
identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[output_loglevel] = literal[string] )
identifier[__context__] . identifier[pop] ( literal[string] , keyword[None] )
identifier[new] = identifier[list_pkgs] ()
identifier[_rehash] ()
keyword[return] identifier[salt] . identifier[utils] . identifier[data] . identifier[compare_dicts] ( identifier[old] , identifier[new] )
|
def install(name=None, refresh=False, fromrepo=None, pkgs=None, sources=None, **kwargs):
"""
Install the passed package
name
The name of the package to be installed.
refresh
Whether or not to refresh the package database before installing.
fromrepo
Specify a package repository (url) to install from.
Multiple Package Installation Options:
pkgs
A list of packages to install from a software repository. Must be
passed as a python list.
CLI Example:
.. code-block:: bash
salt '*' pkg.install pkgs='["foo","bar"]'
sources
A list of packages to install. Must be passed as a list of dicts,
with the keys being package names, and the values being the source URI
or local path to the package.
CLI Example:
.. code-block:: bash
salt '*' pkg.install sources='[{"foo": "salt://foo.deb"},{"bar": "salt://bar.deb"}]'
Return a dict containing the new package names and versions::
{'<package>': {'old': '<old-version>',
'new': '<new-version>'}}
CLI Example:
.. code-block:: bash
salt '*' pkg.install <package name>
"""
# XXX sources is not yet used in this code
try:
(pkg_params, pkg_type) = __salt__['pkg_resource.parse_targets'](name, pkgs, sources, **kwargs) # depends on [control=['try'], data=[]]
except MinionError as exc:
raise CommandExecutionError(exc) # depends on [control=['except'], data=['exc']]
if not pkg_params:
return {} # depends on [control=['if'], data=[]]
if pkg_type != 'repository':
log.error('xbps: pkg_type "%s" not supported.', pkg_type)
return {} # depends on [control=['if'], data=['pkg_type']]
cmd = ['xbps-install']
if refresh:
cmd.append('-S') # update repo db # depends on [control=['if'], data=[]]
if fromrepo:
cmd.append('--repository={0}'.format(fromrepo)) # depends on [control=['if'], data=[]]
cmd.append('-y') # assume yes when asked
cmd.extend(pkg_params)
old = list_pkgs()
__salt__['cmd.run'](cmd, output_loglevel='trace')
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
_rehash()
return salt.utils.data.compare_dicts(old, new)
|
def mset(self, mapping):
    """
    Sets each key in the ``mapping`` dict to its corresponding value.

    Keys are first bucketed by the server that owns them, then one
    ``mset`` call is issued per backend connection.
    """
    by_server = {}
    for key, value in mapping.items():
        server = self.get_server_name(key)
        if server not in by_server:
            by_server[server] = []
        by_server[server].append((key, value))
    for server, pairs in by_server.items():
        self.connections[server].mset(dict(pairs))
    return True
|
def function[mset, parameter[self, mapping]]:
constant[
Sets each key in the ``mapping`` dict to its corresponding value
]
variable[servers] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da18f09f820>, <ast.Name object at 0x7da18f09e7a0>]]] in starred[call[name[mapping].items, parameter[]]] begin[:]
variable[server_name] assign[=] call[name[self].get_server_name, parameter[name[key]]]
call[name[servers].setdefault, parameter[name[server_name], list[[]]]]
call[call[name[servers]][name[server_name]].append, parameter[tuple[[<ast.Name object at 0x7da18f09c760>, <ast.Name object at 0x7da18f09fc40>]]]]
for taget[tuple[[<ast.Name object at 0x7da18f09ff10>, <ast.Name object at 0x7da18f09f730>]]] in starred[call[name[servers].items, parameter[]]] begin[:]
call[call[name[self].connections][name[name]].mset, parameter[call[name[dict], parameter[name[items]]]]]
return[constant[True]]
|
keyword[def] identifier[mset] ( identifier[self] , identifier[mapping] ):
literal[string]
identifier[servers] ={}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[mapping] . identifier[items] ():
identifier[server_name] = identifier[self] . identifier[get_server_name] ( identifier[key] )
identifier[servers] . identifier[setdefault] ( identifier[server_name] ,[])
identifier[servers] [ identifier[server_name] ]. identifier[append] (( identifier[key] , identifier[value] ))
keyword[for] identifier[name] , identifier[items] keyword[in] identifier[servers] . identifier[items] ():
identifier[self] . identifier[connections] [ identifier[name] ]. identifier[mset] ( identifier[dict] ( identifier[items] ))
keyword[return] keyword[True]
|
def mset(self, mapping):
"""
Sets each key in the ``mapping`` dict to its corresponding value
"""
servers = {}
for (key, value) in mapping.items():
server_name = self.get_server_name(key)
servers.setdefault(server_name, [])
servers[server_name].append((key, value)) # depends on [control=['for'], data=[]]
for (name, items) in servers.items():
self.connections[name].mset(dict(items)) # depends on [control=['for'], data=[]]
return True
|
def _validate_data(data):
    """Validate *data*, raising ValueError for unknown or missing keys.

    Unknown keys are reported before missing ones.

    :param data: Data to send to API
    :type data: dict
    :raises ValueError: if *data* contains keys outside ALLOWED_KEYS or
        lacks any of REQUIRED_KEYS.
    """
    provided = set(data)
    unknown = provided.difference(ALLOWED_KEYS)
    absent = set(REQUIRED_KEYS).difference(provided)
    if unknown:
        raise ValueError(
            'Invalid data keys {!r}'.format(', '.join(unknown))
        )
    if absent:
        raise ValueError(
            'Missing keys {!r}'.format(', '.join(absent))
        )
|
def function[_validate_data, parameter[data]]:
constant[Validates the given data and raises an error if any non-allowed keys are
provided or any required keys are missing.
:param data: Data to send to API
:type data: dict
]
variable[data_keys] assign[=] call[name[set], parameter[call[name[data].keys, parameter[]]]]
variable[extra_keys] assign[=] binary_operation[name[data_keys] - call[name[set], parameter[name[ALLOWED_KEYS]]]]
variable[missing_keys] assign[=] binary_operation[call[name[set], parameter[name[REQUIRED_KEYS]]] - name[data_keys]]
if name[extra_keys] begin[:]
<ast.Raise object at 0x7da207f03730>
if name[missing_keys] begin[:]
<ast.Raise object at 0x7da20e954df0>
|
keyword[def] identifier[_validate_data] ( identifier[data] ):
literal[string]
identifier[data_keys] = identifier[set] ( identifier[data] . identifier[keys] ())
identifier[extra_keys] = identifier[data_keys] - identifier[set] ( identifier[ALLOWED_KEYS] )
identifier[missing_keys] = identifier[set] ( identifier[REQUIRED_KEYS] )- identifier[data_keys]
keyword[if] identifier[extra_keys] :
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[extra_keys] ))
)
keyword[if] identifier[missing_keys] :
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[missing_keys] ))
)
|
def _validate_data(data):
"""Validates the given data and raises an error if any non-allowed keys are
provided or any required keys are missing.
:param data: Data to send to API
:type data: dict
"""
data_keys = set(data.keys())
extra_keys = data_keys - set(ALLOWED_KEYS)
missing_keys = set(REQUIRED_KEYS) - data_keys
if extra_keys:
raise ValueError('Invalid data keys {!r}'.format(', '.join(extra_keys))) # depends on [control=['if'], data=[]]
if missing_keys:
raise ValueError('Missing keys {!r}'.format(', '.join(missing_keys))) # depends on [control=['if'], data=[]]
|
def disable_all_breakpoints(cls):
    """Disable every registered breakpoint, then refresh `active_breakpoint_flag`.

    Slot #0 of ``breakpoints_by_number`` is a permanent ``None``
    placeholder and is skipped.
    """
    for breakpoint_obj in cls.breakpoints_by_number:
        if not breakpoint_obj:
            continue
        breakpoint_obj.enabled = False
    cls.update_active_breakpoint_flag()
    return
|
def function[disable_all_breakpoints, parameter[cls]]:
constant[ Disable all breakpoints and udate `active_breakpoint_flag`.
]
for taget[name[bp]] in starred[name[cls].breakpoints_by_number] begin[:]
if name[bp] begin[:]
name[bp].enabled assign[=] constant[False]
call[name[cls].update_active_breakpoint_flag, parameter[]]
return[None]
|
keyword[def] identifier[disable_all_breakpoints] ( identifier[cls] ):
literal[string]
keyword[for] identifier[bp] keyword[in] identifier[cls] . identifier[breakpoints_by_number] :
keyword[if] identifier[bp] :
identifier[bp] . identifier[enabled] = keyword[False]
identifier[cls] . identifier[update_active_breakpoint_flag] ()
keyword[return]
|
def disable_all_breakpoints(cls):
""" Disable all breakpoints and udate `active_breakpoint_flag`.
"""
for bp in cls.breakpoints_by_number:
if bp: # breakpoint #0 exists and is always None
bp.enabled = False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['bp']]
cls.update_active_breakpoint_flag()
return
|
def _do_validate_sources_present(self, target):
"""Checks whether sources is empty, and either raises a TaskError or just returns False.
The specifics of this behavior are defined by whether the user sets --allow-empty to True/False:
--allow-empty=False will result in a TaskError being raised in the event of an empty source
set. If --allow-empty=True, this method will just return false and log a warning.
Shared for all SimpleCodegenTask subclasses to help keep errors consistent and descriptive.
:param target: Target to validate.
:return: True if sources is not empty, False otherwise.
"""
if not self.validate_sources_present:
return True
sources = target.sources_relative_to_buildroot()
if not sources:
message = ('Target {} has no sources.'.format(target.address.spec))
if not self.get_options().allow_empty:
raise TaskError(message)
else:
logging.warn(message)
return False
return True
|
def function[_do_validate_sources_present, parameter[self, target]]:
constant[Checks whether sources is empty, and either raises a TaskError or just returns False.
The specifics of this behavior are defined by whether the user sets --allow-empty to True/False:
--allow-empty=False will result in a TaskError being raised in the event of an empty source
set. If --allow-empty=True, this method will just return false and log a warning.
Shared for all SimpleCodegenTask subclasses to help keep errors consistent and descriptive.
:param target: Target to validate.
:return: True if sources is not empty, False otherwise.
]
if <ast.UnaryOp object at 0x7da1b2248af0> begin[:]
return[constant[True]]
variable[sources] assign[=] call[name[target].sources_relative_to_buildroot, parameter[]]
if <ast.UnaryOp object at 0x7da1b2249ea0> begin[:]
variable[message] assign[=] call[constant[Target {} has no sources.].format, parameter[name[target].address.spec]]
if <ast.UnaryOp object at 0x7da1b22a7220> begin[:]
<ast.Raise object at 0x7da1b22a6800>
return[constant[True]]
|
keyword[def] identifier[_do_validate_sources_present] ( identifier[self] , identifier[target] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[validate_sources_present] :
keyword[return] keyword[True]
identifier[sources] = identifier[target] . identifier[sources_relative_to_buildroot] ()
keyword[if] keyword[not] identifier[sources] :
identifier[message] =( literal[string] . identifier[format] ( identifier[target] . identifier[address] . identifier[spec] ))
keyword[if] keyword[not] identifier[self] . identifier[get_options] (). identifier[allow_empty] :
keyword[raise] identifier[TaskError] ( identifier[message] )
keyword[else] :
identifier[logging] . identifier[warn] ( identifier[message] )
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def _do_validate_sources_present(self, target):
"""Checks whether sources is empty, and either raises a TaskError or just returns False.
The specifics of this behavior are defined by whether the user sets --allow-empty to True/False:
--allow-empty=False will result in a TaskError being raised in the event of an empty source
set. If --allow-empty=True, this method will just return false and log a warning.
Shared for all SimpleCodegenTask subclasses to help keep errors consistent and descriptive.
:param target: Target to validate.
:return: True if sources is not empty, False otherwise.
"""
if not self.validate_sources_present:
return True # depends on [control=['if'], data=[]]
sources = target.sources_relative_to_buildroot()
if not sources:
message = 'Target {} has no sources.'.format(target.address.spec)
if not self.get_options().allow_empty:
raise TaskError(message) # depends on [control=['if'], data=[]]
else:
logging.warn(message)
return False # depends on [control=['if'], data=[]]
return True
|
def get_rate_limits(response):
    """Returns a list of rate limit information from a given response's headers.

    Each entry describes one rate-limit window: its period, request limit,
    remaining requests, and when/how soon the window resets.
    """
    headers = response.headers
    raw_periods = headers['X-RateLimit-Period']
    if not raw_periods:
        return []

    periods = raw_periods.split(',')
    limits = headers['X-RateLimit-Limit'].split(',')
    remaining = headers['X-RateLimit-Remaining'].split(',')
    resets = headers['X-RateLimit-Reset'].split(',')

    rate_limits = []
    for idx, period in enumerate(periods):
        reset_at = get_datetime_from_timestamp(resets[idx])
        now = datetime.now()
        if reset_at is not None and now < reset_at:
            # add 1 second because of rounding
            secs_left = (reset_at - now).seconds + 1
        else:
            secs_left = 0
        rate_limits.append({
            'period': get_readable_time_string(period),
            'period_seconds': period,
            'request_limit': limits[idx],
            'requests_remaining': remaining[idx],
            'reset': reset_at,
            'reset_in_seconds': secs_left,
            'time_to_reset': get_readable_time_string(secs_left),
        })
    return rate_limits
|
def function[get_rate_limits, parameter[response]]:
constant[Returns a list of rate limit information from a given response's headers.]
variable[periods] assign[=] call[name[response].headers][constant[X-RateLimit-Period]]
if <ast.UnaryOp object at 0x7da204623820> begin[:]
return[list[[]]]
variable[rate_limits] assign[=] list[[]]
variable[periods] assign[=] call[name[periods].split, parameter[constant[,]]]
variable[limits] assign[=] call[call[name[response].headers][constant[X-RateLimit-Limit]].split, parameter[constant[,]]]
variable[remaining] assign[=] call[call[name[response].headers][constant[X-RateLimit-Remaining]].split, parameter[constant[,]]]
variable[reset] assign[=] call[call[name[response].headers][constant[X-RateLimit-Reset]].split, parameter[constant[,]]]
for taget[tuple[[<ast.Name object at 0x7da204623e80>, <ast.Name object at 0x7da204623cd0>]]] in starred[call[name[enumerate], parameter[name[periods]]]] begin[:]
variable[rate_limit] assign[=] dictionary[[], []]
variable[limit_period] assign[=] call[name[get_readable_time_string], parameter[name[period]]]
call[name[rate_limit]][constant[period]] assign[=] name[limit_period]
call[name[rate_limit]][constant[period_seconds]] assign[=] name[period]
call[name[rate_limit]][constant[request_limit]] assign[=] call[name[limits]][name[idx]]
call[name[rate_limit]][constant[requests_remaining]] assign[=] call[name[remaining]][name[idx]]
variable[reset_datetime] assign[=] call[name[get_datetime_from_timestamp], parameter[call[name[reset]][name[idx]]]]
call[name[rate_limit]][constant[reset]] assign[=] name[reset_datetime]
variable[right_now] assign[=] call[name[datetime].now, parameter[]]
if <ast.BoolOp object at 0x7da204623b80> begin[:]
variable[seconds_remaining] assign[=] binary_operation[binary_operation[name[reset_datetime] - name[right_now]].seconds + constant[1]]
call[name[rate_limit]][constant[reset_in_seconds]] assign[=] name[seconds_remaining]
call[name[rate_limit]][constant[time_to_reset]] assign[=] call[name[get_readable_time_string], parameter[name[seconds_remaining]]]
call[name[rate_limits].append, parameter[name[rate_limit]]]
return[name[rate_limits]]
|
keyword[def] identifier[get_rate_limits] ( identifier[response] ):
literal[string]
identifier[periods] = identifier[response] . identifier[headers] [ literal[string] ]
keyword[if] keyword[not] identifier[periods] :
keyword[return] []
identifier[rate_limits] =[]
identifier[periods] = identifier[periods] . identifier[split] ( literal[string] )
identifier[limits] = identifier[response] . identifier[headers] [ literal[string] ]. identifier[split] ( literal[string] )
identifier[remaining] = identifier[response] . identifier[headers] [ literal[string] ]. identifier[split] ( literal[string] )
identifier[reset] = identifier[response] . identifier[headers] [ literal[string] ]. identifier[split] ( literal[string] )
keyword[for] identifier[idx] , identifier[period] keyword[in] identifier[enumerate] ( identifier[periods] ):
identifier[rate_limit] ={}
identifier[limit_period] = identifier[get_readable_time_string] ( identifier[period] )
identifier[rate_limit] [ literal[string] ]= identifier[limit_period]
identifier[rate_limit] [ literal[string] ]= identifier[period]
identifier[rate_limit] [ literal[string] ]= identifier[limits] [ identifier[idx] ]
identifier[rate_limit] [ literal[string] ]= identifier[remaining] [ identifier[idx] ]
identifier[reset_datetime] = identifier[get_datetime_from_timestamp] ( identifier[reset] [ identifier[idx] ])
identifier[rate_limit] [ literal[string] ]= identifier[reset_datetime]
identifier[right_now] = identifier[datetime] . identifier[now] ()
keyword[if] ( identifier[reset_datetime] keyword[is] keyword[not] keyword[None] ) keyword[and] ( identifier[right_now] < identifier[reset_datetime] ):
identifier[seconds_remaining] =( identifier[reset_datetime] - identifier[right_now] ). identifier[seconds] + literal[int]
keyword[else] :
identifier[seconds_remaining] = literal[int]
identifier[rate_limit] [ literal[string] ]= identifier[seconds_remaining]
identifier[rate_limit] [ literal[string] ]= identifier[get_readable_time_string] ( identifier[seconds_remaining] )
identifier[rate_limits] . identifier[append] ( identifier[rate_limit] )
keyword[return] identifier[rate_limits]
|
def get_rate_limits(response):
"""Returns a list of rate limit information from a given response's headers."""
periods = response.headers['X-RateLimit-Period']
if not periods:
return [] # depends on [control=['if'], data=[]]
rate_limits = []
periods = periods.split(',')
limits = response.headers['X-RateLimit-Limit'].split(',')
remaining = response.headers['X-RateLimit-Remaining'].split(',')
reset = response.headers['X-RateLimit-Reset'].split(',')
for (idx, period) in enumerate(periods):
rate_limit = {}
limit_period = get_readable_time_string(period)
rate_limit['period'] = limit_period
rate_limit['period_seconds'] = period
rate_limit['request_limit'] = limits[idx]
rate_limit['requests_remaining'] = remaining[idx]
reset_datetime = get_datetime_from_timestamp(reset[idx])
rate_limit['reset'] = reset_datetime
right_now = datetime.now()
if reset_datetime is not None and right_now < reset_datetime:
# add 1 second because of rounding
seconds_remaining = (reset_datetime - right_now).seconds + 1 # depends on [control=['if'], data=[]]
else:
seconds_remaining = 0
rate_limit['reset_in_seconds'] = seconds_remaining
rate_limit['time_to_reset'] = get_readable_time_string(seconds_remaining)
rate_limits.append(rate_limit) # depends on [control=['for'], data=[]]
return rate_limits
|
def await_flush_completion(self, timeout=None):
    """
    Mark all partitions as ready to send and block until the send is complete.

    Always decrements the in-progress flush counter, even when a wait
    times out or a future reports failure.
    """
    try:
        for pending in self._incomplete.all():
            future = pending.produce_future
            log.debug('Waiting on produce to %s', future.topic_partition)
            if not future.wait(timeout=timeout):
                raise Errors.KafkaTimeoutError('Timeout waiting for future')
            if not future.is_done:
                raise Errors.UnknownError('Future not done')
            if future.failed():
                log.warning(future.exception)
    finally:
        self._flushes_in_progress.decrement()
|
def function[await_flush_completion, parameter[self, timeout]]:
constant[
Mark all partitions as ready to send and block until the send is complete
]
<ast.Try object at 0x7da1b1c29de0>
|
keyword[def] identifier[await_flush_completion] ( identifier[self] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[try] :
keyword[for] identifier[batch] keyword[in] identifier[self] . identifier[_incomplete] . identifier[all] ():
identifier[log] . identifier[debug] ( literal[string] ,
identifier[batch] . identifier[produce_future] . identifier[topic_partition] )
keyword[if] keyword[not] identifier[batch] . identifier[produce_future] . identifier[wait] ( identifier[timeout] = identifier[timeout] ):
keyword[raise] identifier[Errors] . identifier[KafkaTimeoutError] ( literal[string] )
keyword[if] keyword[not] identifier[batch] . identifier[produce_future] . identifier[is_done] :
keyword[raise] identifier[Errors] . identifier[UnknownError] ( literal[string] )
keyword[if] identifier[batch] . identifier[produce_future] . identifier[failed] ():
identifier[log] . identifier[warning] ( identifier[batch] . identifier[produce_future] . identifier[exception] )
keyword[finally] :
identifier[self] . identifier[_flushes_in_progress] . identifier[decrement] ()
|
def await_flush_completion(self, timeout=None):
"""
Mark all partitions as ready to send and block until the send is complete
"""
try:
for batch in self._incomplete.all():
log.debug('Waiting on produce to %s', batch.produce_future.topic_partition)
if not batch.produce_future.wait(timeout=timeout):
raise Errors.KafkaTimeoutError('Timeout waiting for future') # depends on [control=['if'], data=[]]
if not batch.produce_future.is_done:
raise Errors.UnknownError('Future not done') # depends on [control=['if'], data=[]]
if batch.produce_future.failed():
log.warning(batch.produce_future.exception) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['batch']] # depends on [control=['try'], data=[]]
finally:
self._flushes_in_progress.decrement()
|
def prompt_and_select_link(self):
    """
    Prompt the user to select a link from a list to open.

    Return the link that was selected, or ``None`` if no link was selected.
    """
    item = self.get_selected_item()
    url_full = item.get('url_full')
    permalink = item.get('permalink')

    if url_full and url_full != permalink:
        # Link-only submission with no text body: open the URL directly.
        return url_full

    html = item.get('html')
    if not html:
        # Some items, like hidden comments, carry no HTML to parse.
        return permalink

    extracted = self.content.extract_links(html)
    if not extracted:
        # Only one possible destination, so pick it without prompting.
        return permalink

    # Offer the permalink first (when available), then the extracted links.
    choices = [{'text': 'Permalink', 'href': permalink}] if permalink else []
    choices.extend(extracted)
    return self.term.prompt_user_to_select_link(choices)
|
def function[prompt_and_select_link, parameter[self]]:
constant[
Prompt the user to select a link from a list to open.
Return the link that was selected, or ``None`` if no link was selected.
]
variable[data] assign[=] call[name[self].get_selected_item, parameter[]]
variable[url_full] assign[=] call[name[data].get, parameter[constant[url_full]]]
variable[permalink] assign[=] call[name[data].get, parameter[constant[permalink]]]
if <ast.BoolOp object at 0x7da18dc05e70> begin[:]
variable[link] assign[=] name[url_full]
return[name[link]]
|
keyword[def] identifier[prompt_and_select_link] ( identifier[self] ):
literal[string]
identifier[data] = identifier[self] . identifier[get_selected_item] ()
identifier[url_full] = identifier[data] . identifier[get] ( literal[string] )
identifier[permalink] = identifier[data] . identifier[get] ( literal[string] )
keyword[if] identifier[url_full] keyword[and] identifier[url_full] != identifier[permalink] :
identifier[link] = identifier[url_full]
keyword[else] :
identifier[html] = identifier[data] . identifier[get] ( literal[string] )
keyword[if] identifier[html] :
identifier[extracted_links] = identifier[self] . identifier[content] . identifier[extract_links] ( identifier[html] )
keyword[if] keyword[not] identifier[extracted_links] :
identifier[link] = identifier[permalink]
keyword[else] :
identifier[links] =[]
keyword[if] identifier[permalink] :
identifier[links] +=[{ literal[string] : literal[string] , literal[string] : identifier[permalink] }]
identifier[links] += identifier[extracted_links]
identifier[link] = identifier[self] . identifier[term] . identifier[prompt_user_to_select_link] ( identifier[links] )
keyword[else] :
identifier[link] = identifier[permalink]
keyword[return] identifier[link]
|
def prompt_and_select_link(self):
"""
Prompt the user to select a link from a list to open.
Return the link that was selected, or ``None`` if no link was selected.
"""
data = self.get_selected_item()
url_full = data.get('url_full')
permalink = data.get('permalink')
if url_full and url_full != permalink:
# The item is a link-only submission that won't contain text
link = url_full # depends on [control=['if'], data=[]]
else:
html = data.get('html')
if html:
extracted_links = self.content.extract_links(html)
if not extracted_links:
# Only one selection to choose from, so just pick it
link = permalink # depends on [control=['if'], data=[]]
else:
# Let the user decide which link to open
links = []
if permalink:
links += [{'text': 'Permalink', 'href': permalink}] # depends on [control=['if'], data=[]]
links += extracted_links
link = self.term.prompt_user_to_select_link(links) # depends on [control=['if'], data=[]]
else:
# Some items like hidden comments don't have any HTML to parse
link = permalink
return link
|
def convert_dt_time(duration, return_iter=False):
    """
    Summary:
        convert timedelta objects to human readable output
    Args:
        :duration (datetime.timedelta): time duration to convert
        :return_iter (tuple): tuple containing time sequence
    Returns:
        days, hours, minutes, seconds | TYPE: tuple (integers), OR
        human readable, notated units | TYPE: string
    """
    # Fallback value: previously, an exception raised before format_string was
    # assigned caused an UnboundLocalError at the final return, masking the
    # logged error with a second, unrelated crash.
    format_string = ''
    try:
        days, hours, minutes, seconds = convert_timedelta(duration)
        if return_iter:
            return days, hours, minutes, seconds
        # string format conversions, choosing the two most significant units
        if days > 0:
            format_string = '{} day{}, {} hour{}'.format(
                days, 's' if days != 1 else '', hours, 's' if hours != 1 else '')
        elif hours > 1:
            format_string = '{} hour{}, {} minute{}'.format(
                hours, 's' if hours != 1 else '', minutes, 's' if minutes != 1 else '')
        else:
            format_string = '{} minute{}, {} sec{}'.format(
                minutes, 's' if minutes != 1 else '', seconds, 's' if seconds != 1 else '')
    except AttributeError as e:
        # Lazy %-style args let logging skip formatting when the record is dropped.
        logger.exception(
            '%s: Type mismatch when converting timedelta objects (Code: %s)',
            inspect.stack()[0][3], str(e))
    except Exception as e:
        logger.exception(
            '%s: Unknown error when converting datetime objects (Code: %s)',
            inspect.stack()[0][3], str(e))
    return format_string
|
def function[convert_dt_time, parameter[duration, return_iter]]:
constant[
Summary:
convert timedelta objects to human readable output
Args:
:duration (datetime.timedelta): time duration to convert
:return_iter (tuple): tuple containing time sequence
Returns:
days, hours, minutes, seconds | TYPE: tuple (integers), OR
human readable, notated units | TYPE: string
]
<ast.Try object at 0x7da1b133d750>
return[name[format_string]]
|
keyword[def] identifier[convert_dt_time] ( identifier[duration] , identifier[return_iter] = keyword[False] ):
literal[string]
keyword[try] :
identifier[days] , identifier[hours] , identifier[minutes] , identifier[seconds] = identifier[convert_timedelta] ( identifier[duration] )
keyword[if] identifier[return_iter] :
keyword[return] identifier[days] , identifier[hours] , identifier[minutes] , identifier[seconds]
keyword[if] identifier[days] > literal[int] :
identifier[format_string] =(
literal[string] . identifier[format] (
identifier[days] , literal[string] keyword[if] identifier[days] != literal[int] keyword[else] literal[string] , identifier[hours] , literal[string] keyword[if] identifier[hours] != literal[int] keyword[else] literal[string] ))
keyword[elif] identifier[hours] > literal[int] :
identifier[format_string] =(
literal[string] . identifier[format] (
identifier[hours] , literal[string] keyword[if] identifier[hours] != literal[int] keyword[else] literal[string] , identifier[minutes] , literal[string] keyword[if] identifier[minutes] != literal[int] keyword[else] literal[string] ))
keyword[else] :
identifier[format_string] =(
literal[string] . identifier[format] (
identifier[minutes] , literal[string] keyword[if] identifier[minutes] != literal[int] keyword[else] literal[string] , identifier[seconds] , literal[string] keyword[if] identifier[seconds] != literal[int] keyword[else] literal[string] ))
keyword[except] identifier[AttributeError] keyword[as] identifier[e] :
identifier[logger] . identifier[exception] (
literal[string] %
( identifier[inspect] . identifier[stack] ()[ literal[int] ][ literal[int] ], identifier[str] ( identifier[e] )))
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[exception] (
literal[string] %
( identifier[inspect] . identifier[stack] ()[ literal[int] ][ literal[int] ], identifier[str] ( identifier[e] )))
keyword[return] identifier[format_string]
|
def convert_dt_time(duration, return_iter=False):
"""
Summary:
convert timedelta objects to human readable output
Args:
:duration (datetime.timedelta): time duration to convert
:return_iter (tuple): tuple containing time sequence
Returns:
days, hours, minutes, seconds | TYPE: tuple (integers), OR
human readable, notated units | TYPE: string
"""
try:
(days, hours, minutes, seconds) = convert_timedelta(duration)
if return_iter:
return (days, hours, minutes, seconds) # depends on [control=['if'], data=[]]
# string format conversions
if days > 0:
format_string = '{} day{}, {} hour{}'.format(days, 's' if days != 1 else '', hours, 's' if hours != 1 else '') # depends on [control=['if'], data=['days']]
elif hours > 1:
format_string = '{} hour{}, {} minute{}'.format(hours, 's' if hours != 1 else '', minutes, 's' if minutes != 1 else '') # depends on [control=['if'], data=['hours']]
else:
format_string = '{} minute{}, {} sec{}'.format(minutes, 's' if minutes != 1 else '', seconds, 's' if seconds != 1 else '') # depends on [control=['try'], data=[]]
except AttributeError as e:
logger.exception('%s: Type mismatch when converting timedelta objects (Code: %s)' % (inspect.stack()[0][3], str(e))) # depends on [control=['except'], data=['e']]
except Exception as e:
logger.exception('%s: Unknown error when converting datetime objects (Code: %s)' % (inspect.stack()[0][3], str(e))) # depends on [control=['except'], data=['e']]
return format_string
|
def kill(self, signal=None):
    """
    Kill or send a signal to the container.

    Args:
        signal (str or int): The signal to send. Defaults to ``SIGKILL``

    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.
    """
    # Delegate to the low-level API client, targeting this container by id.
    api_client = self.client.api
    return api_client.kill(self.id, signal=signal)
|
def function[kill, parameter[self, signal]]:
constant[
Kill or send a signal to the container.
Args:
signal (str or int): The signal to send. Defaults to ``SIGKILL``
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
]
return[call[name[self].client.api.kill, parameter[name[self].id]]]
|
keyword[def] identifier[kill] ( identifier[self] , identifier[signal] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[client] . identifier[api] . identifier[kill] ( identifier[self] . identifier[id] , identifier[signal] = identifier[signal] )
|
def kill(self, signal=None):
"""
Kill or send a signal to the container.
Args:
signal (str or int): The signal to send. Defaults to ``SIGKILL``
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
return self.client.api.kill(self.id, signal=signal)
|
def interpolate_to_netcdf(self, in_lon, in_lat, out_path, date_unit="seconds since 1970-01-01T00:00",
                          interp_type="spline"):
    """
    Calls the interpolation function and then saves the MRMS data to a netCDF file. It will also create
    separate directories for each variable if they are not already available.

    Args:
        in_lon: 2D array of output-grid longitudes (degrees east).
        in_lat: 2D array of output-grid latitudes (degrees north).
        out_path (str): Base output directory; a per-variable subdirectory is
            created beneath it. NOTE(review): assumes out_path ends with a
            path separator — confirm against callers.
        date_unit (str): CF-style time unit string used to encode dates.
        interp_type (str): "spline" uses spline interpolation; any other
            value falls back to maximum-neighbor sampling.
    """
    if interp_type == "spline":
        out_data = self.interpolate_grid(in_lon, in_lat)
    else:
        out_data = self.max_neighbor(in_lon, in_lat)
    # BUG FIX: os.access(..., os.R_OK) tests read *permission*, not existence,
    # which is the wrong predicate for "should this directory be created".
    # Use an existence check instead; keep the try/except since another
    # process may still create the directory between the check and mkdir.
    var_dir = out_path + self.variable
    if not os.path.isdir(var_dir):
        try:
            os.mkdir(var_dir)
        except OSError:
            print(var_dir + " already created")
    out_file = out_path + self.variable + "/" + "{0}_{1}_{2}.nc".format(self.variable,
                                                                        self.start_date.strftime("%Y%m%d-%H:%M"),
                                                                        self.end_date.strftime("%Y%m%d-%H:%M"))
    out_obj = Dataset(out_file, "w")
    # Dimensions follow the (time, y, x) layout of the interpolated data.
    out_obj.createDimension("time", out_data.shape[0])
    out_obj.createDimension("y", out_data.shape[1])
    out_obj.createDimension("x", out_data.shape[2])
    data_var = out_obj.createVariable(self.variable, "f4", ("time", "y", "x"), zlib=True,
                                      fill_value=-9999.0,
                                      least_significant_digit=3)
    data_var[:] = out_data
    data_var.long_name = self.variable
    data_var.coordinates = "latitude longitude"
    # Units are chosen from the MRMS variable name.
    if "MESH" in self.variable or "QPE" in self.variable:
        data_var.units = "mm"
    elif "Reflectivity" in self.variable:
        data_var.units = "dBZ"
    elif "Rotation" in self.variable:
        data_var.units = "s-1"
    else:
        data_var.units = ""
    out_lon = out_obj.createVariable("longitude", "f4", ("y", "x"), zlib=True)
    out_lon[:] = in_lon
    out_lon.units = "degrees_east"
    out_lat = out_obj.createVariable("latitude", "f4", ("y", "x"), zlib=True)
    out_lat[:] = in_lat
    out_lat.units = "degrees_north"
    # Encode valid times as integer offsets in the requested CF unit.
    dates = out_obj.createVariable("time", "i8", ("time",), zlib=True)
    dates[:] = np.round(date2num(self.all_dates.to_pydatetime(), date_unit)).astype(np.int64)
    dates.long_name = "Valid date"
    dates.units = date_unit
    out_obj.Conventions="CF-1.6"
    out_obj.close()
    return
|
def function[interpolate_to_netcdf, parameter[self, in_lon, in_lat, out_path, date_unit, interp_type]]:
constant[
Calls the interpolation function and then saves the MRMS data to a netCDF file. It will also create
separate directories for each variable if they are not already available.
]
if compare[name[interp_type] equal[==] constant[spline]] begin[:]
variable[out_data] assign[=] call[name[self].interpolate_grid, parameter[name[in_lon], name[in_lat]]]
if <ast.UnaryOp object at 0x7da1b0ef5600> begin[:]
<ast.Try object at 0x7da1b0ef5240>
variable[out_file] assign[=] binary_operation[binary_operation[binary_operation[name[out_path] + name[self].variable] + constant[/]] + call[constant[{0}_{1}_{2}.nc].format, parameter[name[self].variable, call[name[self].start_date.strftime, parameter[constant[%Y%m%d-%H:%M]]], call[name[self].end_date.strftime, parameter[constant[%Y%m%d-%H:%M]]]]]]
variable[out_obj] assign[=] call[name[Dataset], parameter[name[out_file], constant[w]]]
call[name[out_obj].createDimension, parameter[constant[time], call[name[out_data].shape][constant[0]]]]
call[name[out_obj].createDimension, parameter[constant[y], call[name[out_data].shape][constant[1]]]]
call[name[out_obj].createDimension, parameter[constant[x], call[name[out_data].shape][constant[2]]]]
variable[data_var] assign[=] call[name[out_obj].createVariable, parameter[name[self].variable, constant[f4], tuple[[<ast.Constant object at 0x7da1b0ef6ef0>, <ast.Constant object at 0x7da1b0ef5c30>, <ast.Constant object at 0x7da1b0ef7640>]]]]
call[name[data_var]][<ast.Slice object at 0x7da2041db6d0>] assign[=] name[out_data]
name[data_var].long_name assign[=] name[self].variable
name[data_var].coordinates assign[=] constant[latitude longitude]
if <ast.BoolOp object at 0x7da2041dbf10> begin[:]
name[data_var].units assign[=] constant[mm]
variable[out_lon] assign[=] call[name[out_obj].createVariable, parameter[constant[longitude], constant[f4], tuple[[<ast.Constant object at 0x7da20c6c7c70>, <ast.Constant object at 0x7da20c6c6b30>]]]]
call[name[out_lon]][<ast.Slice object at 0x7da204346860>] assign[=] name[in_lon]
name[out_lon].units assign[=] constant[degrees_east]
variable[out_lat] assign[=] call[name[out_obj].createVariable, parameter[constant[latitude], constant[f4], tuple[[<ast.Constant object at 0x7da204344fd0>, <ast.Constant object at 0x7da204344ee0>]]]]
call[name[out_lat]][<ast.Slice object at 0x7da204346dd0>] assign[=] name[in_lat]
name[out_lat].units assign[=] constant[degrees_north]
variable[dates] assign[=] call[name[out_obj].createVariable, parameter[constant[time], constant[i8], tuple[[<ast.Constant object at 0x7da204347790>]]]]
call[name[dates]][<ast.Slice object at 0x7da204347ca0>] assign[=] call[call[name[np].round, parameter[call[name[date2num], parameter[call[name[self].all_dates.to_pydatetime, parameter[]], name[date_unit]]]]].astype, parameter[name[np].int64]]
name[dates].long_name assign[=] constant[Valid date]
name[dates].units assign[=] name[date_unit]
name[out_obj].Conventions assign[=] constant[CF-1.6]
call[name[out_obj].close, parameter[]]
return[None]
|
keyword[def] identifier[interpolate_to_netcdf] ( identifier[self] , identifier[in_lon] , identifier[in_lat] , identifier[out_path] , identifier[date_unit] = literal[string] ,
identifier[interp_type] = literal[string] ):
literal[string]
keyword[if] identifier[interp_type] == literal[string] :
identifier[out_data] = identifier[self] . identifier[interpolate_grid] ( identifier[in_lon] , identifier[in_lat] )
keyword[else] :
identifier[out_data] = identifier[self] . identifier[max_neighbor] ( identifier[in_lon] , identifier[in_lat] )
keyword[if] keyword[not] identifier[os] . identifier[access] ( identifier[out_path] + identifier[self] . identifier[variable] , identifier[os] . identifier[R_OK] ):
keyword[try] :
identifier[os] . identifier[mkdir] ( identifier[out_path] + identifier[self] . identifier[variable] )
keyword[except] identifier[OSError] :
identifier[print] ( identifier[out_path] + identifier[self] . identifier[variable] + literal[string] )
identifier[out_file] = identifier[out_path] + identifier[self] . identifier[variable] + literal[string] + literal[string] . identifier[format] ( identifier[self] . identifier[variable] ,
identifier[self] . identifier[start_date] . identifier[strftime] ( literal[string] ),
identifier[self] . identifier[end_date] . identifier[strftime] ( literal[string] ))
identifier[out_obj] = identifier[Dataset] ( identifier[out_file] , literal[string] )
identifier[out_obj] . identifier[createDimension] ( literal[string] , identifier[out_data] . identifier[shape] [ literal[int] ])
identifier[out_obj] . identifier[createDimension] ( literal[string] , identifier[out_data] . identifier[shape] [ literal[int] ])
identifier[out_obj] . identifier[createDimension] ( literal[string] , identifier[out_data] . identifier[shape] [ literal[int] ])
identifier[data_var] = identifier[out_obj] . identifier[createVariable] ( identifier[self] . identifier[variable] , literal[string] ,( literal[string] , literal[string] , literal[string] ), identifier[zlib] = keyword[True] ,
identifier[fill_value] =- literal[int] ,
identifier[least_significant_digit] = literal[int] )
identifier[data_var] [:]= identifier[out_data]
identifier[data_var] . identifier[long_name] = identifier[self] . identifier[variable]
identifier[data_var] . identifier[coordinates] = literal[string]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[variable] keyword[or] literal[string] keyword[in] identifier[self] . identifier[variable] :
identifier[data_var] . identifier[units] = literal[string]
keyword[elif] literal[string] keyword[in] identifier[self] . identifier[variable] :
identifier[data_var] . identifier[units] = literal[string]
keyword[elif] literal[string] keyword[in] identifier[self] . identifier[variable] :
identifier[data_var] . identifier[units] = literal[string]
keyword[else] :
identifier[data_var] . identifier[units] = literal[string]
identifier[out_lon] = identifier[out_obj] . identifier[createVariable] ( literal[string] , literal[string] ,( literal[string] , literal[string] ), identifier[zlib] = keyword[True] )
identifier[out_lon] [:]= identifier[in_lon]
identifier[out_lon] . identifier[units] = literal[string]
identifier[out_lat] = identifier[out_obj] . identifier[createVariable] ( literal[string] , literal[string] ,( literal[string] , literal[string] ), identifier[zlib] = keyword[True] )
identifier[out_lat] [:]= identifier[in_lat]
identifier[out_lat] . identifier[units] = literal[string]
identifier[dates] = identifier[out_obj] . identifier[createVariable] ( literal[string] , literal[string] ,( literal[string] ,), identifier[zlib] = keyword[True] )
identifier[dates] [:]= identifier[np] . identifier[round] ( identifier[date2num] ( identifier[self] . identifier[all_dates] . identifier[to_pydatetime] (), identifier[date_unit] )). identifier[astype] ( identifier[np] . identifier[int64] )
identifier[dates] . identifier[long_name] = literal[string]
identifier[dates] . identifier[units] = identifier[date_unit]
identifier[out_obj] . identifier[Conventions] = literal[string]
identifier[out_obj] . identifier[close] ()
keyword[return]
|
def interpolate_to_netcdf(self, in_lon, in_lat, out_path, date_unit='seconds since 1970-01-01T00:00', interp_type='spline'):
"""
Calls the interpolation function and then saves the MRMS data to a netCDF file. It will also create
separate directories for each variable if they are not already available.
"""
if interp_type == 'spline':
out_data = self.interpolate_grid(in_lon, in_lat) # depends on [control=['if'], data=[]]
else:
out_data = self.max_neighbor(in_lon, in_lat)
if not os.access(out_path + self.variable, os.R_OK):
try:
os.mkdir(out_path + self.variable) # depends on [control=['try'], data=[]]
except OSError:
print(out_path + self.variable + ' already created') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
out_file = out_path + self.variable + '/' + '{0}_{1}_{2}.nc'.format(self.variable, self.start_date.strftime('%Y%m%d-%H:%M'), self.end_date.strftime('%Y%m%d-%H:%M'))
out_obj = Dataset(out_file, 'w')
out_obj.createDimension('time', out_data.shape[0])
out_obj.createDimension('y', out_data.shape[1])
out_obj.createDimension('x', out_data.shape[2])
data_var = out_obj.createVariable(self.variable, 'f4', ('time', 'y', 'x'), zlib=True, fill_value=-9999.0, least_significant_digit=3)
data_var[:] = out_data
data_var.long_name = self.variable
data_var.coordinates = 'latitude longitude'
if 'MESH' in self.variable or 'QPE' in self.variable:
data_var.units = 'mm' # depends on [control=['if'], data=[]]
elif 'Reflectivity' in self.variable:
data_var.units = 'dBZ' # depends on [control=['if'], data=[]]
elif 'Rotation' in self.variable:
data_var.units = 's-1' # depends on [control=['if'], data=[]]
else:
data_var.units = ''
out_lon = out_obj.createVariable('longitude', 'f4', ('y', 'x'), zlib=True)
out_lon[:] = in_lon
out_lon.units = 'degrees_east'
out_lat = out_obj.createVariable('latitude', 'f4', ('y', 'x'), zlib=True)
out_lat[:] = in_lat
out_lat.units = 'degrees_north'
dates = out_obj.createVariable('time', 'i8', ('time',), zlib=True)
dates[:] = np.round(date2num(self.all_dates.to_pydatetime(), date_unit)).astype(np.int64)
dates.long_name = 'Valid date'
dates.units = date_unit
out_obj.Conventions = 'CF-1.6'
out_obj.close()
return
|
def read_uic2tag(fh, byteorder, dtype, planecount, offsetsize):
    """Read MetaMorph STK UIC2Tag from file and return as dict.

    Args:
        fh: File handle exposing ``read_array(dtype, count)``.
        byteorder (str): Must be '<'; UIC2 tags are little-endian only.
        dtype (str): Must be '2I' (pairs of unsigned 32-bit integers).
        planecount (int): Number of image planes recorded in the tag.
        offsetsize: Unused here; present for reader-signature compatibility.

    Returns:
        dict: Per-plane arrays — ZDistance (numerator/denominator ratio),
        DateCreated/DateModified (julian days), TimeCreated/TimeModified
        (milliseconds).

    Raises:
        ValueError: If dtype or byteorder do not match the UIC2 encoding.
    """
    # BUG FIX: validate explicitly instead of with `assert`, which is
    # silently stripped when Python runs with the -O flag.
    if dtype != '2I' or byteorder != '<':
        raise ValueError(
            'invalid UIC2Tag encoding: dtype=%r byteorder=%r' % (dtype, byteorder))
    # Six uint32 values per plane: z-distance numerator and denominator,
    # then created date/time and modified date/time.
    values = fh.read_array('<u4', 6*planecount).reshape(planecount, 6)
    return {
        'ZDistance': values[:, 0] / values[:, 1],
        'DateCreated': values[:, 2],  # julian days
        'TimeCreated': values[:, 3],  # milliseconds
        'DateModified': values[:, 4],  # julian days
        'TimeModified': values[:, 5]}
|
def function[read_uic2tag, parameter[fh, byteorder, dtype, planecount, offsetsize]]:
constant[Read MetaMorph STK UIC2Tag from file and return as dict.]
assert[<ast.BoolOp object at 0x7da1b19a3670>]
variable[values] assign[=] call[call[name[fh].read_array, parameter[constant[<u4], binary_operation[constant[6] * name[planecount]]]].reshape, parameter[name[planecount], constant[6]]]
return[dictionary[[<ast.Constant object at 0x7da1b19f2fb0>, <ast.Constant object at 0x7da1b19f2f80>, <ast.Constant object at 0x7da1b19f2f50>, <ast.Constant object at 0x7da1b19f2f20>, <ast.Constant object at 0x7da1b19f2ef0>], [<ast.BinOp object at 0x7da1b19f2e90>, <ast.Subscript object at 0x7da1b19f2c80>, <ast.Subscript object at 0x7da1b19f2b90>, <ast.Subscript object at 0x7da1b19f2aa0>, <ast.Subscript object at 0x7da1b19f29b0>]]]
|
keyword[def] identifier[read_uic2tag] ( identifier[fh] , identifier[byteorder] , identifier[dtype] , identifier[planecount] , identifier[offsetsize] ):
literal[string]
keyword[assert] identifier[dtype] == literal[string] keyword[and] identifier[byteorder] == literal[string]
identifier[values] = identifier[fh] . identifier[read_array] ( literal[string] , literal[int] * identifier[planecount] ). identifier[reshape] ( identifier[planecount] , literal[int] )
keyword[return] {
literal[string] : identifier[values] [:, literal[int] ]/ identifier[values] [:, literal[int] ],
literal[string] : identifier[values] [:, literal[int] ],
literal[string] : identifier[values] [:, literal[int] ],
literal[string] : identifier[values] [:, literal[int] ],
literal[string] : identifier[values] [:, literal[int] ]}
|
def read_uic2tag(fh, byteorder, dtype, planecount, offsetsize):
"""Read MetaMorph STK UIC2Tag from file and return as dict."""
assert dtype == '2I' and byteorder == '<'
values = fh.read_array('<u4', 6 * planecount).reshape(planecount, 6) # julian days
# milliseconds
# julian days
return {'ZDistance': values[:, 0] / values[:, 1], 'DateCreated': values[:, 2], 'TimeCreated': values[:, 3], 'DateModified': values[:, 4], 'TimeModified': values[:, 5]}
|
def _calculate(self, startingPercentage, endPercentage, startDate, endDate):
    """Error calculation called by :py:meth:`BaseErrorMeasure.get_error`.

    Both percentage parameters are guaranteed valid at this point.

    :param float startingPercentage: Start of the interval, in [0.0, 100.0].
        E.g. 25.0 means the first 25% of all calculated errors are ignored.
    :param float endPercentage: End of the interval, in [0.0, 100.0].
        E.g. 90.0 means the last 10% of all local errors are ignored.
    :param float startDate: Epoch representing the start date used for
        error calculation.
    :param float endDate: Epoch representing the end date used in the
        error calculation.

    :return: The mean absolute deviation of the selected error values,
        normalised by the corresponding historic mean.
    :rtype: float
    """
    # Restrict the local errors to the requested interval.
    errorValues = self._get_error_values(startingPercentage, endPercentage, startDate, endDate)
    # Determine which historic mean should normalise the result.
    if startDate is None:
        meanIdx = int((startingPercentage * len(self._errorValues)) / 100.0)
    else:
        # _get_error_values has already validated startDate, so at least
        # one recorded date is >= startDate here.
        candidates = [date for date in self._errorDates if date >= startDate]
        meanIdx = self._errorDates.index(min(candidates))
    meanAbsDeviation = sum(errorValues) / float(len(errorValues))
    return meanAbsDeviation / self._historicMeans[meanIdx]
|
def function[_calculate, parameter[self, startingPercentage, endPercentage, startDate, endDate]]:
constant[This is the error calculation function that gets called by :py:meth:`BaseErrorMeasure.get_error`.
Both parameters will be correct at this time.
:param float startingPercentage: Defines the start of the interval. This has to be a value in [0.0, 100.0].
It represents the value, where the error calculation should be started.
25.0 for example means that the first 25% of all calculated errors will be ignored.
:param float endPercentage: Defines the end of the interval. This has to be a value in [0.0, 100.0].
It represents the value, after which all error values will be ignored. 90.0 for example means that
the last 10% of all local errors will be ignored.
:param float startDate: Epoch representing the start date used for error calculation.
:param float endDate: Epoch representing the end date used in the error calculation.
:return: Returns a float representing the error.
:rtype: float
]
variable[errorValues] assign[=] call[name[self]._get_error_values, parameter[name[startingPercentage], name[endPercentage], name[startDate], name[endDate]]]
if compare[name[startDate] is_not constant[None]] begin[:]
variable[possibleDates] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da2041d9630>, name[self]._errorDates]]
variable[meanIdx] assign[=] call[name[self]._errorDates.index, parameter[call[name[min], parameter[name[possibleDates]]]]]
variable[mad] assign[=] binary_operation[call[name[sum], parameter[name[errorValues]]] / call[name[float], parameter[call[name[len], parameter[name[errorValues]]]]]]
variable[historicMean] assign[=] call[name[self]._historicMeans][name[meanIdx]]
return[binary_operation[name[mad] / name[historicMean]]]
|
keyword[def] identifier[_calculate] ( identifier[self] , identifier[startingPercentage] , identifier[endPercentage] , identifier[startDate] , identifier[endDate] ):
literal[string]
identifier[errorValues] = identifier[self] . identifier[_get_error_values] ( identifier[startingPercentage] , identifier[endPercentage] , identifier[startDate] , identifier[endDate] )
keyword[if] identifier[startDate] keyword[is] keyword[not] keyword[None] :
identifier[possibleDates] = identifier[filter] ( keyword[lambda] identifier[date] : identifier[date] >= identifier[startDate] , identifier[self] . identifier[_errorDates] )
identifier[meanIdx] = identifier[self] . identifier[_errorDates] . identifier[index] ( identifier[min] ( identifier[possibleDates] ))
keyword[else] :
identifier[meanIdx] = identifier[int] (( identifier[startingPercentage] * identifier[len] ( identifier[self] . identifier[_errorValues] ))/ literal[int] )
identifier[mad] = identifier[sum] ( identifier[errorValues] )/ identifier[float] ( identifier[len] ( identifier[errorValues] ))
identifier[historicMean] = identifier[self] . identifier[_historicMeans] [ identifier[meanIdx] ]
keyword[return] identifier[mad] / identifier[historicMean]
|
def _calculate(self, startingPercentage, endPercentage, startDate, endDate):
"""This is the error calculation function that gets called by :py:meth:`BaseErrorMeasure.get_error`.
Both parameters will be correct at this time.
:param float startingPercentage: Defines the start of the interval. This has to be a value in [0.0, 100.0].
It represents the value, where the error calculation should be started.
25.0 for example means that the first 25% of all calculated errors will be ignored.
:param float endPercentage: Defines the end of the interval. This has to be a value in [0.0, 100.0].
It represents the value, after which all error values will be ignored. 90.0 for example means that
the last 10% of all local errors will be ignored.
:param float startDate: Epoch representing the start date used for error calculation.
:param float endDate: Epoch representing the end date used in the error calculation.
:return: Returns a float representing the error.
:rtype: float
"""
# get the defined subset of error values
errorValues = self._get_error_values(startingPercentage, endPercentage, startDate, endDate)
# get the historic mean
if startDate is not None:
possibleDates = filter(lambda date: date >= startDate, self._errorDates)
# This piece of code is not required, because _get_error_values already ensured that the startDate
# was correct. Otherwise it would have thrown an exception.
#if 0 == len(possibleDates):
# raise ValueError("%s does not represent a valid startDate." % startDate)
meanIdx = self._errorDates.index(min(possibleDates)) # depends on [control=['if'], data=['startDate']]
else:
meanIdx = int(startingPercentage * len(self._errorValues) / 100.0)
mad = sum(errorValues) / float(len(errorValues))
historicMean = self._historicMeans[meanIdx]
return mad / historicMean
|
def which(self, path, mode="r"):
    # type: (Text, Text) -> Tuple[Optional[Text], Optional[FS]]
    """Get a tuple of (name, fs) that the given path would map to.

    Arguments:
        path (str): A path on the filesystem.
        mode (str): An `io.open` mode.

    """
    # Any writable mode always maps to the designated write filesystem.
    if check_writable(mode):
        return self._write_fs_name, self.write_fs
    # Otherwise, pick the first child filesystem that contains the path;
    # fall back to (None, None) when no filesystem has it.
    return next(
        ((name, fs) for name, fs in self.iterate_fs() if fs.exists(path)),
        (None, None),
    )
|
def function[which, parameter[self, path, mode]]:
constant[Get a tuple of (name, fs) that the given path would map to.
Arguments:
path (str): A path on the filesystem.
mode (str): An `io.open` mode.
]
if call[name[check_writable], parameter[name[mode]]] begin[:]
return[tuple[[<ast.Attribute object at 0x7da1b16043d0>, <ast.Attribute object at 0x7da1b1604550>]]]
for taget[tuple[[<ast.Name object at 0x7da1b1604730>, <ast.Name object at 0x7da1b16047c0>]]] in starred[call[name[self].iterate_fs, parameter[]]] begin[:]
if call[name[fs].exists, parameter[name[path]]] begin[:]
return[tuple[[<ast.Name object at 0x7da1b1605060>, <ast.Name object at 0x7da1b1605210>]]]
return[tuple[[<ast.Constant object at 0x7da1b1604fd0>, <ast.Constant object at 0x7da1b1605240>]]]
|
keyword[def] identifier[which] ( identifier[self] , identifier[path] , identifier[mode] = literal[string] ):
literal[string]
keyword[if] identifier[check_writable] ( identifier[mode] ):
keyword[return] identifier[self] . identifier[_write_fs_name] , identifier[self] . identifier[write_fs]
keyword[for] identifier[name] , identifier[fs] keyword[in] identifier[self] . identifier[iterate_fs] ():
keyword[if] identifier[fs] . identifier[exists] ( identifier[path] ):
keyword[return] identifier[name] , identifier[fs]
keyword[return] keyword[None] , keyword[None]
|
def which(self, path, mode='r'):
# type: (Text, Text) -> Tuple[Optional[Text], Optional[FS]]
'Get a tuple of (name, fs) that the given path would map to.\n\n Arguments:\n path (str): A path on the filesystem.\n mode (str): An `io.open` mode.\n\n '
if check_writable(mode):
return (self._write_fs_name, self.write_fs) # depends on [control=['if'], data=[]]
for (name, fs) in self.iterate_fs():
if fs.exists(path):
return (name, fs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return (None, None)
|
def score(self):
    """
    Return a cached heuristic score for this Parser against the provided
    script source and path, used to rank ArgumentParsers from most to least
    likely to work on a given script/source file.

    Each parser's heuristic() returns a list of booleans, one per matched
    condition; the score is the fraction of conditions that matched.

    :return: float
    """
    # Lazily compute the score on first access, then reuse the cached value.
    if self._heuristic_score is not None:
        return self._heuristic_score
    outcomes = self.heuristic()
    self._heuristic_score = float(sum(outcomes)) / float(len(outcomes))
    return self._heuristic_score
|
def function[score, parameter[self]]:
constant[
Calculate and return a heuristic score for this Parser against the provided
script source and path. This is used to order the ArgumentParsers as "most likely to work"
against a given script/source file.
Each parser has a calculate_score() function that returns a list of booleans representing
the matches against conditions. This is converted into a % match and used to sort parse engines.
:return: float
]
if compare[name[self]._heuristic_score is constant[None]] begin[:]
variable[matches] assign[=] call[name[self].heuristic, parameter[]]
name[self]._heuristic_score assign[=] binary_operation[call[name[float], parameter[call[name[sum], parameter[name[matches]]]]] / call[name[float], parameter[call[name[len], parameter[name[matches]]]]]]
return[name[self]._heuristic_score]
|
keyword[def] identifier[score] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_heuristic_score] keyword[is] keyword[None] :
identifier[matches] = identifier[self] . identifier[heuristic] ()
identifier[self] . identifier[_heuristic_score] = identifier[float] ( identifier[sum] ( identifier[matches] ))/ identifier[float] ( identifier[len] ( identifier[matches] ))
keyword[return] identifier[self] . identifier[_heuristic_score]
|
def score(self):
"""
Calculate and return a heuristic score for this Parser against the provided
script source and path. This is used to order the ArgumentParsers as "most likely to work"
against a given script/source file.
Each parser has a calculate_score() function that returns a list of booleans representing
the matches against conditions. This is converted into a % match and used to sort parse engines.
:return: float
"""
if self._heuristic_score is None:
matches = self.heuristic()
self._heuristic_score = float(sum(matches)) / float(len(matches)) # depends on [control=['if'], data=[]]
return self._heuristic_score
|
def raise_args_err(message='bad arguments', error_class=TypeError):
    """Throw an error with standard message, displaying function call.

    >>> def f(a, *args, **kwargs):
    ...     raise_args_err()
    ...
    >>> f(1, 2, x='y')
    Traceback (most recent call last):
    ...
    TypeError: bad arguments: f(1, 2, x='y')
    """
    # Grab the caller's frame (one level up) so the formatted message
    # shows the offending call site, not this helper.
    caller_frame = inspect.currentframe().f_back
    raise error_class('{}: {}'.format(message, format_call(caller_frame)))
|
def function[raise_args_err, parameter[message, error_class]]:
constant[Throw an error with standard message, displaying function call.
>>> def f(a, *args, **kwargs):
... raise_args_err()
...
>>> f(1, 2, x='y')
Traceback (most recent call last):
...
TypeError: bad arguments: f(1, 2, x='y')
]
variable[frame] assign[=] call[name[inspect].currentframe, parameter[]].f_back
<ast.Raise object at 0x7da20c7c8b50>
|
keyword[def] identifier[raise_args_err] ( identifier[message] = literal[string] , identifier[error_class] = identifier[TypeError] ):
literal[string]
identifier[frame] = identifier[inspect] . identifier[currentframe] (). identifier[f_back]
keyword[raise] identifier[error_class] ( identifier[message] + literal[string] + identifier[format_call] ( identifier[frame] ))
|
def raise_args_err(message='bad arguments', error_class=TypeError):
"""Throw an error with standard message, displaying function call.
>>> def f(a, *args, **kwargs):
... raise_args_err()
...
>>> f(1, 2, x='y')
Traceback (most recent call last):
...
TypeError: bad arguments: f(1, 2, x='y')
"""
frame = inspect.currentframe().f_back
raise error_class(message + ': ' + format_call(frame))
|
def image_url(self, width=None, height=None):
    """
    Returns URL to placeholder image
    Example: http://placehold.it/640x480
    """
    # Fall back to a random dimension (capped at 1024) whenever the
    # supplied value is falsy (None or 0), matching the `or` semantics.
    if not width:
        width = self.random_int(max=1024)
    if not height:
        height = self.random_int(max=1024)
    template = self.random_element(self.image_placeholder_services)
    return template.format(width=width, height=height)
|
def function[image_url, parameter[self, width, height]]:
constant[
Returns URL to placeholder image
Example: http://placehold.it/640x480
]
variable[width_] assign[=] <ast.BoolOp object at 0x7da18dc9b8b0>
variable[height_] assign[=] <ast.BoolOp object at 0x7da18dc9a9b0>
variable[placeholder_url] assign[=] call[name[self].random_element, parameter[name[self].image_placeholder_services]]
return[call[name[placeholder_url].format, parameter[]]]
|
keyword[def] identifier[image_url] ( identifier[self] , identifier[width] = keyword[None] , identifier[height] = keyword[None] ):
literal[string]
identifier[width_] = identifier[width] keyword[or] identifier[self] . identifier[random_int] ( identifier[max] = literal[int] )
identifier[height_] = identifier[height] keyword[or] identifier[self] . identifier[random_int] ( identifier[max] = literal[int] )
identifier[placeholder_url] = identifier[self] . identifier[random_element] ( identifier[self] . identifier[image_placeholder_services] )
keyword[return] identifier[placeholder_url] . identifier[format] ( identifier[width] = identifier[width_] , identifier[height] = identifier[height_] )
|
def image_url(self, width=None, height=None):
"""
Returns URL to placeholder image
Example: http://placehold.it/640x480
"""
width_ = width or self.random_int(max=1024)
height_ = height or self.random_int(max=1024)
placeholder_url = self.random_element(self.image_placeholder_services)
return placeholder_url.format(width=width_, height=height_)
|
def slope_percentile(self, date, mag):
    """
    Return 10% and 90% percentile of slope.

    Parameters
    ----------
    date : array_like
        An array of phase-folded date. Sorted.
    mag : array_like
        An array of phase-folded magnitudes. Sorted by date.

    Returns
    -------
    per_10 : float
        10% percentile values of slope.
    per_90 : float
        90% percentile values of slope.
    """
    # Consecutive differences; np.diff is equivalent to the manual
    # date[1:] - date[:-1] slicing previously used here.
    date_diff = np.diff(date)
    mag_diff = np.diff(mag)
    # Drop pairs with identical magnitudes to avoid division by zero.
    nonzero = np.where(mag_diff != 0.)
    date_diff = date_diff[nonzero]
    mag_diff = mag_diff[nonzero]
    # NOTE(review): this is d(date)/d(mag) — the inverse of the usual
    # slope definition — kept as-is for backward compatibility.
    slope = date_diff / mag_diff
    # One vectorized percentile call instead of two passes over the data.
    percentile_10, percentile_90 = np.percentile(slope, [10., 90.])
    return percentile_10, percentile_90
|
def function[slope_percentile, parameter[self, date, mag]]:
constant[
Return 10% and 90% percentile of slope.
Parameters
----------
date : array_like
An array of phase-folded date. Sorted.
mag : array_like
An array of phase-folded magnitudes. Sorted by date.
Returns
-------
per_10 : float
10% percentile values of slope.
per_90 : float
90% percentile values of slope.
]
variable[date_diff] assign[=] binary_operation[call[name[date]][<ast.Slice object at 0x7da204960820>] - call[name[date]][<ast.Slice object at 0x7da204963ca0>]]
variable[mag_diff] assign[=] binary_operation[call[name[mag]][<ast.Slice object at 0x7da207f02530>] - call[name[mag]][<ast.Slice object at 0x7da207f03ee0>]]
variable[index] assign[=] call[name[np].where, parameter[compare[name[mag_diff] not_equal[!=] constant[0.0]]]]
variable[date_diff] assign[=] call[name[date_diff]][name[index]]
variable[mag_diff] assign[=] call[name[mag_diff]][name[index]]
variable[slope] assign[=] binary_operation[name[date_diff] / name[mag_diff]]
variable[percentile_10] assign[=] call[name[np].percentile, parameter[name[slope], constant[10.0]]]
variable[percentile_90] assign[=] call[name[np].percentile, parameter[name[slope], constant[90.0]]]
return[tuple[[<ast.Name object at 0x7da207f00eb0>, <ast.Name object at 0x7da207f02d10>]]]
|
keyword[def] identifier[slope_percentile] ( identifier[self] , identifier[date] , identifier[mag] ):
literal[string]
identifier[date_diff] = identifier[date] [ literal[int] :]- identifier[date] [: identifier[len] ( identifier[date] )- literal[int] ]
identifier[mag_diff] = identifier[mag] [ literal[int] :]- identifier[mag] [: identifier[len] ( identifier[mag] )- literal[int] ]
identifier[index] = identifier[np] . identifier[where] ( identifier[mag_diff] != literal[int] )
identifier[date_diff] = identifier[date_diff] [ identifier[index] ]
identifier[mag_diff] = identifier[mag_diff] [ identifier[index] ]
identifier[slope] = identifier[date_diff] / identifier[mag_diff]
identifier[percentile_10] = identifier[np] . identifier[percentile] ( identifier[slope] , literal[int] )
identifier[percentile_90] = identifier[np] . identifier[percentile] ( identifier[slope] , literal[int] )
keyword[return] identifier[percentile_10] , identifier[percentile_90]
|
def slope_percentile(self, date, mag):
"""
Return 10% and 90% percentile of slope.
Parameters
----------
date : array_like
An array of phase-folded date. Sorted.
mag : array_like
An array of phase-folded magnitudes. Sorted by date.
Returns
-------
per_10 : float
10% percentile values of slope.
per_90 : float
90% percentile values of slope.
"""
date_diff = date[1:] - date[:len(date) - 1]
mag_diff = mag[1:] - mag[:len(mag) - 1]
# Remove zero mag_diff.
index = np.where(mag_diff != 0.0)
date_diff = date_diff[index]
mag_diff = mag_diff[index]
# Derive slope.
slope = date_diff / mag_diff
percentile_10 = np.percentile(slope, 10.0)
percentile_90 = np.percentile(slope, 90.0)
return (percentile_10, percentile_90)
|
def addFASTAEditingCommandLineOptions(parser):
    """
    Add standard FASTA editing command-line options to an argparse parser.

    These are options that can be used to alter FASTA records, NOT options
    that simply select or reject those things (for those see
    addFASTAFilteringCommandLineOptions).

    @param parser: An C{argparse.ArgumentParser} instance.
    """
    # The four site-selection options below are mutually exclusive. Their
    # '...Indices' spellings are alternate names kept for backwards
    # compatibility.
    group = parser.add_mutually_exclusive_group()

    group.add_argument('--keepSites', '--keepIndices', help=(
        'Specify 1-based sequence sites to keep. All other sites will be removed. The sites must be given in the form e.g., 24,100-200,260. Note that the requested sites will be taken from the input sequences in order, not in the order given by --keepSites. I.e., --keepSites 5,8-10 will get you the same result as --keepSites 8-10,5.'))

    group.add_argument('--keepSitesFile', '--keepIndicesFile', help=(
        'Specify a file containing 1-based sites to keep. All other sequence sites will be removed. Lines in the file must be given in the form e.g., 24,100-200,260. See --keepSites for more detail.'))

    group.add_argument('--removeSites', '--removeIndices', help=(
        'Specify 1-based sites to remove. All other sequence sites will be kept. The sites must be given in the form e.g., 24,100-200,260. See --keepSites for more detail.'))

    group.add_argument('--removeSitesFile', '--removeIndicesFile', help=(
        'Specify a file containing 1-based sites to remove. All other sequence sites will be kept. Lines in the file must be given in the form e.g., 24,100-200,260. See --keepSites for more detail.'))

    # Remaining (non-exclusive) editing options.
    parser.add_argument('--removeGaps', action='store_true', default=False, help=(
        "If True, gap ('-') characters in sequences will be removed."))

    parser.add_argument('--truncateTitlesAfter', help=(
        'A string that sequence titles (ids) will be truncated beyond. If the truncated version of a title has already been seen, that title will be skipped.'))

    parser.add_argument('--removeDescriptions', action='store_true', default=False, help=(
        'Read id descriptions will be removed. The description is the part of a sequence id after the first whitespace (if any).'))

    parser.add_argument('--idLambda', metavar='LAMBDA-FUNCTION', help=(
        'A one-argument function taking and returning a read id. E.g., --idLambda "lambda id: id.split(\'_\')[0]" or --idLambda "lambda id: id[:10]". If the function returns None, the read will be filtered out.'))

    parser.add_argument('--readLambda', metavar='LAMBDA-FUNCTION', help=(
        'A one-argument function taking and returning a read. E.g., --readLambda "lambda r: Read(r.id.split(\'_\')[0], r.sequence.strip(\'-\')". Make sure to also modify the quality string if you change the length of a FASTQ sequence. If the function returns None, the read will be filtered out. The function will be passed to eval with the dark.reads classes Read, DNARead, AARead, etc. all in scope.'))

    parser.add_argument('--reverse', action='store_true', default=False, help=(
        'Reverse the sequences. Note that this is NOT reverse complementing.'))

    parser.add_argument('--reverseComplement', action='store_true', default=False, help=(
        'Reverse complement the sequences.'))
|
def function[addFASTAEditingCommandLineOptions, parameter[parser]]:
constant[
Add standard FASTA editing command-line options to an argparse parser.
These are options that can be used to alter FASTA records, NOT options
that simply select or reject those things (for those see
addFASTAFilteringCommandLineOptions).
@param parser: An C{argparse.ArgumentParser} instance.
]
variable[group] assign[=] call[name[parser].add_mutually_exclusive_group, parameter[]]
call[name[group].add_argument, parameter[constant[--keepSites], constant[--keepIndices]]]
call[name[group].add_argument, parameter[constant[--keepSitesFile], constant[--keepIndicesFile]]]
call[name[group].add_argument, parameter[constant[--removeSites], constant[--removeIndices]]]
call[name[group].add_argument, parameter[constant[--removeSitesFile], constant[--removeIndicesFile]]]
call[name[parser].add_argument, parameter[constant[--removeGaps]]]
call[name[parser].add_argument, parameter[constant[--truncateTitlesAfter]]]
call[name[parser].add_argument, parameter[constant[--removeDescriptions]]]
call[name[parser].add_argument, parameter[constant[--idLambda]]]
call[name[parser].add_argument, parameter[constant[--readLambda]]]
call[name[parser].add_argument, parameter[constant[--reverse]]]
call[name[parser].add_argument, parameter[constant[--reverseComplement]]]
|
keyword[def] identifier[addFASTAEditingCommandLineOptions] ( identifier[parser] ):
literal[string]
identifier[group] = identifier[parser] . identifier[add_mutually_exclusive_group] ()
identifier[group] . identifier[add_argument] (
literal[string] , literal[string] ,
identifier[help] =( literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] ))
identifier[group] . identifier[add_argument] (
literal[string] , literal[string] ,
identifier[help] =( literal[string]
literal[string]
literal[string]
literal[string] ))
identifier[group] . identifier[add_argument] (
literal[string] , literal[string] ,
identifier[help] =( literal[string]
literal[string]
literal[string] ))
identifier[group] . identifier[add_argument] (
literal[string] , literal[string] ,
identifier[help] =( literal[string]
literal[string]
literal[string]
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[help] =( literal[string]
literal[string]
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] ,
identifier[help] =( literal[string]
literal[string]
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[metavar] = literal[string] ,
identifier[help] =( literal[string]
literal[string]
literal[string]
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[metavar] = literal[string] ,
identifier[help] =( literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] ,
identifier[help] =( literal[string]
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] ,
identifier[help] = literal[string] )
|
def addFASTAEditingCommandLineOptions(parser):
"""
Add standard FASTA editing command-line options to an argparse parser.
These are options that can be used to alter FASTA records, NOT options
that simply select or reject those things (for those see
addFASTAFilteringCommandLineOptions).
@param parser: An C{argparse.ArgumentParser} instance.
"""
# A mutually exclusive group for --keepSites, --keepSitesFile,
# --removeSites, and --removeSitesFile.
group = parser.add_mutually_exclusive_group()
# In the 4 options below, the 'indices' alternate names are kept for
# backwards compatibility.
group.add_argument('--keepSites', '--keepIndices', help='Specify 1-based sequence sites to keep. All other sites will be removed. The sites must be given in the form e.g., 24,100-200,260. Note that the requested sites will be taken from the input sequences in order, not in the order given by --keepSites. I.e., --keepSites 5,8-10 will get you the same result as --keepSites 8-10,5.')
group.add_argument('--keepSitesFile', '--keepIndicesFile', help='Specify a file containing 1-based sites to keep. All other sequence sites will be removed. Lines in the file must be given in the form e.g., 24,100-200,260. See --keepSites for more detail.')
group.add_argument('--removeSites', '--removeIndices', help='Specify 1-based sites to remove. All other sequence sites will be kept. The sites must be given in the form e.g., 24,100-200,260. See --keepSites for more detail.')
group.add_argument('--removeSitesFile', '--removeIndicesFile', help='Specify a file containing 1-based sites to remove. All other sequence sites will be kept. Lines in the file must be given in the form e.g., 24,100-200,260. See --keepSites for more detail.')
parser.add_argument('--removeGaps', action='store_true', default=False, help="If True, gap ('-') characters in sequences will be removed.")
parser.add_argument('--truncateTitlesAfter', help='A string that sequence titles (ids) will be truncated beyond. If the truncated version of a title has already been seen, that title will be skipped.')
parser.add_argument('--removeDescriptions', action='store_true', default=False, help='Read id descriptions will be removed. The description is the part of a sequence id after the first whitespace (if any).')
parser.add_argument('--idLambda', metavar='LAMBDA-FUNCTION', help='A one-argument function taking and returning a read id. E.g., --idLambda "lambda id: id.split(\'_\')[0]" or --idLambda "lambda id: id[:10]". If the function returns None, the read will be filtered out.')
parser.add_argument('--readLambda', metavar='LAMBDA-FUNCTION', help='A one-argument function taking and returning a read. E.g., --readLambda "lambda r: Read(r.id.split(\'_\')[0], r.sequence.strip(\'-\')". Make sure to also modify the quality string if you change the length of a FASTQ sequence. If the function returns None, the read will be filtered out. The function will be passed to eval with the dark.reads classes Read, DNARead, AARead, etc. all in scope.')
parser.add_argument('--reverse', action='store_true', default=False, help='Reverse the sequences. Note that this is NOT reverse complementing.')
parser.add_argument('--reverseComplement', action='store_true', default=False, help='Reverse complement the sequences.')
|
def acquire_subsamples_gp1(input_data, file_name=None):
    """
    Function invoked for plotting a grid-plot with 3x2 format, showing the differences in ECG
    signals accordingly to the chosen sampling frequency.
    Applied in the cell with tag "subsampling_grid_plot_1".
    ----------
    Parameters
    ----------
    input_data : dict
        Dictionary with ECG signal to present.
    file_name : str
        Path containing the destination folder where the Bokeh figure will be stored.
    """
    # NOTE(review): file_name is currently unused — the _generate_bokeh_file
    # call below is commented out, so nothing is written to disk here.
    # Generation of the HTML file where the plot will be stored.
    #file_name = _generate_bokeh_file(file_name)
    # Number of acquired samples (Original sample_rate = 4000 Hz)
    fs_orig = 4000
    # NOTE(review): despite the docstring saying dict, input_data is used as a
    # 1-D sequence of samples (len(), numpy.interp) — confirm against callers.
    nbr_samples_orig = len(input_data)
    # Keys of data_interp are sampling rates as strings; each value holds the
    # resampled "time" axis and "data" samples for that rate.
    data_interp = {"4000": {}}
    data_interp["4000"]["data"] = input_data
    data_interp["4000"]["time"] = numpy.linspace(0, nbr_samples_orig / fs_orig, nbr_samples_orig)
    # Constants
    time_orig = data_interp["4000"]["time"]
    data_orig = data_interp["4000"]["data"]
    # ============ Interpolation of data accordingly to the desired sampling frequency ============
    # sample_rate in [3000, 1000, 500, 200, 100] - Some of the available sample frequencies at Plux
    # acquisition systems
    # sample_rate in [50, 20] - Non-functional sampling frequencies (Not available at Plux devices
    # because of their limited application)
    for sample_rate in [3000, 1000, 500, 200, 100, 50, 20]:
        fs_str = str(sample_rate)
        # Number of samples that the same time span would contain at this rate.
        nbr_samples_interp = int((nbr_samples_orig * sample_rate) / fs_orig)
        data_interp[fs_str] = {}
        data_interp[fs_str]["time"] = numpy.linspace(0, nbr_samples_orig / fs_orig,
                                                     nbr_samples_interp)
        # Linear interpolation of the original signal onto the coarser grid.
        data_interp[fs_str]["data"] = numpy.interp(data_interp[fs_str]["time"], time_orig,
                                                   data_orig)
    # List that store the figure handler.
    list_figures = []
    # Generation of Bokeh Figures.
    # Only the six highest rates are plotted; 50 and 20 Hz are interpolated
    # above but not shown.
    for iter_nbr, sample_rate in enumerate(["4000", "3000", "1000", "500", "200", "100"]):
        # Start a new row every 2 figures (even iter_nbr), giving the 3x2 grid.
        # (iter_nbr == 0 is redundant since 0 % 2 == 0, but kept as-is.)
        if iter_nbr == 0 or iter_nbr % 2 == 0:
            list_figures.append([])
        # Plotting phase.
        # Only the first second of data (sample_rate samples) is drawn.
        list_figures[-1].append(figure(x_axis_label='Time (s)', y_axis_label='Raw Data',
                                       title="Sampling Frequency: " + sample_rate + " Hz",
                                       **opensignals_kwargs("figure")))
        list_figures[-1][-1].line(data_interp[sample_rate]["time"][:int(sample_rate)],
                                  data_interp[sample_rate]["data"][:int(sample_rate)],
                                  **opensignals_kwargs("line"))
|
def function[acquire_subsamples_gp1, parameter[input_data, file_name]]:
constant[
Function invoked for plotting a grid-plot with 3x2 format, showing the differences in ECG
signals accordingly to the chosen sampling frequency.
Applied in the cell with tag "subsampling_grid_plot_1".
----------
Parameters
----------
input_data : dict
Dictionary with ECG signal to present.
file_name : str
Path containing the destination folder where the Bokeh figure will be stored.
]
variable[fs_orig] assign[=] constant[4000]
variable[nbr_samples_orig] assign[=] call[name[len], parameter[name[input_data]]]
variable[data_interp] assign[=] dictionary[[<ast.Constant object at 0x7da20e9623e0>], [<ast.Dict object at 0x7da20e962ef0>]]
call[call[name[data_interp]][constant[4000]]][constant[data]] assign[=] name[input_data]
call[call[name[data_interp]][constant[4000]]][constant[time]] assign[=] call[name[numpy].linspace, parameter[constant[0], binary_operation[name[nbr_samples_orig] / name[fs_orig]], name[nbr_samples_orig]]]
variable[time_orig] assign[=] call[call[name[data_interp]][constant[4000]]][constant[time]]
variable[data_orig] assign[=] call[call[name[data_interp]][constant[4000]]][constant[data]]
for taget[name[sample_rate]] in starred[list[[<ast.Constant object at 0x7da1b25ece20>, <ast.Constant object at 0x7da1b25ed6c0>, <ast.Constant object at 0x7da1b25ecfd0>, <ast.Constant object at 0x7da1b25ef730>, <ast.Constant object at 0x7da1b25ecd30>, <ast.Constant object at 0x7da1b25ed330>, <ast.Constant object at 0x7da1b25eee60>]]] begin[:]
variable[fs_str] assign[=] call[name[str], parameter[name[sample_rate]]]
variable[nbr_samples_interp] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[nbr_samples_orig] * name[sample_rate]] / name[fs_orig]]]]
call[name[data_interp]][name[fs_str]] assign[=] dictionary[[], []]
call[call[name[data_interp]][name[fs_str]]][constant[time]] assign[=] call[name[numpy].linspace, parameter[constant[0], binary_operation[name[nbr_samples_orig] / name[fs_orig]], name[nbr_samples_interp]]]
call[call[name[data_interp]][name[fs_str]]][constant[data]] assign[=] call[name[numpy].interp, parameter[call[call[name[data_interp]][name[fs_str]]][constant[time]], name[time_orig], name[data_orig]]]
variable[list_figures] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b25ed780>, <ast.Name object at 0x7da1b25ee950>]]] in starred[call[name[enumerate], parameter[list[[<ast.Constant object at 0x7da1b25eccd0>, <ast.Constant object at 0x7da1b25ec7f0>, <ast.Constant object at 0x7da1b25edcf0>, <ast.Constant object at 0x7da1b25edde0>, <ast.Constant object at 0x7da1b25ec6d0>, <ast.Constant object at 0x7da1b25efdc0>]]]]] begin[:]
if <ast.BoolOp object at 0x7da1b25ee620> begin[:]
call[name[list_figures].append, parameter[list[[]]]]
call[call[name[list_figures]][<ast.UnaryOp object at 0x7da1b25ec8b0>].append, parameter[call[name[figure], parameter[]]]]
call[call[call[name[list_figures]][<ast.UnaryOp object at 0x7da1b25ecbb0>]][<ast.UnaryOp object at 0x7da1b25ef160>].line, parameter[call[call[call[name[data_interp]][name[sample_rate]]][constant[time]]][<ast.Slice object at 0x7da1b25edc30>], call[call[call[name[data_interp]][name[sample_rate]]][constant[data]]][<ast.Slice object at 0x7da1b25ec9d0>]]]
|
keyword[def] identifier[acquire_subsamples_gp1] ( identifier[input_data] , identifier[file_name] = keyword[None] ):
literal[string]
identifier[fs_orig] = literal[int]
identifier[nbr_samples_orig] = identifier[len] ( identifier[input_data] )
identifier[data_interp] ={ literal[string] :{}}
identifier[data_interp] [ literal[string] ][ literal[string] ]= identifier[input_data]
identifier[data_interp] [ literal[string] ][ literal[string] ]= identifier[numpy] . identifier[linspace] ( literal[int] , identifier[nbr_samples_orig] / identifier[fs_orig] , identifier[nbr_samples_orig] )
identifier[time_orig] = identifier[data_interp] [ literal[string] ][ literal[string] ]
identifier[data_orig] = identifier[data_interp] [ literal[string] ][ literal[string] ]
keyword[for] identifier[sample_rate] keyword[in] [ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]:
identifier[fs_str] = identifier[str] ( identifier[sample_rate] )
identifier[nbr_samples_interp] = identifier[int] (( identifier[nbr_samples_orig] * identifier[sample_rate] )/ identifier[fs_orig] )
identifier[data_interp] [ identifier[fs_str] ]={}
identifier[data_interp] [ identifier[fs_str] ][ literal[string] ]= identifier[numpy] . identifier[linspace] ( literal[int] , identifier[nbr_samples_orig] / identifier[fs_orig] ,
identifier[nbr_samples_interp] )
identifier[data_interp] [ identifier[fs_str] ][ literal[string] ]= identifier[numpy] . identifier[interp] ( identifier[data_interp] [ identifier[fs_str] ][ literal[string] ], identifier[time_orig] ,
identifier[data_orig] )
identifier[list_figures] =[]
keyword[for] identifier[iter_nbr] , identifier[sample_rate] keyword[in] identifier[enumerate] ([ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]):
keyword[if] identifier[iter_nbr] == literal[int] keyword[or] identifier[iter_nbr] % literal[int] == literal[int] :
identifier[list_figures] . identifier[append] ([])
identifier[list_figures] [- literal[int] ]. identifier[append] ( identifier[figure] ( identifier[x_axis_label] = literal[string] , identifier[y_axis_label] = literal[string] ,
identifier[title] = literal[string] + identifier[sample_rate] + literal[string] ,
** identifier[opensignals_kwargs] ( literal[string] )))
identifier[list_figures] [- literal[int] ][- literal[int] ]. identifier[line] ( identifier[data_interp] [ identifier[sample_rate] ][ literal[string] ][: identifier[int] ( identifier[sample_rate] )],
identifier[data_interp] [ identifier[sample_rate] ][ literal[string] ][: identifier[int] ( identifier[sample_rate] )],
** identifier[opensignals_kwargs] ( literal[string] ))
|
def acquire_subsamples_gp1(input_data, file_name=None):
"""
Function invoked for plotting a grid-plot with 3x2 format, showing the differences in ECG
signals accordingly to the chosen sampling frequency.
Applied in the cell with tag "subsampling_grid_plot_1".
----------
Parameters
----------
input_data : dict
Dictionary with ECG signal to present.
file_name : str
Path containing the destination folder where the Bokeh figure will be stored.
"""
# Generation of the HTML file where the plot will be stored.
#file_name = _generate_bokeh_file(file_name)
# Number of acquired samples (Original sample_rate = 4000 Hz)
fs_orig = 4000
nbr_samples_orig = len(input_data)
data_interp = {'4000': {}}
data_interp['4000']['data'] = input_data
data_interp['4000']['time'] = numpy.linspace(0, nbr_samples_orig / fs_orig, nbr_samples_orig)
# Constants
time_orig = data_interp['4000']['time']
data_orig = data_interp['4000']['data']
# ============ Interpolation of data accordingly to the desired sampling frequency ============
# sample_rate in [3000, 1000, 500, 200, 100] - Some of the available sample frequencies at Plux
# acquisition systems
# sample_rate in [50, 20] - Non-functional sampling frequencies (Not available at Plux devices
# because of their limited application)
for sample_rate in [3000, 1000, 500, 200, 100, 50, 20]:
fs_str = str(sample_rate)
nbr_samples_interp = int(nbr_samples_orig * sample_rate / fs_orig)
data_interp[fs_str] = {}
data_interp[fs_str]['time'] = numpy.linspace(0, nbr_samples_orig / fs_orig, nbr_samples_interp)
data_interp[fs_str]['data'] = numpy.interp(data_interp[fs_str]['time'], time_orig, data_orig) # depends on [control=['for'], data=['sample_rate']]
# List that store the figure handler.
list_figures = []
# Generation of Bokeh Figures.
for (iter_nbr, sample_rate) in enumerate(['4000', '3000', '1000', '500', '200', '100']):
# If figure number is a multiple of 3 or if we are generating the first figure...
if iter_nbr == 0 or iter_nbr % 2 == 0:
list_figures.append([]) # depends on [control=['if'], data=[]]
# Plotting phase.
list_figures[-1].append(figure(x_axis_label='Time (s)', y_axis_label='Raw Data', title='Sampling Frequency: ' + sample_rate + ' Hz', **opensignals_kwargs('figure')))
list_figures[-1][-1].line(data_interp[sample_rate]['time'][:int(sample_rate)], data_interp[sample_rate]['data'][:int(sample_rate)], **opensignals_kwargs('line')) # depends on [control=['for'], data=[]]
|
def process_binding_statements(self):
    """Looks for Binding events in the graph and extracts them into INDRA
    statements.

    In particular, looks for a Binding event node with outgoing edges
    with relations Theme and Theme2 - the entities these edges point to
    are the two constituents of the Complex INDRA statement.

    Returns
    -------
    statements : list
        A list of Complex statements, one per Binding event found.
    """
    # Fix: the original bound ``G = self.G`` but never used it; removed.
    statements = []
    binding_nodes = self.find_event_with_outgoing_edges('Binding',
                                                        ['Theme',
                                                         'Theme2'])
    for node in binding_nodes:
        theme1 = self.get_entity_text_for_relation(node, 'Theme')
        theme1_node = self.get_related_node(node, 'Theme')
        theme2 = self.get_entity_text_for_relation(node, 'Theme2')
        # Both themes are guaranteed by find_event_with_outgoing_edges;
        # these asserts document that invariant.
        assert(theme1 is not None)
        assert(theme2 is not None)
        # Evidence is taken from the first theme's node.
        evidence = self.node_to_evidence(theme1_node, is_direct=True)
        statements.append(Complex([s2a(theme1), s2a(theme2)],
                                  evidence=evidence))
    return statements
|
def function[process_binding_statements, parameter[self]]:
constant[Looks for Binding events in the graph and extracts them into INDRA
statements.
In particular, looks for a Binding event node with outgoing edges
with relations Theme and Theme2 - the entities these edges point to
are the two constituents of the Complex INDRA statement.
]
variable[G] assign[=] name[self].G
variable[statements] assign[=] list[[]]
variable[binding_nodes] assign[=] call[name[self].find_event_with_outgoing_edges, parameter[constant[Binding], list[[<ast.Constant object at 0x7da1b23454b0>, <ast.Constant object at 0x7da1b2347be0>]]]]
for taget[name[node]] in starred[name[binding_nodes]] begin[:]
variable[theme1] assign[=] call[name[self].get_entity_text_for_relation, parameter[name[node], constant[Theme]]]
variable[theme1_node] assign[=] call[name[self].get_related_node, parameter[name[node], constant[Theme]]]
variable[theme2] assign[=] call[name[self].get_entity_text_for_relation, parameter[name[node], constant[Theme2]]]
assert[compare[name[theme1] is_not constant[None]]]
assert[compare[name[theme2] is_not constant[None]]]
variable[evidence] assign[=] call[name[self].node_to_evidence, parameter[name[theme1_node]]]
call[name[statements].append, parameter[call[name[Complex], parameter[list[[<ast.Call object at 0x7da1b23467d0>, <ast.Call object at 0x7da1b23459c0>]]]]]]
return[name[statements]]
|
keyword[def] identifier[process_binding_statements] ( identifier[self] ):
literal[string]
identifier[G] = identifier[self] . identifier[G]
identifier[statements] =[]
identifier[binding_nodes] = identifier[self] . identifier[find_event_with_outgoing_edges] ( literal[string] ,
[ literal[string] ,
literal[string] ])
keyword[for] identifier[node] keyword[in] identifier[binding_nodes] :
identifier[theme1] = identifier[self] . identifier[get_entity_text_for_relation] ( identifier[node] , literal[string] )
identifier[theme1_node] = identifier[self] . identifier[get_related_node] ( identifier[node] , literal[string] )
identifier[theme2] = identifier[self] . identifier[get_entity_text_for_relation] ( identifier[node] , literal[string] )
keyword[assert] ( identifier[theme1] keyword[is] keyword[not] keyword[None] )
keyword[assert] ( identifier[theme2] keyword[is] keyword[not] keyword[None] )
identifier[evidence] = identifier[self] . identifier[node_to_evidence] ( identifier[theme1_node] , identifier[is_direct] = keyword[True] )
identifier[statements] . identifier[append] ( identifier[Complex] ([ identifier[s2a] ( identifier[theme1] ), identifier[s2a] ( identifier[theme2] )],
identifier[evidence] = identifier[evidence] ))
keyword[return] identifier[statements]
|
def process_binding_statements(self):
"""Looks for Binding events in the graph and extracts them into INDRA
statements.
In particular, looks for a Binding event node with outgoing edges
with relations Theme and Theme2 - the entities these edges point to
are the two constituents of the Complex INDRA statement.
"""
G = self.G
statements = []
binding_nodes = self.find_event_with_outgoing_edges('Binding', ['Theme', 'Theme2'])
for node in binding_nodes:
theme1 = self.get_entity_text_for_relation(node, 'Theme')
theme1_node = self.get_related_node(node, 'Theme')
theme2 = self.get_entity_text_for_relation(node, 'Theme2')
assert theme1 is not None
assert theme2 is not None
evidence = self.node_to_evidence(theme1_node, is_direct=True)
statements.append(Complex([s2a(theme1), s2a(theme2)], evidence=evidence)) # depends on [control=['for'], data=['node']]
return statements
|
def run_setup_error_group():
    """Run the phase group example where an error occurs in a setup phase.

    The terminal setup phase shortcuts the test.  The main phases are
    skipped.  The PhaseGroup is not entered, so the teardown phases are
    also skipped.
    """
    group = htf.PhaseGroup(
        setup=[error_setup_phase],
        main=[main_phase],
        teardown=[teardown_phase],
    )
    htf.Test(group).execute()
|
def function[run_setup_error_group, parameter[]]:
constant[Run the phase group example where an error occurs in a setup phase.
The terminal setup phase shortcuts the test. The main phases are
skipped. The PhaseGroup is not entered, so the teardown phases are also
skipped.
]
variable[test] assign[=] call[name[htf].Test, parameter[call[name[htf].PhaseGroup, parameter[]]]]
call[name[test].execute, parameter[]]
|
keyword[def] identifier[run_setup_error_group] ():
literal[string]
identifier[test] = identifier[htf] . identifier[Test] ( identifier[htf] . identifier[PhaseGroup] (
identifier[setup] =[ identifier[error_setup_phase] ],
identifier[main] =[ identifier[main_phase] ],
identifier[teardown] =[ identifier[teardown_phase] ],
))
identifier[test] . identifier[execute] ()
|
def run_setup_error_group():
"""Run the phase group example where an error occurs in a setup phase.
The terminal setup phase shortcuts the test. The main phases are
skipped. The PhaseGroup is not entered, so the teardown phases are also
skipped.
"""
test = htf.Test(htf.PhaseGroup(setup=[error_setup_phase], main=[main_phase], teardown=[teardown_phase]))
test.execute()
|
def reconstitute_path(drive, folders):
    """Invert a tuple from `get_path_components` back into a path string.

    :param drive: A drive (eg 'c:'). Only applicable for NT systems;
        pass an empty string elsewhere.
    :param folders: A list of folder names.
    :return: A path comprising the drive and list of folder names. The path
        terminates with an `os.path.sep` *only* if it is a root directory.
    """
    return os.path.join(drive, os.path.sep, *folders)
|
def function[reconstitute_path, parameter[drive, folders]]:
constant[Reverts a tuple from `get_path_components` into a path.
:param drive: A drive (eg 'c:'). Only applicable for NT systems
:param folders: A list of folder names
:return: A path comprising the drive and list of folder names. The path terminate
with a `os.path.sep` *only* if it is a root directory
]
variable[reconstituted] assign[=] call[name[os].path.join, parameter[name[drive], name[os].path.sep, <ast.Starred object at 0x7da1b0ae1660>]]
return[name[reconstituted]]
|
keyword[def] identifier[reconstitute_path] ( identifier[drive] , identifier[folders] ):
literal[string]
identifier[reconstituted] = identifier[os] . identifier[path] . identifier[join] ( identifier[drive] , identifier[os] . identifier[path] . identifier[sep] ,* identifier[folders] )
keyword[return] identifier[reconstituted]
|
def reconstitute_path(drive, folders):
"""Reverts a tuple from `get_path_components` into a path.
:param drive: A drive (eg 'c:'). Only applicable for NT systems
:param folders: A list of folder names
:return: A path comprising the drive and list of folder names. The path terminate
with a `os.path.sep` *only* if it is a root directory
"""
reconstituted = os.path.join(drive, os.path.sep, *folders)
return reconstituted
|
def store_transition(self, frame, action, reward, done, extra_info=None):
    """ Store given transition in the backend """
    # Advance the circular write pointer.
    write_idx = (self.current_idx + 1) % self.buffer_capacity
    self.current_idx = write_idx

    if self.frame_stack_compensation:
        # The environment already stacks frames; keep only the newest
        # slice of the last axis (1/frame_history of its width).
        keep = np.arange(-frame.shape[-1] // self.frame_history, 0)
        frame = np.take(frame, indices=keep, axis=-1)

    self.state_buffer[write_idx] = frame
    self.action_buffer[write_idx] = action
    self.reward_buffer[write_idx] = reward
    self.dones_buffer[write_idx] = done

    if extra_info is not None:
        for name in extra_info:
            value = extra_info[name]
            if name not in self.extra_data:
                # Storage for a key is allocated lazily the first time it
                # appears; new keys are only allowed before any data was stored.
                assert self.current_size == 0, f"New data {name} encountered in the middle of the training"
                self.extra_data[name] = np.zeros([self.buffer_capacity] + list(value.shape), dtype=value.dtype)
            self.extra_data[name][write_idx] = value

    # Grow the fill counter until the ring buffer is full.
    if self.current_size < self.buffer_capacity:
        self.current_size += 1

    return write_idx
|
def function[store_transition, parameter[self, frame, action, reward, done, extra_info]]:
constant[ Store given transition in the backend ]
name[self].current_idx assign[=] binary_operation[binary_operation[name[self].current_idx + constant[1]] <ast.Mod object at 0x7da2590d6920> name[self].buffer_capacity]
if name[self].frame_stack_compensation begin[:]
variable[idx_range] assign[=] call[name[np].arange, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b1602e60> <ast.FloorDiv object at 0x7da2590d6bc0> name[self].frame_history], constant[0]]]
variable[frame] assign[=] call[name[np].take, parameter[name[frame]]]
call[name[self].state_buffer][name[self].current_idx] assign[=] name[frame]
call[name[self].action_buffer][name[self].current_idx] assign[=] name[action]
call[name[self].reward_buffer][name[self].current_idx] assign[=] name[reward]
call[name[self].dones_buffer][name[self].current_idx] assign[=] name[done]
if compare[name[extra_info] is_not constant[None]] begin[:]
for taget[name[name]] in starred[name[extra_info]] begin[:]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self].extra_data] begin[:]
assert[compare[name[self].current_size equal[==] constant[0]]]
variable[array] assign[=] call[name[extra_info]][name[name]]
call[name[self].extra_data][name[name]] assign[=] call[name[np].zeros, parameter[binary_operation[list[[<ast.Attribute object at 0x7da1b1630d00>]] + call[name[list], parameter[name[array].shape]]]]]
call[call[name[self].extra_data][name[name]]][name[self].current_idx] assign[=] call[name[extra_info]][name[name]]
if compare[name[self].current_size less[<] name[self].buffer_capacity] begin[:]
<ast.AugAssign object at 0x7da1b1631450>
return[name[self].current_idx]
|
keyword[def] identifier[store_transition] ( identifier[self] , identifier[frame] , identifier[action] , identifier[reward] , identifier[done] , identifier[extra_info] = keyword[None] ):
literal[string]
identifier[self] . identifier[current_idx] =( identifier[self] . identifier[current_idx] + literal[int] )% identifier[self] . identifier[buffer_capacity]
keyword[if] identifier[self] . identifier[frame_stack_compensation] :
identifier[idx_range] = identifier[np] . identifier[arange] (- identifier[frame] . identifier[shape] [- literal[int] ]// identifier[self] . identifier[frame_history] , literal[int] )
identifier[frame] = identifier[np] . identifier[take] ( identifier[frame] , identifier[indices] = identifier[idx_range] , identifier[axis] =- literal[int] )
identifier[self] . identifier[state_buffer] [ identifier[self] . identifier[current_idx] ]= identifier[frame]
identifier[self] . identifier[action_buffer] [ identifier[self] . identifier[current_idx] ]= identifier[action]
identifier[self] . identifier[reward_buffer] [ identifier[self] . identifier[current_idx] ]= identifier[reward]
identifier[self] . identifier[dones_buffer] [ identifier[self] . identifier[current_idx] ]= identifier[done]
keyword[if] identifier[extra_info] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[name] keyword[in] identifier[extra_info] :
keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[extra_data] :
keyword[assert] identifier[self] . identifier[current_size] == literal[int] , literal[string]
identifier[array] = identifier[extra_info] [ identifier[name] ]
identifier[self] . identifier[extra_data] [ identifier[name] ]= identifier[np] . identifier[zeros] ([ identifier[self] . identifier[buffer_capacity] ]+ identifier[list] ( identifier[array] . identifier[shape] ), identifier[dtype] = identifier[array] . identifier[dtype] )
identifier[self] . identifier[extra_data] [ identifier[name] ][ identifier[self] . identifier[current_idx] ]= identifier[extra_info] [ identifier[name] ]
keyword[if] identifier[self] . identifier[current_size] < identifier[self] . identifier[buffer_capacity] :
identifier[self] . identifier[current_size] += literal[int]
keyword[return] identifier[self] . identifier[current_idx]
|
def store_transition(self, frame, action, reward, done, extra_info=None):
""" Store given transition in the backend """
self.current_idx = (self.current_idx + 1) % self.buffer_capacity
if self.frame_stack_compensation:
# Compensate for frame stack built into the environment
idx_range = np.arange(-frame.shape[-1] // self.frame_history, 0)
frame = np.take(frame, indices=idx_range, axis=-1) # depends on [control=['if'], data=[]]
self.state_buffer[self.current_idx] = frame
self.action_buffer[self.current_idx] = action
self.reward_buffer[self.current_idx] = reward
self.dones_buffer[self.current_idx] = done
if extra_info is not None:
for name in extra_info:
if name not in self.extra_data:
assert self.current_size == 0, f'New data {name} encountered in the middle of the training'
array = extra_info[name]
self.extra_data[name] = np.zeros([self.buffer_capacity] + list(array.shape), dtype=array.dtype) # depends on [control=['if'], data=['name']]
self.extra_data[name][self.current_idx] = extra_info[name] # depends on [control=['for'], data=['name']] # depends on [control=['if'], data=['extra_info']]
if self.current_size < self.buffer_capacity:
self.current_size += 1 # depends on [control=['if'], data=[]]
return self.current_idx
|
def fix_timestamps(self, time_ref):
    """
    Shift every stored time stamp by ``time_ref`` in place, so that the
    last run ends at time 0.
    """
    for record in self.data.values():
        for stamps in record.time_stamps.values():
            # Mutate the innermost dict values directly.
            for key in stamps:
                stamps[key] += time_ref
|
def function[fix_timestamps, parameter[self, time_ref]]:
constant[
manipulates internal time stamps such that the last run ends at time 0
]
for taget[tuple[[<ast.Name object at 0x7da1b1714f40>, <ast.Name object at 0x7da1b1715d80>]]] in starred[call[name[self].data.items, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1715810>, <ast.Name object at 0x7da1b1716440>]]] in starred[call[name[v].time_stamps.items, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1715cc0>, <ast.Name object at 0x7da1b1716c80>]]] in starred[call[name[vv].items, parameter[]]] begin[:]
<ast.AugAssign object at 0x7da1b1716e90>
|
keyword[def] identifier[fix_timestamps] ( identifier[self] , identifier[time_ref] ):
literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[data] . identifier[items] ():
keyword[for] identifier[kk] , identifier[vv] keyword[in] identifier[v] . identifier[time_stamps] . identifier[items] ():
keyword[for] identifier[kkk] , identifier[vvv] keyword[in] identifier[vv] . identifier[items] ():
identifier[self] . identifier[data] [ identifier[k] ]. identifier[time_stamps] [ identifier[kk] ][ identifier[kkk] ]+= identifier[time_ref]
|
def fix_timestamps(self, time_ref):
"""
manipulates internal time stamps such that the last run ends at time 0
"""
for (k, v) in self.data.items():
for (kk, vv) in v.time_stamps.items():
for (kkk, vvv) in vv.items():
self.data[k].time_stamps[kk][kkk] += time_ref # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
|
def runSavedQuery(self, saved_query_obj, returned_properties=None):
    """Query workitems using the :class:`rtcclient.models.SavedQuery`
    object

    :param saved_query_obj: the :class:`rtcclient.models.SavedQuery`
        object
    :param returned_properties: the returned properties that you want.
        Refer to :class:`rtcclient.client.RTCClient` for more explanations
    :return: a :class:`list` that contains the queried
        :class:`rtcclient.workitem.Workitem` objects
    :rtype: list
    :raises exception.RTCException: if the saved query id cannot be
        extracted from the saved query's results URL
    """
    try:
        # The query id is the second-to-last path component of the
        # results URL, e.g. ".../<query_id>/results".
        saved_query_id = saved_query_obj.results.split("/")[-2]
    except (AttributeError, IndexError, TypeError) as excp:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; chain the cause for easier debugging.
        error_msg = "Cannot get the correct saved query id"
        self.log.error(error_msg)
        raise exception.RTCException(error_msg) from excp
    return self._runSavedQuery(saved_query_id,
                               returned_properties=returned_properties)
|
def function[runSavedQuery, parameter[self, saved_query_obj, returned_properties]]:
constant[Query workitems using the :class:`rtcclient.models.SavedQuery`
object
:param saved_query_obj: the :class:`rtcclient.models.SavedQuery`
object
:param returned_properties: the returned properties that you want.
Refer to :class:`rtcclient.client.RTCClient` for more explanations
:return: a :class:`list` that contains the queried
:class:`rtcclient.workitem.Workitem` objects
:rtype: list
]
<ast.Try object at 0x7da1b2828a00>
return[call[name[self]._runSavedQuery, parameter[name[saved_query_id]]]]
|
keyword[def] identifier[runSavedQuery] ( identifier[self] , identifier[saved_query_obj] , identifier[returned_properties] = keyword[None] ):
literal[string]
keyword[try] :
identifier[saved_query_id] = identifier[saved_query_obj] . identifier[results] . identifier[split] ( literal[string] )[- literal[int] ]
keyword[except] :
identifier[error_msg] = literal[string]
identifier[self] . identifier[log] . identifier[error] ( identifier[error_msg] )
keyword[raise] identifier[exception] . identifier[RTCException] ( identifier[error_msg] )
keyword[return] identifier[self] . identifier[_runSavedQuery] ( identifier[saved_query_id] ,
identifier[returned_properties] = identifier[returned_properties] )
|
def runSavedQuery(self, saved_query_obj, returned_properties=None):
"""Query workitems using the :class:`rtcclient.models.SavedQuery`
object
:param saved_query_obj: the :class:`rtcclient.models.SavedQuery`
object
:param returned_properties: the returned properties that you want.
Refer to :class:`rtcclient.client.RTCClient` for more explanations
:return: a :class:`list` that contains the queried
:class:`rtcclient.workitem.Workitem` objects
:rtype: list
"""
try:
saved_query_id = saved_query_obj.results.split('/')[-2] # depends on [control=['try'], data=[]]
except:
error_msg = 'Cannot get the correct saved query id'
self.log.error(error_msg)
raise exception.RTCException(error_msg) # depends on [control=['except'], data=[]]
return self._runSavedQuery(saved_query_id, returned_properties=returned_properties)
|
def builtin_list():
    """Show a listing of all our builtin templates"""
    for template in resource_listdir(__name__, "templates"):
        base_name = os.path.basename(abspath(template))
        builtin, ext = os.path.splitext(base_name)
        # YAML side files are not templates themselves.
        if ext == '.yml':
            continue
        help_obj = load_template_help(builtin)
        if 'name' not in help_obj:
            print("%s" % builtin)
        else:
            # Pad the template id to 20 columns so names line up.
            print("%-*s %s" % (20, builtin, help_obj['name']))
|
def function[builtin_list, parameter[]]:
constant[Show a listing of all our builtin templates]
for taget[name[template]] in starred[call[name[resource_listdir], parameter[name[__name__], constant[templates]]]] begin[:]
<ast.Tuple object at 0x7da1b26ac820> assign[=] call[name[os].path.splitext, parameter[call[name[os].path.basename, parameter[call[name[abspath], parameter[name[template]]]]]]]
if compare[name[ext] equal[==] constant[.yml]] begin[:]
continue
variable[help_obj] assign[=] call[name[load_template_help], parameter[name[builtin]]]
if compare[constant[name] in name[help_obj]] begin[:]
call[name[print], parameter[binary_operation[constant[%-*s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da20cabf190>, <ast.Name object at 0x7da20cabf400>, <ast.Subscript object at 0x7da20cabfd90>]]]]]
|
keyword[def] identifier[builtin_list] ():
literal[string]
keyword[for] identifier[template] keyword[in] identifier[resource_listdir] ( identifier[__name__] , literal[string] ):
identifier[builtin] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[abspath] ( identifier[template] )))
keyword[if] identifier[ext] == literal[string] :
keyword[continue]
identifier[help_obj] = identifier[load_template_help] ( identifier[builtin] )
keyword[if] literal[string] keyword[in] identifier[help_obj] :
identifier[print] ( literal[string] %( literal[int] , identifier[builtin] , identifier[help_obj] [ literal[string] ]))
keyword[else] :
identifier[print] ( literal[string] % identifier[builtin] )
|
def builtin_list():
"""Show a listing of all our builtin templates"""
for template in resource_listdir(__name__, 'templates'):
(builtin, ext) = os.path.splitext(os.path.basename(abspath(template)))
if ext == '.yml':
continue # depends on [control=['if'], data=[]]
help_obj = load_template_help(builtin)
if 'name' in help_obj:
print('%-*s %s' % (20, builtin, help_obj['name'])) # depends on [control=['if'], data=['help_obj']]
else:
print('%s' % builtin) # depends on [control=['for'], data=['template']]
|
def _execute(self, sender, event_args):
'''
Event handler for timer that processes all queued messages.
'''
with self._lock:
while not self._messages.empty():
msg, args, kwargs = self._messages.get(False)
for subscriber in self._subscribers[msg]:
try:
subscriber(*args, **kwargs)
except weakref.ReferenceError:
# Reference to handler is lost and it is OK to silence it
pass
|
def function[_execute, parameter[self, sender, event_args]]:
constant[
Event handler for timer that processes all queued messages.
]
with name[self]._lock begin[:]
while <ast.UnaryOp object at 0x7da18bccae90> begin[:]
<ast.Tuple object at 0x7da204347a00> assign[=] call[name[self]._messages.get, parameter[constant[False]]]
for taget[name[subscriber]] in starred[call[name[self]._subscribers][name[msg]]] begin[:]
<ast.Try object at 0x7da204347af0>
|
keyword[def] identifier[_execute] ( identifier[self] , identifier[sender] , identifier[event_args] ):
literal[string]
keyword[with] identifier[self] . identifier[_lock] :
keyword[while] keyword[not] identifier[self] . identifier[_messages] . identifier[empty] ():
identifier[msg] , identifier[args] , identifier[kwargs] = identifier[self] . identifier[_messages] . identifier[get] ( keyword[False] )
keyword[for] identifier[subscriber] keyword[in] identifier[self] . identifier[_subscribers] [ identifier[msg] ]:
keyword[try] :
identifier[subscriber] (* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[weakref] . identifier[ReferenceError] :
keyword[pass]
|
def _execute(self, sender, event_args):
"""
Event handler for timer that processes all queued messages.
"""
with self._lock:
while not self._messages.empty():
(msg, args, kwargs) = self._messages.get(False)
for subscriber in self._subscribers[msg]:
try:
subscriber(*args, **kwargs) # depends on [control=['try'], data=[]]
except weakref.ReferenceError:
# Reference to handler is lost and it is OK to silence it
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['subscriber']] # depends on [control=['while'], data=[]] # depends on [control=['with'], data=[]]
|
def get_colour(self, val, colformat='hex'):
    """ Given a value, return a colour within the colour scale.

    :param val: the value to map onto the scale; non-numeric characters
        are stripped before conversion
    :param colformat: currently unused; a hex code is always returned
    :return: a hex colour string, or '' if anything goes wrong
    """
    try:
        # Sanity checks: strip everything except digits and the decimal
        # point (note this also drops any minus sign, as before).
        val = re.sub(r"[^0-9\.]", "", str(val))
        if val == '':
            val = self.minval
        # Clamp into the configured [minval, maxval] domain.
        val = float(val)
        val = max(val, self.minval)
        val = min(val, self.maxval)
        domain_nums = list(np.linspace(self.minval, self.maxval, len(self.colours)))
        my_scale = spectra.scale(self.colours).domain(domain_nums)
        # Weird, I know. I ported this from the original JavaScript for continuity
        # Seems to work better than adjusting brightness / saturation / luminosity
        rgb_converter = lambda x: max(0, min(1, 1 + ((x - 1) * 0.3)))
        thecolour = spectra.rgb(*[rgb_converter(v) for v in my_scale(val).rgb])
        return thecolour.hexcode
    except Exception:
        # Deliberate best-effort fallback: a colour failure shouldn't
        # crash all of MultiQC. Narrowed from a bare `except:` so
        # KeyboardInterrupt/SystemExit still propagate.
        return ''
|
def function[get_colour, parameter[self, val, colformat]]:
constant[ Given a value, return a colour within the colour scale ]
<ast.Try object at 0x7da18eb57e80>
|
keyword[def] identifier[get_colour] ( identifier[self] , identifier[val] , identifier[colformat] = literal[string] ):
literal[string]
keyword[try] :
identifier[val] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[str] ( identifier[val] ))
keyword[if] identifier[val] == literal[string] :
identifier[val] = identifier[self] . identifier[minval]
identifier[val] = identifier[float] ( identifier[val] )
identifier[val] = identifier[max] ( identifier[val] , identifier[self] . identifier[minval] )
identifier[val] = identifier[min] ( identifier[val] , identifier[self] . identifier[maxval] )
identifier[domain_nums] = identifier[list] ( identifier[np] . identifier[linspace] ( identifier[self] . identifier[minval] , identifier[self] . identifier[maxval] , identifier[len] ( identifier[self] . identifier[colours] )))
identifier[my_scale] = identifier[spectra] . identifier[scale] ( identifier[self] . identifier[colours] ). identifier[domain] ( identifier[domain_nums] )
identifier[rgb_converter] = keyword[lambda] identifier[x] : identifier[max] ( literal[int] , identifier[min] ( literal[int] , literal[int] +(( identifier[x] - literal[int] )* literal[int] )))
identifier[thecolour] = identifier[spectra] . identifier[rgb] (*[ identifier[rgb_converter] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[my_scale] ( identifier[val] ). identifier[rgb] ])
keyword[return] identifier[thecolour] . identifier[hexcode]
keyword[except] :
keyword[return] literal[string]
|
def get_colour(self, val, colformat='hex'):
""" Given a value, return a colour within the colour scale """
try: # Sanity checks
val = re.sub('[^0-9\\.]', '', str(val))
if val == '':
val = self.minval # depends on [control=['if'], data=['val']]
val = float(val)
val = max(val, self.minval)
val = min(val, self.maxval)
domain_nums = list(np.linspace(self.minval, self.maxval, len(self.colours)))
my_scale = spectra.scale(self.colours).domain(domain_nums) # Weird, I know. I ported this from the original JavaScript for continuity
# Seems to work better than adjusting brightness / saturation / luminosity
rgb_converter = lambda x: max(0, min(1, 1 + (x - 1) * 0.3))
thecolour = spectra.rgb(*[rgb_converter(v) for v in my_scale(val).rgb])
return thecolour.hexcode # depends on [control=['try'], data=[]]
except: # Shouldn't crash all of MultiQC just for colours
return '' # depends on [control=['except'], data=[]]
|
def do_clearaccess(self, line):
    """clearaccess Remove all subjects from access policy Only the submitter will
    have access to the object."""
    # Docstring above is the cmd-style help text; kept verbatim.
    self._split_args(line, 0, 0)
    access_control = self._command_processor.get_session().get_access_control()
    access_control.clear()
    self._print_info_if_verbose("Removed all subjects from access policy")
|
def function[do_clearaccess, parameter[self, line]]:
constant[clearaccess Remove all subjects from access policy Only the submitter will
have access to the object.]
call[name[self]._split_args, parameter[name[line], constant[0], constant[0]]]
call[call[call[name[self]._command_processor.get_session, parameter[]].get_access_control, parameter[]].clear, parameter[]]
call[name[self]._print_info_if_verbose, parameter[constant[Removed all subjects from access policy]]]
|
keyword[def] identifier[do_clearaccess] ( identifier[self] , identifier[line] ):
literal[string]
identifier[self] . identifier[_split_args] ( identifier[line] , literal[int] , literal[int] )
identifier[self] . identifier[_command_processor] . identifier[get_session] (). identifier[get_access_control] (). identifier[clear] ()
identifier[self] . identifier[_print_info_if_verbose] ( literal[string] )
|
def do_clearaccess(self, line):
"""clearaccess Remove all subjects from access policy Only the submitter will
have access to the object."""
self._split_args(line, 0, 0)
self._command_processor.get_session().get_access_control().clear()
self._print_info_if_verbose('Removed all subjects from access policy')
|
def write_sample_sheet(output_file, accessions, names, celfile_urls, sel=None):
    """Generate a sample sheet in tab-separated text format.
    The columns contain the following sample attributes:
    1) accession
    2) name
    3) CEL file name
    4) CEL file URL
    Parameters
    ----------
    output_file: str
        The path of the output file.
    accessions: list or tuple of str
        The sample accessions.
    names: list or tuple of str
        The sample names.
    celfile_urls: list or tuple of str
        The sample CEL file URLs.
    sel: Iterable, optional
        A list of sample indices to include. If None, all samples are included.
        [None]
    Returns
    -------
    None
    """
    assert isinstance(output_file, str)
    assert isinstance(accessions, (list, tuple))
    for acc in accessions:
        assert isinstance(acc, str)
    assert isinstance(names, (list, tuple))
    for n in names:
        assert isinstance(n, str)
    assert isinstance(celfile_urls, (list, tuple))
    for u in celfile_urls:
        assert isinstance(u, str)
    if sel is not None:
        assert isinstance(sel, Iterable)
        for i in sel:
            assert isinstance(i, (int, np.integer))
    # Open in text mode with newline='' as the csv module requires on
    # Python 3 (the previous binary-mode 'wb' open made writerow raise
    # TypeError); line endings are still controlled by lineterminator.
    with open(output_file, 'w', newline='') as ofh:
        writer = csv.writer(ofh, dialect='excel-tab',
                            lineterminator=os.linesep,
                            quoting=csv.QUOTE_NONE)
        # write header
        writer.writerow(['Accession', 'Name', 'CEL file name', 'CEL file URL'])
        if sel is None:
            sel = range(len(names))
        for i in sel:
            # The CEL file name is the last component of its URL.
            cf = celfile_urls[i].split('/')[-1]
            writer.writerow([accessions[i], names[i], cf, celfile_urls[i]])
|
def function[write_sample_sheet, parameter[output_file, accessions, names, celfile_urls, sel]]:
constant[Generate a sample sheet in tab-separated text format.
The columns contain the following sample attributes:
1) accession
2) name
3) CEL file name
4) CEL file URL
Parameters
----------
output_file: str
The path of the output file.
accessions: list or tuple of str
The sample accessions.
names: list or tuple of str
The sample names.
celfile_urls: list or tuple of str
The sample CEL file URLs.
sel: Iterable, optional
A list of sample indices to include. If None, all samples are included.
[None]
Returns
-------
None
]
assert[call[name[isinstance], parameter[name[output_file], name[str]]]]
assert[call[name[isinstance], parameter[name[accessions], tuple[[<ast.Name object at 0x7da2041dbb50>, <ast.Name object at 0x7da2041d96c0>]]]]]
for taget[name[acc]] in starred[name[accessions]] begin[:]
assert[call[name[isinstance], parameter[name[acc], name[str]]]]
assert[call[name[isinstance], parameter[name[names], tuple[[<ast.Name object at 0x7da2041db8e0>, <ast.Name object at 0x7da2041da5f0>]]]]]
for taget[name[n]] in starred[name[names]] begin[:]
assert[call[name[isinstance], parameter[name[n], name[str]]]]
assert[call[name[isinstance], parameter[name[celfile_urls], tuple[[<ast.Name object at 0x7da2041db3d0>, <ast.Name object at 0x7da2041dbaf0>]]]]]
for taget[name[u]] in starred[name[celfile_urls]] begin[:]
assert[call[name[isinstance], parameter[name[u], name[str]]]]
if compare[name[sel] is_not constant[None]] begin[:]
assert[call[name[isinstance], parameter[name[sel], name[Iterable]]]]
for taget[name[i]] in starred[name[sel]] begin[:]
assert[call[name[isinstance], parameter[name[i], tuple[[<ast.Name object at 0x7da2041db850>, <ast.Attribute object at 0x7da2041dbe20>]]]]]
with call[name[open], parameter[name[output_file], constant[wb]]] begin[:]
variable[writer] assign[=] call[name[csv].writer, parameter[name[ofh]]]
call[name[writer].writerow, parameter[list[[<ast.Constant object at 0x7da2041d9f60>, <ast.Constant object at 0x7da2041da080>, <ast.Constant object at 0x7da2041dab30>, <ast.Constant object at 0x7da2041daa70>]]]]
variable[n] assign[=] call[name[len], parameter[call[name[list], parameter[name[names]]]]]
if compare[name[sel] is constant[None]] begin[:]
variable[sel] assign[=] call[name[range], parameter[name[n]]]
for taget[name[i]] in starred[name[sel]] begin[:]
variable[cf] assign[=] call[call[call[name[celfile_urls]][name[i]].split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da2041dbdf0>]
call[name[writer].writerow, parameter[list[[<ast.Subscript object at 0x7da2041dbbb0>, <ast.Subscript object at 0x7da2041d9150>, <ast.Name object at 0x7da2041d9750>, <ast.Subscript object at 0x7da2041da530>]]]]
|
keyword[def] identifier[write_sample_sheet] ( identifier[output_file] , identifier[accessions] , identifier[names] , identifier[celfile_urls] , identifier[sel] = keyword[None] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[output_file] , identifier[str] )
keyword[assert] identifier[isinstance] ( identifier[accessions] ,( identifier[list] , identifier[tuple] ))
keyword[for] identifier[acc] keyword[in] identifier[accessions] :
keyword[assert] identifier[isinstance] ( identifier[acc] , identifier[str] )
keyword[assert] identifier[isinstance] ( identifier[names] ,( identifier[list] , identifier[tuple] ))
keyword[for] identifier[n] keyword[in] identifier[names] :
keyword[assert] identifier[isinstance] ( identifier[n] , identifier[str] )
keyword[assert] identifier[isinstance] ( identifier[celfile_urls] ,( identifier[list] , identifier[tuple] ))
keyword[for] identifier[u] keyword[in] identifier[celfile_urls] :
keyword[assert] identifier[isinstance] ( identifier[u] , identifier[str] )
keyword[if] identifier[sel] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[isinstance] ( identifier[sel] , identifier[Iterable] )
keyword[for] identifier[i] keyword[in] identifier[sel] :
keyword[assert] identifier[isinstance] ( identifier[i] ,( identifier[int] , identifier[np] . identifier[integer] ))
keyword[with] identifier[open] ( identifier[output_file] , literal[string] ) keyword[as] identifier[ofh] :
identifier[writer] = identifier[csv] . identifier[writer] ( identifier[ofh] , identifier[dialect] = literal[string] ,
identifier[lineterminator] = identifier[os] . identifier[linesep] ,
identifier[quoting] = identifier[csv] . identifier[QUOTE_NONE] )
identifier[writer] . identifier[writerow] ([ literal[string] , literal[string] , literal[string] , literal[string] ])
identifier[n] = identifier[len] ( identifier[list] ( identifier[names] ))
keyword[if] identifier[sel] keyword[is] keyword[None] :
identifier[sel] = identifier[range] ( identifier[n] )
keyword[for] identifier[i] keyword[in] identifier[sel] :
identifier[cf] = identifier[celfile_urls] [ identifier[i] ]. identifier[split] ( literal[string] )[- literal[int] ]
identifier[writer] . identifier[writerow] ([ identifier[accessions] [ identifier[i] ], identifier[names] [ identifier[i] ], identifier[cf] , identifier[celfile_urls] [ identifier[i] ]])
|
def write_sample_sheet(output_file, accessions, names, celfile_urls, sel=None):
"""Generate a sample sheet in tab-separated text format.
The columns contain the following sample attributes:
1) accession
2) name
3) CEL file name
4) CEL file URL
Parameters
----------
output_file: str
The path of the output file.
accessions: list or tuple of str
The sample accessions.
names: list or tuple of str
The sample names.
celfile_urls: list or tuple of str
The sample CEL file URLs.
sel: Iterable, optional
A list of sample indices to include. If None, all samples are included.
[None]
Returns
-------
None
"""
assert isinstance(output_file, str)
assert isinstance(accessions, (list, tuple))
for acc in accessions:
assert isinstance(acc, str) # depends on [control=['for'], data=['acc']]
assert isinstance(names, (list, tuple))
for n in names:
assert isinstance(n, str) # depends on [control=['for'], data=['n']]
assert isinstance(celfile_urls, (list, tuple))
for u in celfile_urls:
assert isinstance(u, str) # depends on [control=['for'], data=['u']]
if sel is not None:
assert isinstance(sel, Iterable)
for i in sel:
assert isinstance(i, (int, np.integer)) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=['sel']]
with open(output_file, 'wb') as ofh:
writer = csv.writer(ofh, dialect='excel-tab', lineterminator=os.linesep, quoting=csv.QUOTE_NONE)
# write header
writer.writerow(['Accession', 'Name', 'CEL file name', 'CEL file URL'])
n = len(list(names))
if sel is None:
sel = range(n) # depends on [control=['if'], data=['sel']]
for i in sel:
cf = celfile_urls[i].split('/')[-1]
# row = [accessions[i], names[i], cf, celfile_urls[i]]
writer.writerow([accessions[i], names[i], cf, celfile_urls[i]]) # depends on [control=['for'], data=['i']] # depends on [control=['with'], data=['ofh']]
|
def covar(self, x, y, binby=[], limits=None, shape=default_shape, selection=False, delay=False, progress=None):
    """Calculate the covariance cov[x,y] between and x and y, possibly on a grid defined by binby.
    Example:
    >>> df.covar("x**2+y**2+z**2", "-log(-E+1)")
    array(52.69461456005138)
    >>> df.covar("x**2+y**2+z**2", "-log(-E+1)")/(df.std("x**2+y**2+z**2") * df.std("-log(-E+1)"))
    0.63666373822156686
    >>> df.covar("x**2+y**2+z**2", "-log(-E+1)", binby="Lz", shape=4)
    array([ 10.17387143, 51.94954078, 51.24902796, 20.2163929 ])
    :param x: {expression}
    :param y: {expression}
    :param binby: {binby}
    :param limits: {limits}
    :param shape: {shape}
    :param selection: {selection}
    :param delay: {delay}
    :param progress: {progress}
    :return: {return_stat_scalar}
    """
    # NOTE(review): binby=[] is a mutable default — appears safe here since
    # it is only passed through, never mutated, but fragile; confirm.
    # Delayed combiner implementing cov[x,y] = E[x*y] - E[x]*E[y]; runs
    # once all three mean computations have resolved.
    @delayed
    def cov(mean_x, mean_y, mean_xy):
        return mean_xy - mean_x * mean_y
    # Normalise scalar/list inputs into parallel lists so one code path
    # handles both; waslist records how to un-wrap the result.
    waslist, [xlist, ylist] = vaex.utils.listify(x, y)
    # print("limits", limits)
    # Resolve the binning limits lazily as well (delay=True) so they can
    # be computed in the same pass as the means below.
    limits = self.limits(binby, limits, selection=selection, delay=True)
    # print("limits", limits)
    # Schedules E[x], E[y] and E[x*y] for every (x, y) pair once limits
    # are known; each entry of the result list is a delayed covariance.
    @delayed
    def calculate(limits):
        results = []
        for x, y in zip(xlist, ylist):
            mx = self.mean(x, binby=binby, limits=limits, shape=shape, selection=selection, delay=True, progress=progressbar)
            my = self.mean(y, binby=binby, limits=limits, shape=shape, selection=selection, delay=True, progress=progressbar)
            cxy = self.mean("(%s)*(%s)" % (x, y), binby=binby, limits=limits, shape=shape, selection=selection,
                            delay=True, progress=progressbar)
            results.append(cov(mx, my, cxy))
        return results
    # The progressbar must exist before calculate() is scheduled, since
    # the delayed body closes over it.
    progressbar = vaex.utils.progressbars(progress)
    covars = calculate(limits)
    # Collect the per-pair covariances into an array, un-wrapping the
    # list again if the caller passed scalars.
    @delayed
    def finish(covars):
        value = np.array(vaex.utils.unlistify(waslist, covars))
        return value
    # Either return the delayed promise or block for the result,
    # depending on the caller's `delay` flag.
    return self._delay(delay, finish(delayed_list(covars)))
|
def function[covar, parameter[self, x, y, binby, limits, shape, selection, delay, progress]]:
constant[Calculate the covariance cov[x,y] between and x and y, possibly on a grid defined by binby.
Example:
>>> df.covar("x**2+y**2+z**2", "-log(-E+1)")
array(52.69461456005138)
>>> df.covar("x**2+y**2+z**2", "-log(-E+1)")/(df.std("x**2+y**2+z**2") * df.std("-log(-E+1)"))
0.63666373822156686
>>> df.covar("x**2+y**2+z**2", "-log(-E+1)", binby="Lz", shape=4)
array([ 10.17387143, 51.94954078, 51.24902796, 20.2163929 ])
:param x: {expression}
:param y: {expression}
:param binby: {binby}
:param limits: {limits}
:param shape: {shape}
:param selection: {selection}
:param delay: {delay}
:param progress: {progress}
:return: {return_stat_scalar}
]
def function[cov, parameter[mean_x, mean_y, mean_xy]]:
return[binary_operation[name[mean_xy] - binary_operation[name[mean_x] * name[mean_y]]]]
<ast.Tuple object at 0x7da2047eb370> assign[=] call[name[vaex].utils.listify, parameter[name[x], name[y]]]
variable[limits] assign[=] call[name[self].limits, parameter[name[binby], name[limits]]]
def function[calculate, parameter[limits]]:
variable[results] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da2047ebb20>, <ast.Name object at 0x7da2047e98a0>]]] in starred[call[name[zip], parameter[name[xlist], name[ylist]]]] begin[:]
variable[mx] assign[=] call[name[self].mean, parameter[name[x]]]
variable[my] assign[=] call[name[self].mean, parameter[name[y]]]
variable[cxy] assign[=] call[name[self].mean, parameter[binary_operation[constant[(%s)*(%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2047e9ff0>, <ast.Name object at 0x7da2047e97b0>]]]]]
call[name[results].append, parameter[call[name[cov], parameter[name[mx], name[my], name[cxy]]]]]
return[name[results]]
variable[progressbar] assign[=] call[name[vaex].utils.progressbars, parameter[name[progress]]]
variable[covars] assign[=] call[name[calculate], parameter[name[limits]]]
def function[finish, parameter[covars]]:
variable[value] assign[=] call[name[np].array, parameter[call[name[vaex].utils.unlistify, parameter[name[waslist], name[covars]]]]]
return[name[value]]
return[call[name[self]._delay, parameter[name[delay], call[name[finish], parameter[call[name[delayed_list], parameter[name[covars]]]]]]]]
|
keyword[def] identifier[covar] ( identifier[self] , identifier[x] , identifier[y] , identifier[binby] =[], identifier[limits] = keyword[None] , identifier[shape] = identifier[default_shape] , identifier[selection] = keyword[False] , identifier[delay] = keyword[False] , identifier[progress] = keyword[None] ):
literal[string]
@ identifier[delayed]
keyword[def] identifier[cov] ( identifier[mean_x] , identifier[mean_y] , identifier[mean_xy] ):
keyword[return] identifier[mean_xy] - identifier[mean_x] * identifier[mean_y]
identifier[waslist] ,[ identifier[xlist] , identifier[ylist] ]= identifier[vaex] . identifier[utils] . identifier[listify] ( identifier[x] , identifier[y] )
identifier[limits] = identifier[self] . identifier[limits] ( identifier[binby] , identifier[limits] , identifier[selection] = identifier[selection] , identifier[delay] = keyword[True] )
@ identifier[delayed]
keyword[def] identifier[calculate] ( identifier[limits] ):
identifier[results] =[]
keyword[for] identifier[x] , identifier[y] keyword[in] identifier[zip] ( identifier[xlist] , identifier[ylist] ):
identifier[mx] = identifier[self] . identifier[mean] ( identifier[x] , identifier[binby] = identifier[binby] , identifier[limits] = identifier[limits] , identifier[shape] = identifier[shape] , identifier[selection] = identifier[selection] , identifier[delay] = keyword[True] , identifier[progress] = identifier[progressbar] )
identifier[my] = identifier[self] . identifier[mean] ( identifier[y] , identifier[binby] = identifier[binby] , identifier[limits] = identifier[limits] , identifier[shape] = identifier[shape] , identifier[selection] = identifier[selection] , identifier[delay] = keyword[True] , identifier[progress] = identifier[progressbar] )
identifier[cxy] = identifier[self] . identifier[mean] ( literal[string] %( identifier[x] , identifier[y] ), identifier[binby] = identifier[binby] , identifier[limits] = identifier[limits] , identifier[shape] = identifier[shape] , identifier[selection] = identifier[selection] ,
identifier[delay] = keyword[True] , identifier[progress] = identifier[progressbar] )
identifier[results] . identifier[append] ( identifier[cov] ( identifier[mx] , identifier[my] , identifier[cxy] ))
keyword[return] identifier[results]
identifier[progressbar] = identifier[vaex] . identifier[utils] . identifier[progressbars] ( identifier[progress] )
identifier[covars] = identifier[calculate] ( identifier[limits] )
@ identifier[delayed]
keyword[def] identifier[finish] ( identifier[covars] ):
identifier[value] = identifier[np] . identifier[array] ( identifier[vaex] . identifier[utils] . identifier[unlistify] ( identifier[waslist] , identifier[covars] ))
keyword[return] identifier[value]
keyword[return] identifier[self] . identifier[_delay] ( identifier[delay] , identifier[finish] ( identifier[delayed_list] ( identifier[covars] )))
|
def covar(self, x, y, binby=[], limits=None, shape=default_shape, selection=False, delay=False, progress=None):
"""Calculate the covariance cov[x,y] between and x and y, possibly on a grid defined by binby.
Example:
>>> df.covar("x**2+y**2+z**2", "-log(-E+1)")
array(52.69461456005138)
>>> df.covar("x**2+y**2+z**2", "-log(-E+1)")/(df.std("x**2+y**2+z**2") * df.std("-log(-E+1)"))
0.63666373822156686
>>> df.covar("x**2+y**2+z**2", "-log(-E+1)", binby="Lz", shape=4)
array([ 10.17387143, 51.94954078, 51.24902796, 20.2163929 ])
:param x: {expression}
:param y: {expression}
:param binby: {binby}
:param limits: {limits}
:param shape: {shape}
:param selection: {selection}
:param delay: {delay}
:param progress: {progress}
:return: {return_stat_scalar}
"""
@delayed
def cov(mean_x, mean_y, mean_xy):
return mean_xy - mean_x * mean_y
(waslist, [xlist, ylist]) = vaex.utils.listify(x, y)
# print("limits", limits)
limits = self.limits(binby, limits, selection=selection, delay=True)
# print("limits", limits)
@delayed
def calculate(limits):
results = []
for (x, y) in zip(xlist, ylist):
mx = self.mean(x, binby=binby, limits=limits, shape=shape, selection=selection, delay=True, progress=progressbar)
my = self.mean(y, binby=binby, limits=limits, shape=shape, selection=selection, delay=True, progress=progressbar)
cxy = self.mean('(%s)*(%s)' % (x, y), binby=binby, limits=limits, shape=shape, selection=selection, delay=True, progress=progressbar)
results.append(cov(mx, my, cxy)) # depends on [control=['for'], data=[]]
return results
progressbar = vaex.utils.progressbars(progress)
covars = calculate(limits)
@delayed
def finish(covars):
value = np.array(vaex.utils.unlistify(waslist, covars))
return value
return self._delay(delay, finish(delayed_list(covars)))
|
def experiments_predictions_image_set_create(self, experiment_id, run_id, filename):
    """Create a prediction image set from a given tar archive that was
    produced as the result of a successful model run.

    Returns None if the specified model run does not exist or did not
    finish successfully. Raises a ValueError if the given file is invalid or
    model run.

    Parameters
    ----------
    experiment_id : string
        Unique experiment identifier
    run_id : string
        Unique model run identifier
    filename : string
        Path to uploaded image set archive file

    Returns
    -------
    PredictionImageSetHandle
        Handle for new prediction image set collection
    """
    # Ensure that the model run exists and is in state SUCCESS
    model_run = self.experiments_predictions_get(experiment_id, run_id)
    if model_run is None:
        return None
    if not model_run.state.is_success:
        raise ValueError('invalid run state: ' + str(model_run.state))
    # Check if the file is a valid tar archive (based on suffix).
    suffix = get_filename_suffix(filename, ARCHIVE_SUFFIXES)
    if suffix is None:
        # Not a valid file suffix
        raise ValueError('invalid file suffix: ' + os.path.basename(os.path.normpath(filename)))
    # Unpack the file to a temporary folder. The outer try/finally
    # guarantees the folder is removed on every exit path (the original
    # code leaked it if create_object raised, and never closed the tar
    # file handle).
    temp_dir = tempfile.mkdtemp()
    try:
        try:
            # Context manager closes the archive even if extraction fails.
            # SECURITY NOTE: extractall() on an uploaded archive can write
            # outside temp_dir via '..' members (path traversal) — consider
            # validating member names before extraction.
            with tarfile.open(name=filename, mode='r') as tf:
                tf.extractall(path=temp_dir)
        except (tarfile.ReadError, IOError) as err:
            # Surface extraction problems as a ValueError for the caller.
            raise ValueError(str(err))
        # The list of prediction image sets
        image_sets = []
        # Parse the CSV file. For each image file use:
        # img_obj = self.images.create_object(img_filename)
        # to create an image file object in the database.
        # Use file name as default object name (archive suffix stripped).
        name = os.path.basename(os.path.normpath(filename))[:-len(suffix)]
        # Create prediction image set
        img_set = self.prediction_images.create_object(name, image_sets)
    finally:
        # Delete the temporary folder on success and on error alike.
        shutil.rmtree(temp_dir)
    return img_set
|
def function[experiments_predictions_image_set_create, parameter[self, experiment_id, run_id, filename]]:
constant[Create a prediction image set from a given tar archive that was
produced as the result of a successful model run.
Returns None if the specified model run does not exist or did not
finish successfully. Raises a ValueError if the given file is invalid or
model run.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
filename : string
Path to uploaded image set archive file
Returns
-------
PredictionImageSetHandle
Handle for new prediction image set collection
]
variable[model_run] assign[=] call[name[self].experiments_predictions_get, parameter[name[experiment_id], name[run_id]]]
if compare[name[model_run] is constant[None]] begin[:]
return[constant[None]]
if <ast.UnaryOp object at 0x7da2054a7d00> begin[:]
<ast.Raise object at 0x7da2054a4310>
variable[suffix] assign[=] call[name[get_filename_suffix], parameter[name[filename], name[ARCHIVE_SUFFIXES]]]
if compare[name[suffix] is constant[None]] begin[:]
<ast.Raise object at 0x7da2054a5e40>
variable[temp_dir] assign[=] call[name[tempfile].mkdtemp, parameter[]]
<ast.Try object at 0x7da2054a6ec0>
variable[image_sets] assign[=] list[[]]
variable[name] assign[=] call[call[name[os].path.basename, parameter[call[name[os].path.normpath, parameter[name[filename]]]]]][<ast.Slice object at 0x7da2054a7970>]
variable[img_set] assign[=] call[name[self].prediction_images.create_object, parameter[name[name], name[image_sets]]]
call[name[shutil].rmtree, parameter[name[temp_dir]]]
return[name[img_set]]
|
keyword[def] identifier[experiments_predictions_image_set_create] ( identifier[self] , identifier[experiment_id] , identifier[run_id] , identifier[filename] ):
literal[string]
identifier[model_run] = identifier[self] . identifier[experiments_predictions_get] ( identifier[experiment_id] , identifier[run_id] )
keyword[if] identifier[model_run] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[if] keyword[not] identifier[model_run] . identifier[state] . identifier[is_success] :
keyword[raise] identifier[ValueError] ( literal[string] + identifier[str] ( identifier[model_run] . identifier[state] ))
identifier[suffix] = identifier[get_filename_suffix] ( identifier[filename] , identifier[ARCHIVE_SUFFIXES] )
keyword[if] identifier[suffix] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] + identifier[os] . identifier[path] . identifier[basename] ( identifier[os] . identifier[path] . identifier[normpath] ( identifier[filename] )))
identifier[temp_dir] = identifier[tempfile] . identifier[mkdtemp] ()
keyword[try] :
identifier[tf] = identifier[tarfile] . identifier[open] ( identifier[name] = identifier[filename] , identifier[mode] = literal[string] )
identifier[tf] . identifier[extractall] ( identifier[path] = identifier[temp_dir] )
keyword[except] ( identifier[tarfile] . identifier[ReadError] , identifier[IOError] ) keyword[as] identifier[err] :
identifier[shutil] . identifier[rmtree] ( identifier[temp_dir] )
keyword[raise] identifier[ValueError] ( identifier[str] ( identifier[err] ))
identifier[image_sets] =[]
identifier[name] = identifier[os] . identifier[path] . identifier[basename] ( identifier[os] . identifier[path] . identifier[normpath] ( identifier[filename] ))[:- identifier[len] ( identifier[suffix] )]
identifier[img_set] = identifier[self] . identifier[prediction_images] . identifier[create_object] ( identifier[name] , identifier[image_sets] )
identifier[shutil] . identifier[rmtree] ( identifier[temp_dir] )
keyword[return] identifier[img_set]
|
def experiments_predictions_image_set_create(self, experiment_id, run_id, filename):
"""Create a prediction image set from a given tar archive that was
produced as the result of a successful model run.
Returns None if the specified model run does not exist or did not
finish successfully. Raises a ValueError if the given file is invalid or
model run.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
filename : string
Path to uploaded image set archive file
Returns
-------
PredictionImageSetHandle
Handle for new prediction image set collection
"""
# Ensure that the model run exists and is in state SUCCESS
model_run = self.experiments_predictions_get(experiment_id, run_id)
if model_run is None:
return None # depends on [control=['if'], data=[]]
if not model_run.state.is_success:
raise ValueError('invalid run state: ' + str(model_run.state)) # depends on [control=['if'], data=[]]
# Check if the file is a valid tar archive (based on suffix).
suffix = get_filename_suffix(filename, ARCHIVE_SUFFIXES)
if suffix is None:
# Not a valid file suffix
raise ValueError('invalid file suffix: ' + os.path.basename(os.path.normpath(filename))) # depends on [control=['if'], data=[]]
# Unpack the file to a temporary folder .
temp_dir = tempfile.mkdtemp()
try:
tf = tarfile.open(name=filename, mode='r')
tf.extractall(path=temp_dir) # depends on [control=['try'], data=[]]
except (tarfile.ReadError, IOError) as err:
# Clean up in case there is an error during extraction
shutil.rmtree(temp_dir)
raise ValueError(str(err)) # depends on [control=['except'], data=['err']]
# The list of prediction image sets
image_sets = []
# Parse the CSV file. For each image file use:
# img_obj = self.images.create_object(img_filename)
# to create an image file object in the database.
# Use file name as default object name
name = os.path.basename(os.path.normpath(filename))[:-len(suffix)]
# Create prediction image set
img_set = self.prediction_images.create_object(name, image_sets)
# Delete the temporary folder
shutil.rmtree(temp_dir)
return img_set
|
def summarize_tensors(tensor_dict, tag=None):
  """Summarize the tensors.

  Emits one histogram summary per entry of `tensor_dict`, named
  `tag + key`.

  Args:
    tensor_dict: a dictionary of tensors.
    tag: name scope of the summary; defaults to tensors/.
  """
  if tag is None:
    tag = "tensors/"
  # Iterate key/value pairs directly instead of snapshotting the keys
  # with list() and doing a second lookup per key.
  for t_name, t in tensor_dict.items():
    tf.summary.histogram(tag + t_name, t)
|
def function[summarize_tensors, parameter[tensor_dict, tag]]:
constant[Summarize the tensors.
Args:
tensor_dict: a dictionary of tensors.
tag: name scope of the summary; defaults to tensors/.
]
if compare[name[tag] is constant[None]] begin[:]
variable[tag] assign[=] constant[tensors/]
for taget[name[t_name]] in starred[call[name[list], parameter[name[tensor_dict]]]] begin[:]
variable[t] assign[=] call[name[tensor_dict]][name[t_name]]
call[name[tf].summary.histogram, parameter[binary_operation[name[tag] + name[t_name]], name[t]]]
|
keyword[def] identifier[summarize_tensors] ( identifier[tensor_dict] , identifier[tag] = keyword[None] ):
literal[string]
keyword[if] identifier[tag] keyword[is] keyword[None] :
identifier[tag] = literal[string]
keyword[for] identifier[t_name] keyword[in] identifier[list] ( identifier[tensor_dict] ):
identifier[t] = identifier[tensor_dict] [ identifier[t_name] ]
identifier[tf] . identifier[summary] . identifier[histogram] ( identifier[tag] + identifier[t_name] , identifier[t] )
|
def summarize_tensors(tensor_dict, tag=None):
"""Summarize the tensors.
Args:
tensor_dict: a dictionary of tensors.
tag: name scope of the summary; defaults to tensors/.
"""
if tag is None:
tag = 'tensors/' # depends on [control=['if'], data=['tag']]
for t_name in list(tensor_dict):
t = tensor_dict[t_name]
tf.summary.histogram(tag + t_name, t) # depends on [control=['for'], data=['t_name']]
|
def default_start_index(self, value):
    """Store *value* as the default start index after validating its type.

    Raises TypeError when *value* is not an int.
    """
    if isinstance(value, int):
        self._default_start_index = value
    else:
        raise TypeError('default_start_index attribute must be of int '
                        'type.')
|
def function[default_start_index, parameter[self, value]]:
constant[Validate and set the default start index.]
if <ast.UnaryOp object at 0x7da1b0359270> begin[:]
<ast.Raise object at 0x7da1b0359e10>
name[self]._default_start_index assign[=] name[value]
|
keyword[def] identifier[default_start_index] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[int] ):
keyword[raise] identifier[TypeError] ( literal[string]
literal[string] )
identifier[self] . identifier[_default_start_index] = identifier[value]
|
def default_start_index(self, value):
"""Validate and set the default start index."""
if not isinstance(value, int):
raise TypeError('default_start_index attribute must be of int type.') # depends on [control=['if'], data=[]]
self._default_start_index = value
|
def build(self, jokers=False, num_jokers=0):
    """
    Construct a standard 52-card French deck of Card instances and add
    it to this deck.

    :arg bool jokers:
        Whether or not to include jokers in the deck.
    :arg int num_jokers:
        The number of jokers to include.
    """
    # Fall back to the instance-level settings when the arguments are
    # falsy (False / 0), matching the deck's configured defaults.
    use_jokers = jokers or self.jokers
    joker_count = num_jokers or self.num_jokers
    self.decks_used += 1
    # Augmented assignment extends self.cards in place.
    self.cards += build_cards(use_jokers, joker_count)
|
def function[build, parameter[self, jokers, num_jokers]]:
constant[
Builds a standard 52 card French deck of Card instances.
:arg bool jokers:
Whether or not to include jokers in the deck.
:arg int num_jokers:
The number of jokers to include.
]
variable[jokers] assign[=] <ast.BoolOp object at 0x7da1b265a9b0>
variable[num_jokers] assign[=] <ast.BoolOp object at 0x7da1b265b130>
<ast.AugAssign object at 0x7da1b265ac80>
<ast.AugAssign object at 0x7da1b265a3b0>
|
keyword[def] identifier[build] ( identifier[self] , identifier[jokers] = keyword[False] , identifier[num_jokers] = literal[int] ):
literal[string]
identifier[jokers] = identifier[jokers] keyword[or] identifier[self] . identifier[jokers]
identifier[num_jokers] = identifier[num_jokers] keyword[or] identifier[self] . identifier[num_jokers]
identifier[self] . identifier[decks_used] += literal[int]
identifier[self] . identifier[cards] += identifier[build_cards] ( identifier[jokers] , identifier[num_jokers] )
|
def build(self, jokers=False, num_jokers=0):
"""
Builds a standard 52 card French deck of Card instances.
:arg bool jokers:
Whether or not to include jokers in the deck.
:arg int num_jokers:
The number of jokers to include.
"""
jokers = jokers or self.jokers
num_jokers = num_jokers or self.num_jokers
self.decks_used += 1
self.cards += build_cards(jokers, num_jokers)
|
def set(self, id, translation, domain='messages'):
    """
    Sets a message translation.

    :param id: message identifier to translate
    :param translation: translated text for *id*
    :param domain: translation domain the message belongs to
    """
    # NOTE(review): the `unicode` builtin implies Python 2 — under
    # Python 3 these lines raise NameError. Asserts are also stripped
    # with -O, so this is debug-time validation only.
    assert isinstance(id, (str, unicode))
    assert isinstance(translation, (str, unicode))
    assert isinstance(domain, (str, unicode))
    # Delegates to add() with a single-entry mapping.
    self.add({id: translation}, domain)
|
def function[set, parameter[self, id, translation, domain]]:
constant[
Sets a message translation.
]
assert[call[name[isinstance], parameter[name[id], tuple[[<ast.Name object at 0x7da1b20d5a50>, <ast.Name object at 0x7da1b20d4970>]]]]]
assert[call[name[isinstance], parameter[name[translation], tuple[[<ast.Name object at 0x7da1b20d4f10>, <ast.Name object at 0x7da1b20d48e0>]]]]]
assert[call[name[isinstance], parameter[name[domain], tuple[[<ast.Name object at 0x7da1b20d4220>, <ast.Name object at 0x7da1b20d6170>]]]]]
call[name[self].add, parameter[dictionary[[<ast.Name object at 0x7da1b20d66e0>], [<ast.Name object at 0x7da1b20d4af0>]], name[domain]]]
|
keyword[def] identifier[set] ( identifier[self] , identifier[id] , identifier[translation] , identifier[domain] = literal[string] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[id] ,( identifier[str] , identifier[unicode] ))
keyword[assert] identifier[isinstance] ( identifier[translation] ,( identifier[str] , identifier[unicode] ))
keyword[assert] identifier[isinstance] ( identifier[domain] ,( identifier[str] , identifier[unicode] ))
identifier[self] . identifier[add] ({ identifier[id] : identifier[translation] }, identifier[domain] )
|
def set(self, id, translation, domain='messages'):
"""
Sets a message translation.
"""
assert isinstance(id, (str, unicode))
assert isinstance(translation, (str, unicode))
assert isinstance(domain, (str, unicode))
self.add({id: translation}, domain)
|
def median_filter(tr, multiplier=10, windowlength=0.5,
                  interp_len=0.05, debug=0):
    """
    Filter out spikes in data above a multiple of MAD of the data.

    Currently only has the ability to replaces spikes with linear
    interpolation. In the future we would aim to fill the gap with something
    more appropriate.  Works in-place on data.

    :type tr: obspy.core.trace.Trace
    :param tr: trace to despike
    :type multiplier: float
    :param multiplier:
        median absolute deviation multiplier to find spikes above.
    :type windowlength: float
    :param windowlength: Length of window to look for spikes in in seconds.
    :type interp_len: float
    :param interp_len: Length in seconds to interpolate around spikes.
    :type debug: int
    :param debug: Debug output level between 0 and 5, higher is more output.

    :returns: :class:`obspy.core.trace.Trace`

    .. warning::
        Not particularly effective, and may remove earthquake signals, use with
        caution.
    """
    num_cores = cpu_count()
    if debug >= 1:
        # Keep a pristine copy only when we will plot the comparison below.
        data_in = tr.copy()
    # Note - might be worth finding spikes in filtered data
    filt = tr.copy()
    filt.detrend('linear')
    try:
        # High-pass-ish bandpass up to just under Nyquist to emphasize spikes.
        filt.filter('bandpass', freqmin=10.0,
                    freqmax=(tr.stats.sampling_rate / 2) - 1)
    except Exception as e:
        # Best-effort: if filtering fails, spike detection proceeds on the
        # detrended (unfiltered) data.
        print("Could not filter due to error: {0}".format(e))
    data = filt.data
    del filt
    # Loop through windows
    # Convert the second-based parameters to sample counts.
    _windowlength = int(windowlength * tr.stats.sampling_rate)
    _interp_len = int(interp_len * tr.stats.sampling_rate)
    peaks = []
    with Timer() as t:
        # One worker task per non-overlapping window of the data; any
        # trailing partial window (len(data) % _windowlength) is not scanned.
        pool = Pool(processes=num_cores)
        # NOTE(review): the starttime argument is the same constant
        # (tr.stats.starttime + windowlength) for every chunk — it looks
        # like it should advance per chunk; confirm against _median_window.
        results = [pool.apply_async(_median_window,
                                    args=(data[chunk * _windowlength:
                                               (chunk + 1) * _windowlength],
                                          chunk * _windowlength, multiplier,
                                          tr.stats.starttime + windowlength,
                                          tr.stats.sampling_rate,
                                          debug))
                   for chunk in range(int(len(data) / _windowlength))]
        pool.close()
        for p in results:
            # Each worker returns a list of (value, index) spike tuples.
            peaks += p.get()
        pool.join()
        for peak in peaks:
            # Replace each spike (peak[1] is the absolute sample index)
            # with linear interpolation over _interp_len samples, in place.
            tr.data = _interp_gap(tr.data, peak[1], _interp_len)
    print("Despiking took: %s s" % t.secs)
    if debug >= 1:
        plt.plot(data_in.data, 'r', label='raw')
        plt.plot(tr.data, 'k', label='despiked')
        plt.legend()
        plt.show()
    return tr
|
def function[median_filter, parameter[tr, multiplier, windowlength, interp_len, debug]]:
constant[
Filter out spikes in data above a multiple of MAD of the data.
Currently only has the ability to replaces spikes with linear
interpolation. In the future we would aim to fill the gap with something
more appropriate. Works in-place on data.
:type tr: obspy.core.trace.Trace
:param tr: trace to despike
:type multiplier: float
:param multiplier:
median absolute deviation multiplier to find spikes above.
:type windowlength: float
:param windowlength: Length of window to look for spikes in in seconds.
:type interp_len: float
:param interp_len: Length in seconds to interpolate around spikes.
:type debug: int
:param debug: Debug output level between 0 and 5, higher is more output.
:returns: :class:`obspy.core.trace.Trace`
.. warning::
Not particularly effective, and may remove earthquake signals, use with
caution.
]
variable[num_cores] assign[=] call[name[cpu_count], parameter[]]
if compare[name[debug] greater_or_equal[>=] constant[1]] begin[:]
variable[data_in] assign[=] call[name[tr].copy, parameter[]]
variable[filt] assign[=] call[name[tr].copy, parameter[]]
call[name[filt].detrend, parameter[constant[linear]]]
<ast.Try object at 0x7da1b0788a90>
variable[data] assign[=] name[filt].data
<ast.Delete object at 0x7da1b078bbe0>
variable[_windowlength] assign[=] call[name[int], parameter[binary_operation[name[windowlength] * name[tr].stats.sampling_rate]]]
variable[_interp_len] assign[=] call[name[int], parameter[binary_operation[name[interp_len] * name[tr].stats.sampling_rate]]]
variable[peaks] assign[=] list[[]]
with call[name[Timer], parameter[]] begin[:]
variable[pool] assign[=] call[name[Pool], parameter[]]
variable[results] assign[=] <ast.ListComp object at 0x7da18f09e140>
call[name[pool].close, parameter[]]
for taget[name[p]] in starred[name[results]] begin[:]
<ast.AugAssign object at 0x7da18f09c9d0>
call[name[pool].join, parameter[]]
for taget[name[peak]] in starred[name[peaks]] begin[:]
name[tr].data assign[=] call[name[_interp_gap], parameter[name[tr].data, call[name[peak]][constant[1]], name[_interp_len]]]
call[name[print], parameter[binary_operation[constant[Despiking took: %s s] <ast.Mod object at 0x7da2590d6920> name[t].secs]]]
if compare[name[debug] greater_or_equal[>=] constant[1]] begin[:]
call[name[plt].plot, parameter[name[data_in].data, constant[r]]]
call[name[plt].plot, parameter[name[tr].data, constant[k]]]
call[name[plt].legend, parameter[]]
call[name[plt].show, parameter[]]
return[name[tr]]
|
keyword[def] identifier[median_filter] ( identifier[tr] , identifier[multiplier] = literal[int] , identifier[windowlength] = literal[int] ,
identifier[interp_len] = literal[int] , identifier[debug] = literal[int] ):
literal[string]
identifier[num_cores] = identifier[cpu_count] ()
keyword[if] identifier[debug] >= literal[int] :
identifier[data_in] = identifier[tr] . identifier[copy] ()
identifier[filt] = identifier[tr] . identifier[copy] ()
identifier[filt] . identifier[detrend] ( literal[string] )
keyword[try] :
identifier[filt] . identifier[filter] ( literal[string] , identifier[freqmin] = literal[int] ,
identifier[freqmax] =( identifier[tr] . identifier[stats] . identifier[sampling_rate] / literal[int] )- literal[int] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[print] ( literal[string] . identifier[format] ( identifier[e] ))
identifier[data] = identifier[filt] . identifier[data]
keyword[del] identifier[filt]
identifier[_windowlength] = identifier[int] ( identifier[windowlength] * identifier[tr] . identifier[stats] . identifier[sampling_rate] )
identifier[_interp_len] = identifier[int] ( identifier[interp_len] * identifier[tr] . identifier[stats] . identifier[sampling_rate] )
identifier[peaks] =[]
keyword[with] identifier[Timer] () keyword[as] identifier[t] :
identifier[pool] = identifier[Pool] ( identifier[processes] = identifier[num_cores] )
identifier[results] =[ identifier[pool] . identifier[apply_async] ( identifier[_median_window] ,
identifier[args] =( identifier[data] [ identifier[chunk] * identifier[_windowlength] :
( identifier[chunk] + literal[int] )* identifier[_windowlength] ],
identifier[chunk] * identifier[_windowlength] , identifier[multiplier] ,
identifier[tr] . identifier[stats] . identifier[starttime] + identifier[windowlength] ,
identifier[tr] . identifier[stats] . identifier[sampling_rate] ,
identifier[debug] ))
keyword[for] identifier[chunk] keyword[in] identifier[range] ( identifier[int] ( identifier[len] ( identifier[data] )/ identifier[_windowlength] ))]
identifier[pool] . identifier[close] ()
keyword[for] identifier[p] keyword[in] identifier[results] :
identifier[peaks] += identifier[p] . identifier[get] ()
identifier[pool] . identifier[join] ()
keyword[for] identifier[peak] keyword[in] identifier[peaks] :
identifier[tr] . identifier[data] = identifier[_interp_gap] ( identifier[tr] . identifier[data] , identifier[peak] [ literal[int] ], identifier[_interp_len] )
identifier[print] ( literal[string] % identifier[t] . identifier[secs] )
keyword[if] identifier[debug] >= literal[int] :
identifier[plt] . identifier[plot] ( identifier[data_in] . identifier[data] , literal[string] , identifier[label] = literal[string] )
identifier[plt] . identifier[plot] ( identifier[tr] . identifier[data] , literal[string] , identifier[label] = literal[string] )
identifier[plt] . identifier[legend] ()
identifier[plt] . identifier[show] ()
keyword[return] identifier[tr]
|
def median_filter(tr, multiplier=10, windowlength=0.5, interp_len=0.05, debug=0):
"""
Filter out spikes in data above a multiple of MAD of the data.
Currently only has the ability to replaces spikes with linear
interpolation. In the future we would aim to fill the gap with something
more appropriate. Works in-place on data.
:type tr: obspy.core.trace.Trace
:param tr: trace to despike
:type multiplier: float
:param multiplier:
median absolute deviation multiplier to find spikes above.
:type windowlength: float
:param windowlength: Length of window to look for spikes in in seconds.
:type interp_len: float
:param interp_len: Length in seconds to interpolate around spikes.
:type debug: int
:param debug: Debug output level between 0 and 5, higher is more output.
:returns: :class:`obspy.core.trace.Trace`
.. warning::
Not particularly effective, and may remove earthquake signals, use with
caution.
"""
num_cores = cpu_count()
if debug >= 1:
data_in = tr.copy() # depends on [control=['if'], data=[]]
# Note - might be worth finding spikes in filtered data
filt = tr.copy()
filt.detrend('linear')
try:
filt.filter('bandpass', freqmin=10.0, freqmax=tr.stats.sampling_rate / 2 - 1) # depends on [control=['try'], data=[]]
except Exception as e:
print('Could not filter due to error: {0}'.format(e)) # depends on [control=['except'], data=['e']]
data = filt.data
del filt
# Loop through windows
_windowlength = int(windowlength * tr.stats.sampling_rate)
_interp_len = int(interp_len * tr.stats.sampling_rate)
peaks = []
with Timer() as t:
pool = Pool(processes=num_cores)
results = [pool.apply_async(_median_window, args=(data[chunk * _windowlength:(chunk + 1) * _windowlength], chunk * _windowlength, multiplier, tr.stats.starttime + windowlength, tr.stats.sampling_rate, debug)) for chunk in range(int(len(data) / _windowlength))]
pool.close()
for p in results:
peaks += p.get() # depends on [control=['for'], data=['p']]
pool.join()
for peak in peaks:
tr.data = _interp_gap(tr.data, peak[1], _interp_len) # depends on [control=['for'], data=['peak']] # depends on [control=['with'], data=[]]
print('Despiking took: %s s' % t.secs)
if debug >= 1:
plt.plot(data_in.data, 'r', label='raw')
plt.plot(tr.data, 'k', label='despiked')
plt.legend()
plt.show() # depends on [control=['if'], data=[]]
return tr
|
def _sb_short_word(self, term, r1_prefixes=None):
    """Return True iff term is a short word.

    (...according to the Porter2 specification: the word is short when
    R1 is empty, i.e. begins at the end of the word, and the word ends
    in a short syllable.)

    Parameters
    ----------
    term : str
        The term to examine
    r1_prefixes : set
        Prefixes to consider

    Returns
    -------
    bool
        True iff term is a short word

    """
    # Return the boolean expression directly instead of the
    # if-cond-return-True / return-False pattern.
    return self._sb_r1(term, r1_prefixes) == len(
        term
    ) and self._sb_ends_in_short_syllable(term)
|
def function[_sb_short_word, parameter[self, term, r1_prefixes]]:
constant[Return True iff term is a short word.
(...according to the Porter2 specification.)
Parameters
----------
term : str
The term to examine
r1_prefixes : set
Prefixes to consider
Returns
-------
bool
True iff term is a short word
]
if <ast.BoolOp object at 0x7da1b01401c0> begin[:]
return[constant[True]]
return[constant[False]]
|
keyword[def] identifier[_sb_short_word] ( identifier[self] , identifier[term] , identifier[r1_prefixes] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[_sb_r1] ( identifier[term] , identifier[r1_prefixes] )== identifier[len] (
identifier[term]
) keyword[and] identifier[self] . identifier[_sb_ends_in_short_syllable] ( identifier[term] ):
keyword[return] keyword[True]
keyword[return] keyword[False]
|
def _sb_short_word(self, term, r1_prefixes=None):
"""Return True iff term is a short word.
(...according to the Porter2 specification.)
Parameters
----------
term : str
The term to examine
r1_prefixes : set
Prefixes to consider
Returns
-------
bool
True iff term is a short word
"""
if self._sb_r1(term, r1_prefixes) == len(term) and self._sb_ends_in_short_syllable(term):
return True # depends on [control=['if'], data=[]]
return False
|
def check_input_layer(layer, purpose):
    """Function to check if the layer is valid.

    The function will also set the monkey patching if needed.

    :param layer: The layer to test.
    :type layer: QgsMapLayer

    :param purpose: The expected purpose of the layer.
    :type purpose: basestring

    :return: A tuple with the status of the layer and an error message if
        needed.
        The status is 0 if everything was fine.
        The status is 1 if the client should fix something.
    :rtype: (int, m.Message)
    """
    if not layer.isValid():
        title = tr(
            'The {purpose} layer is invalid').format(purpose=purpose)
        # BUG FIX: the translated template references {exposure}, so the
        # value must be supplied under that keyword. Formatting it with
        # purpose=purpose only raised "KeyError: 'exposure'" at runtime.
        content = tr(
            'The impact function needs a {exposure} layer to run. '
            'You must provide a valid {exposure} layer.').format(
                exposure=purpose)
        message = generate_input_error_message(
            title, m.Paragraph(content))
        return PREPARE_FAILED_BAD_INPUT, message

    # We should read it using KeywordIO for the very beginning. To avoid
    # get the modified keywords in the patching.
    try:
        keywords = KeywordIO().read_keywords(layer)
    except NoKeywordsFoundError:
        title = tr(
            'The {purpose} layer does not have keywords.').format(
                purpose=purpose)
        content = tr(
            'The {purpose} layer does not have keywords. Use the wizard '
            'to assign keywords to the layer.').format(purpose=purpose)
        message = generate_input_error_message(
            title, m.Paragraph(content))
        return PREPARE_FAILED_BAD_INPUT, message

    # The declared purpose in the keywords must match the expected one.
    if keywords.get('layer_purpose') != purpose:
        title = tr('The expected {purpose} layer is not an {purpose}.') \
            .format(purpose=purpose)
        content = tr('The expected {purpose} layer is not an {purpose}.') \
            .format(purpose=purpose)
        message = generate_input_error_message(
            title, m.Paragraph(content))
        return PREPARE_FAILED_BAD_INPUT, message

    # The keyword set must have been written by a supported version.
    version = keywords.get(inasafe_keyword_version_key)
    supported = is_keyword_version_supported(version)
    if not supported:
        parameters = {
            'version': inasafe_keyword_version,
            'source': layer.publicSource()
        }
        title = tr('The {purpose} layer is not up to date.').format(
            purpose=purpose)
        content = tr(
            'The layer {source} must be updated to {version}.').format(
            **parameters)
        message = generate_input_error_message(
            title, m.Paragraph(content))
        return PREPARE_FAILED_BAD_INPUT, message

    # This is the "monkey patching" mentioned above: attach the keywords
    # read from disk to the layer object itself.
    layer.keywords = keywords

    if is_vector_layer(layer):
        # For vector layers, the fields declared in the keywords must
        # still exist on the layer.
        try:
            check_inasafe_fields(layer, keywords_only=True)
        except InvalidLayerError:
            title = tr('The {purpose} layer is not up to date.').format(
                purpose=purpose)
            content = tr(
                'The layer {source} must be updated with the keyword '
                'wizard. Your fields which have been set in the keywords '
                'previously are not matching your layer.').format(
                source=layer.publicSource())
            message = generate_input_error_message(
                title, m.Paragraph(content))
            # Roll back the patching so callers do not see stale keywords.
            del layer.keywords
            return PREPARE_FAILED_BAD_INPUT, message

    return PREPARE_SUCCESS, None
|
def function[check_input_layer, parameter[layer, purpose]]:
constant[Function to check if the layer is valid.
The function will also set the monkey patching if needed.
:param layer: The layer to test.
:type layer: QgsMapLayer
:param purpose: The expected purpose of the layer.
:type purpose: basestring
:return: A tuple with the status of the layer and an error message if
needed.
The status is 0 if everything was fine.
The status is 1 if the client should fix something.
:rtype: (int, m.Message)
]
if <ast.UnaryOp object at 0x7da18f58c430> begin[:]
variable[title] assign[=] call[call[name[tr], parameter[constant[The {purpose} layer is invalid]]].format, parameter[]]
variable[content] assign[=] call[call[name[tr], parameter[constant[The impact function needs a {exposure} layer to run. You must provide a valid {exposure} layer.]]].format, parameter[]]
variable[message] assign[=] call[name[generate_input_error_message], parameter[name[title], call[name[m].Paragraph, parameter[name[content]]]]]
return[tuple[[<ast.Name object at 0x7da18fe93790>, <ast.Name object at 0x7da18fe92f50>]]]
<ast.Try object at 0x7da18fe91120>
if compare[call[name[keywords].get, parameter[constant[layer_purpose]]] not_equal[!=] name[purpose]] begin[:]
variable[title] assign[=] call[call[name[tr], parameter[constant[The expected {purpose} layer is not an {purpose}.]]].format, parameter[]]
variable[content] assign[=] call[call[name[tr], parameter[constant[The expected {purpose} layer is not an {purpose}.]]].format, parameter[]]
variable[message] assign[=] call[name[generate_input_error_message], parameter[name[title], call[name[m].Paragraph, parameter[name[content]]]]]
return[tuple[[<ast.Name object at 0x7da18fe93cd0>, <ast.Name object at 0x7da18fe92ce0>]]]
variable[version] assign[=] call[name[keywords].get, parameter[name[inasafe_keyword_version_key]]]
variable[supported] assign[=] call[name[is_keyword_version_supported], parameter[name[version]]]
if <ast.UnaryOp object at 0x7da18fe91390> begin[:]
variable[parameters] assign[=] dictionary[[<ast.Constant object at 0x7da18fe932e0>, <ast.Constant object at 0x7da18fe90b50>], [<ast.Name object at 0x7da18fe91c90>, <ast.Call object at 0x7da18fe90ac0>]]
variable[title] assign[=] call[call[name[tr], parameter[constant[The {purpose} layer is not up to date.]]].format, parameter[]]
variable[content] assign[=] call[call[name[tr], parameter[constant[The layer {source} must be updated to {version}.]]].format, parameter[]]
variable[message] assign[=] call[name[generate_input_error_message], parameter[name[title], call[name[m].Paragraph, parameter[name[content]]]]]
return[tuple[[<ast.Name object at 0x7da18fe91930>, <ast.Name object at 0x7da18fe90340>]]]
name[layer].keywords assign[=] name[keywords]
if call[name[is_vector_layer], parameter[name[layer]]] begin[:]
<ast.Try object at 0x7da18fe93e20>
return[tuple[[<ast.Name object at 0x7da18fe92bc0>, <ast.Constant object at 0x7da18fe90550>]]]
|
keyword[def] identifier[check_input_layer] ( identifier[layer] , identifier[purpose] ):
literal[string]
keyword[if] keyword[not] identifier[layer] . identifier[isValid] ():
identifier[title] = identifier[tr] (
literal[string] ). identifier[format] ( identifier[purpose] = identifier[purpose] )
identifier[content] = identifier[tr] (
literal[string]
literal[string] ). identifier[format] (
identifier[purpose] = identifier[purpose] )
identifier[message] = identifier[generate_input_error_message] (
identifier[title] , identifier[m] . identifier[Paragraph] ( identifier[content] ))
keyword[return] identifier[PREPARE_FAILED_BAD_INPUT] , identifier[message]
keyword[try] :
identifier[keywords] = identifier[KeywordIO] (). identifier[read_keywords] ( identifier[layer] )
keyword[except] identifier[NoKeywordsFoundError] :
identifier[title] = identifier[tr] (
literal[string] ). identifier[format] (
identifier[purpose] = identifier[purpose] )
identifier[content] = identifier[tr] (
literal[string]
literal[string] ). identifier[format] ( identifier[purpose] = identifier[purpose] )
identifier[message] = identifier[generate_input_error_message] (
identifier[title] , identifier[m] . identifier[Paragraph] ( identifier[content] ))
keyword[return] identifier[PREPARE_FAILED_BAD_INPUT] , identifier[message]
keyword[if] identifier[keywords] . identifier[get] ( literal[string] )!= identifier[purpose] :
identifier[title] = identifier[tr] ( literal[string] ). identifier[format] ( identifier[purpose] = identifier[purpose] )
identifier[content] = identifier[tr] ( literal[string] ). identifier[format] ( identifier[purpose] = identifier[purpose] )
identifier[message] = identifier[generate_input_error_message] (
identifier[title] , identifier[m] . identifier[Paragraph] ( identifier[content] ))
keyword[return] identifier[PREPARE_FAILED_BAD_INPUT] , identifier[message]
identifier[version] = identifier[keywords] . identifier[get] ( identifier[inasafe_keyword_version_key] )
identifier[supported] = identifier[is_keyword_version_supported] ( identifier[version] )
keyword[if] keyword[not] identifier[supported] :
identifier[parameters] ={
literal[string] : identifier[inasafe_keyword_version] ,
literal[string] : identifier[layer] . identifier[publicSource] ()
}
identifier[title] = identifier[tr] ( literal[string] ). identifier[format] (
identifier[purpose] = identifier[purpose] )
identifier[content] = identifier[tr] (
literal[string] ). identifier[format] (
** identifier[parameters] )
identifier[message] = identifier[generate_input_error_message] (
identifier[title] , identifier[m] . identifier[Paragraph] ( identifier[content] ))
keyword[return] identifier[PREPARE_FAILED_BAD_INPUT] , identifier[message]
identifier[layer] . identifier[keywords] = identifier[keywords]
keyword[if] identifier[is_vector_layer] ( identifier[layer] ):
keyword[try] :
identifier[check_inasafe_fields] ( identifier[layer] , identifier[keywords_only] = keyword[True] )
keyword[except] identifier[InvalidLayerError] :
identifier[title] = identifier[tr] ( literal[string] ). identifier[format] (
identifier[purpose] = identifier[purpose] )
identifier[content] = identifier[tr] (
literal[string]
literal[string]
literal[string] ). identifier[format] (
identifier[source] = identifier[layer] . identifier[publicSource] ())
identifier[message] = identifier[generate_input_error_message] (
identifier[title] , identifier[m] . identifier[Paragraph] ( identifier[content] ))
keyword[del] identifier[layer] . identifier[keywords]
keyword[return] identifier[PREPARE_FAILED_BAD_INPUT] , identifier[message]
keyword[return] identifier[PREPARE_SUCCESS] , keyword[None]
|
def check_input_layer(layer, purpose):
"""Function to check if the layer is valid.
The function will also set the monkey patching if needed.
:param layer: The layer to test.
:type layer: QgsMapLayer
:param purpose: The expected purpose of the layer.
:type purpose: basestring
:return: A tuple with the status of the layer and an error message if
needed.
The status is 0 if everything was fine.
The status is 1 if the client should fix something.
:rtype: (int, m.Message)
"""
if not layer.isValid():
title = tr('The {purpose} layer is invalid').format(purpose=purpose)
content = tr('The impact function needs a {exposure} layer to run. You must provide a valid {exposure} layer.').format(purpose=purpose)
message = generate_input_error_message(title, m.Paragraph(content))
return (PREPARE_FAILED_BAD_INPUT, message) # depends on [control=['if'], data=[]]
# We should read it using KeywordIO for the very beginning. To avoid
# get the modified keywords in the patching.
try:
keywords = KeywordIO().read_keywords(layer) # depends on [control=['try'], data=[]]
except NoKeywordsFoundError:
title = tr('The {purpose} layer does not have keywords.').format(purpose=purpose)
content = tr('The {purpose} layer does not have keywords. Use the wizard to assign keywords to the layer.').format(purpose=purpose)
message = generate_input_error_message(title, m.Paragraph(content))
return (PREPARE_FAILED_BAD_INPUT, message) # depends on [control=['except'], data=[]]
if keywords.get('layer_purpose') != purpose:
title = tr('The expected {purpose} layer is not an {purpose}.').format(purpose=purpose)
content = tr('The expected {purpose} layer is not an {purpose}.').format(purpose=purpose)
message = generate_input_error_message(title, m.Paragraph(content))
return (PREPARE_FAILED_BAD_INPUT, message) # depends on [control=['if'], data=['purpose']]
version = keywords.get(inasafe_keyword_version_key)
supported = is_keyword_version_supported(version)
if not supported:
parameters = {'version': inasafe_keyword_version, 'source': layer.publicSource()}
title = tr('The {purpose} layer is not up to date.').format(purpose=purpose)
content = tr('The layer {source} must be updated to {version}.').format(**parameters)
message = generate_input_error_message(title, m.Paragraph(content))
return (PREPARE_FAILED_BAD_INPUT, message) # depends on [control=['if'], data=[]]
layer.keywords = keywords
if is_vector_layer(layer):
try:
check_inasafe_fields(layer, keywords_only=True) # depends on [control=['try'], data=[]]
except InvalidLayerError:
title = tr('The {purpose} layer is not up to date.').format(purpose=purpose)
content = tr('The layer {source} must be updated with the keyword wizard. Your fields which have been set in the keywords previously are not matching your layer.').format(source=layer.publicSource())
message = generate_input_error_message(title, m.Paragraph(content))
del layer.keywords
return (PREPARE_FAILED_BAD_INPUT, message) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return (PREPARE_SUCCESS, None)
|
def _retrieve_users(self):
"""
Retrieve user objects of the entire administration.
:return: list of dictionary with users information
:rtype: list(dict)
-------
"""
users_url = self._build_url('users')
response = self._request('GET', users_url)
users = response.json()
return users
|
def function[_retrieve_users, parameter[self]]:
constant[
Retrieve user objects of the entire administration.
:return: list of dictionary with users information
:rtype: list(dict)
-------
]
variable[users_url] assign[=] call[name[self]._build_url, parameter[constant[users]]]
variable[response] assign[=] call[name[self]._request, parameter[constant[GET], name[users_url]]]
variable[users] assign[=] call[name[response].json, parameter[]]
return[name[users]]
|
keyword[def] identifier[_retrieve_users] ( identifier[self] ):
literal[string]
identifier[users_url] = identifier[self] . identifier[_build_url] ( literal[string] )
identifier[response] = identifier[self] . identifier[_request] ( literal[string] , identifier[users_url] )
identifier[users] = identifier[response] . identifier[json] ()
keyword[return] identifier[users]
|
def _retrieve_users(self):
"""
Retrieve user objects of the entire administration.
:return: list of dictionary with users information
:rtype: list(dict)
-------
"""
users_url = self._build_url('users')
response = self._request('GET', users_url)
users = response.json()
return users
|
def getApplicationPropertyBool(self, pchAppKey, eProperty):
    """Returns a bool value for an application property. Returns false in all error cases."""
    # The native call writes its error code into an out-parameter.
    error = EVRApplicationError()
    value = self.function_table.getApplicationPropertyBool(
        pchAppKey, eProperty, byref(error))
    return value, error
|
def function[getApplicationPropertyBool, parameter[self, pchAppKey, eProperty]]:
constant[Returns a bool value for an application property. Returns false in all error cases.]
variable[fn] assign[=] name[self].function_table.getApplicationPropertyBool
variable[peError] assign[=] call[name[EVRApplicationError], parameter[]]
variable[result] assign[=] call[name[fn], parameter[name[pchAppKey], name[eProperty], call[name[byref], parameter[name[peError]]]]]
return[tuple[[<ast.Name object at 0x7da207f02860>, <ast.Name object at 0x7da207f01f90>]]]
|
keyword[def] identifier[getApplicationPropertyBool] ( identifier[self] , identifier[pchAppKey] , identifier[eProperty] ):
literal[string]
identifier[fn] = identifier[self] . identifier[function_table] . identifier[getApplicationPropertyBool]
identifier[peError] = identifier[EVRApplicationError] ()
identifier[result] = identifier[fn] ( identifier[pchAppKey] , identifier[eProperty] , identifier[byref] ( identifier[peError] ))
keyword[return] identifier[result] , identifier[peError]
|
def getApplicationPropertyBool(self, pchAppKey, eProperty):
"""Returns a bool value for an application property. Returns false in all error cases."""
fn = self.function_table.getApplicationPropertyBool
peError = EVRApplicationError()
result = fn(pchAppKey, eProperty, byref(peError))
return (result, peError)
|
def _parse_comment(self):
    """Parse an HTML comment at the head of the wikicode string.

    Assumes the head is positioned at the opening ``<`` of ``<!--``.
    On success, emits CommentStart/CommentEnd tokens around the comment
    body; if the comment is never terminated, backtracks and emits the
    literal text ``<!--`` instead.
    """
    # Skip past the four characters of "<!--".
    self._head += 4
    # Position to restore if the comment turns out to be unterminated.
    reset = self._head - 1
    self._push()
    while True:
        this = self._read()
        if this == self.END:
            # Unterminated comment: discard the buffered tokens,
            # rewind, and treat "<!--" as plain text.
            self._pop()
            self._head = reset
            self._emit_text("<!--")
            return
        if this == self._read(1) == "-" and self._read(2) == ">":
            # Found the "-->" terminator: wrap the buffered text in
            # comment tokens and merge them into the parent context.
            self._emit_first(tokens.CommentStart())
            self._emit(tokens.CommentEnd())
            self._emit_all(self._pop())
            self._head += 2
            if self._context & contexts.FAIL_NEXT:
                # _verify_safe() sets this flag while parsing a template
                # or link when it encounters what might be a comment -- we
                # must unset it to let _verify_safe() know it was correct:
                self._context ^= contexts.FAIL_NEXT
            return
        # Ordinary comment character: buffer it and advance.
        self._emit_text(this)
        self._head += 1
|
def function[_parse_comment, parameter[self]]:
constant[Parse an HTML comment at the head of the wikicode string.]
<ast.AugAssign object at 0x7da2044c3370>
variable[reset] assign[=] binary_operation[name[self]._head - constant[1]]
call[name[self]._push, parameter[]]
while constant[True] begin[:]
variable[this] assign[=] call[name[self]._read, parameter[]]
if compare[name[this] equal[==] name[self].END] begin[:]
call[name[self]._pop, parameter[]]
name[self]._head assign[=] name[reset]
call[name[self]._emit_text, parameter[constant[<!--]]]
return[None]
if <ast.BoolOp object at 0x7da2044c3490> begin[:]
call[name[self]._emit_first, parameter[call[name[tokens].CommentStart, parameter[]]]]
call[name[self]._emit, parameter[call[name[tokens].CommentEnd, parameter[]]]]
call[name[self]._emit_all, parameter[call[name[self]._pop, parameter[]]]]
<ast.AugAssign object at 0x7da2044c1ba0>
if binary_operation[name[self]._context <ast.BitAnd object at 0x7da2590d6b60> name[contexts].FAIL_NEXT] begin[:]
<ast.AugAssign object at 0x7da2044c07c0>
return[None]
call[name[self]._emit_text, parameter[name[this]]]
<ast.AugAssign object at 0x7da18bcc8be0>
|
keyword[def] identifier[_parse_comment] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_head] += literal[int]
identifier[reset] = identifier[self] . identifier[_head] - literal[int]
identifier[self] . identifier[_push] ()
keyword[while] keyword[True] :
identifier[this] = identifier[self] . identifier[_read] ()
keyword[if] identifier[this] == identifier[self] . identifier[END] :
identifier[self] . identifier[_pop] ()
identifier[self] . identifier[_head] = identifier[reset]
identifier[self] . identifier[_emit_text] ( literal[string] )
keyword[return]
keyword[if] identifier[this] == identifier[self] . identifier[_read] ( literal[int] )== literal[string] keyword[and] identifier[self] . identifier[_read] ( literal[int] )== literal[string] :
identifier[self] . identifier[_emit_first] ( identifier[tokens] . identifier[CommentStart] ())
identifier[self] . identifier[_emit] ( identifier[tokens] . identifier[CommentEnd] ())
identifier[self] . identifier[_emit_all] ( identifier[self] . identifier[_pop] ())
identifier[self] . identifier[_head] += literal[int]
keyword[if] identifier[self] . identifier[_context] & identifier[contexts] . identifier[FAIL_NEXT] :
identifier[self] . identifier[_context] ^= identifier[contexts] . identifier[FAIL_NEXT]
keyword[return]
identifier[self] . identifier[_emit_text] ( identifier[this] )
identifier[self] . identifier[_head] += literal[int]
|
def _parse_comment(self):
"""Parse an HTML comment at the head of the wikicode string."""
self._head += 4
reset = self._head - 1
self._push()
while True:
this = self._read()
if this == self.END:
self._pop()
self._head = reset
self._emit_text('<!--')
return # depends on [control=['if'], data=[]]
if this == self._read(1) == '-' and self._read(2) == '>':
self._emit_first(tokens.CommentStart())
self._emit(tokens.CommentEnd())
self._emit_all(self._pop())
self._head += 2
if self._context & contexts.FAIL_NEXT:
# _verify_safe() sets this flag while parsing a template
# or link when it encounters what might be a comment -- we
# must unset it to let _verify_safe() know it was correct:
self._context ^= contexts.FAIL_NEXT # depends on [control=['if'], data=[]]
return # depends on [control=['if'], data=[]]
self._emit_text(this)
self._head += 1 # depends on [control=['while'], data=[]]
|
def run(self):
    """overridden from install_lib class"""
    install_lib.install_lib.run(self)
    # manually install included directories if any
    if not include_dirs:
        return
    if sys.version_info >= (3, 0):
        # presumably test fixtures with undecodable bytes -- skipped on
        # Python 3 (same patterns the original excluded)
        excluded = {"invalid_encoded_data*", "unknown_encoding*"}
    else:
        excluded = set()
    ignore = shutil.ignore_patterns(*excluded)
    for directory in include_dirs:
        destination = join(self.install_dir, directory)
        shutil.rmtree(destination, ignore_errors=True)
        shutil.copytree(directory, destination, ignore=ignore)
|
def function[run, parameter[self]]:
constant[overridden from install_lib class]
call[name[install_lib].install_lib.run, parameter[name[self]]]
if name[include_dirs] begin[:]
for taget[name[directory]] in starred[name[include_dirs]] begin[:]
variable[dest] assign[=] call[name[join], parameter[name[self].install_dir, name[directory]]]
if compare[name[sys].version_info greater_or_equal[>=] tuple[[<ast.Constant object at 0x7da1b0246830>, <ast.Constant object at 0x7da1b0246680>]]] begin[:]
variable[exclude] assign[=] <ast.Set object at 0x7da1b0246a70>
call[name[shutil].rmtree, parameter[name[dest]]]
call[name[shutil].copytree, parameter[name[directory], name[dest]]]
|
keyword[def] identifier[run] ( identifier[self] ):
literal[string]
identifier[install_lib] . identifier[install_lib] . identifier[run] ( identifier[self] )
keyword[if] identifier[include_dirs] :
keyword[for] identifier[directory] keyword[in] identifier[include_dirs] :
identifier[dest] = identifier[join] ( identifier[self] . identifier[install_dir] , identifier[directory] )
keyword[if] identifier[sys] . identifier[version_info] >=( literal[int] , literal[int] ):
identifier[exclude] ={ literal[string] , literal[string] }
keyword[else] :
identifier[exclude] = identifier[set] ()
identifier[shutil] . identifier[rmtree] ( identifier[dest] , identifier[ignore_errors] = keyword[True] )
identifier[shutil] . identifier[copytree] (
identifier[directory] , identifier[dest] , identifier[ignore] = identifier[shutil] . identifier[ignore_patterns] (* identifier[exclude] )
)
|
def run(self):
"""overridden from install_lib class"""
install_lib.install_lib.run(self)
# manually install included directories if any
if include_dirs:
for directory in include_dirs:
dest = join(self.install_dir, directory)
if sys.version_info >= (3, 0):
exclude = {'invalid_encoded_data*', 'unknown_encoding*'} # depends on [control=['if'], data=[]]
else:
exclude = set()
shutil.rmtree(dest, ignore_errors=True)
shutil.copytree(directory, dest, ignore=shutil.ignore_patterns(*exclude)) # depends on [control=['for'], data=['directory']] # depends on [control=['if'], data=[]]
|
def remove(self, data):
    """
    Removes a data node from the list. If the list contains more than one
    node having the same data that shall be removed, then the node having
    the first occurrency of the data is removed.
    :param data: the data to be removed in the new list node
    :type data: object
    """
    # Nothing to do for an empty list.
    if self._size == 0:
        return
    head = self._first_node
    # Special case: the match is at the head of the list.
    if data == head.data():
        successor = head.next()
        if successor is None:
            # The list held a single element; reset to the empty sentinel.
            self._first_node = LinkedListNode(None, None)
            self._last_node = self._first_node
            self._size = 0
        else:
            self._first_node = successor
            self._size -= 1
        return
    # General case: walk the list looking one node ahead so the
    # predecessor can be re-linked around the match.
    node = head
    while node is not None:
        candidate = node.next()
        if candidate is not None and data == candidate.data():
            node.update_next(candidate.next())
            self._size -= 1
            return
        node = node.next()
|
def function[remove, parameter[self, data]]:
constant[
Removes a data node from the list. If the list contains more than one
node having the same data that shall be removed, then the node having
the first occurrency of the data is removed.
:param data: the data to be removed in the new list node
:type data: object
]
variable[current_node] assign[=] name[self]._first_node
variable[deleted] assign[=] constant[False]
if compare[name[self]._size equal[==] constant[0]] begin[:]
return[None]
if compare[name[data] equal[==] call[name[current_node].data, parameter[]]] begin[:]
if compare[call[name[current_node].next, parameter[]] is constant[None]] begin[:]
name[self]._first_node assign[=] call[name[LinkedListNode], parameter[constant[None], constant[None]]]
name[self]._last_node assign[=] name[self]._first_node
name[self]._size assign[=] constant[0]
return[None]
variable[current_node] assign[=] call[name[current_node].next, parameter[]]
name[self]._first_node assign[=] name[current_node]
<ast.AugAssign object at 0x7da20c7c8df0>
return[None]
while constant[True] begin[:]
if compare[name[current_node] is constant[None]] begin[:]
variable[deleted] assign[=] constant[False]
break
variable[next_node] assign[=] call[name[current_node].next, parameter[]]
if compare[name[next_node] is_not constant[None]] begin[:]
if compare[name[data] equal[==] call[name[next_node].data, parameter[]]] begin[:]
variable[next_next_node] assign[=] call[name[next_node].next, parameter[]]
call[name[current_node].update_next, parameter[name[next_next_node]]]
variable[next_node] assign[=] constant[None]
variable[deleted] assign[=] constant[True]
break
variable[current_node] assign[=] call[name[current_node].next, parameter[]]
if name[deleted] begin[:]
<ast.AugAssign object at 0x7da20e9b3b20>
|
keyword[def] identifier[remove] ( identifier[self] , identifier[data] ):
literal[string]
identifier[current_node] = identifier[self] . identifier[_first_node]
identifier[deleted] = keyword[False]
keyword[if] identifier[self] . identifier[_size] == literal[int] :
keyword[return]
keyword[if] identifier[data] == identifier[current_node] . identifier[data] ():
keyword[if] identifier[current_node] . identifier[next] () keyword[is] keyword[None] :
identifier[self] . identifier[_first_node] = identifier[LinkedListNode] ( keyword[None] , keyword[None] )
identifier[self] . identifier[_last_node] = identifier[self] . identifier[_first_node]
identifier[self] . identifier[_size] = literal[int]
keyword[return]
identifier[current_node] = identifier[current_node] . identifier[next] ()
identifier[self] . identifier[_first_node] = identifier[current_node]
identifier[self] . identifier[_size] -= literal[int]
keyword[return]
keyword[while] keyword[True] :
keyword[if] identifier[current_node] keyword[is] keyword[None] :
identifier[deleted] = keyword[False]
keyword[break]
identifier[next_node] = identifier[current_node] . identifier[next] ()
keyword[if] identifier[next_node] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[data] == identifier[next_node] . identifier[data] ():
identifier[next_next_node] = identifier[next_node] . identifier[next] ()
identifier[current_node] . identifier[update_next] ( identifier[next_next_node] )
identifier[next_node] = keyword[None]
identifier[deleted] = keyword[True]
keyword[break]
identifier[current_node] = identifier[current_node] . identifier[next] ()
keyword[if] identifier[deleted] :
identifier[self] . identifier[_size] -= literal[int]
|
def remove(self, data):
"""
Removes a data node from the list. If the list contains more than one
node having the same data that shall be removed, then the node having
the first occurrency of the data is removed.
:param data: the data to be removed in the new list node
:type data: object
"""
current_node = self._first_node
deleted = False
if self._size == 0:
return # depends on [control=['if'], data=[]]
if data == current_node.data():
# case 1: the list has only one item
if current_node.next() is None:
self._first_node = LinkedListNode(None, None)
self._last_node = self._first_node
self._size = 0
return # depends on [control=['if'], data=[]]
# case 2: the list has more than one item
current_node = current_node.next()
self._first_node = current_node
self._size -= 1
return # depends on [control=['if'], data=[]]
while True:
if current_node is None:
deleted = False
break # depends on [control=['if'], data=[]]
# Check next element's data
next_node = current_node.next()
if next_node is not None:
if data == next_node.data():
next_next_node = next_node.next()
current_node.update_next(next_next_node)
next_node = None
deleted = True
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['next_node']]
current_node = current_node.next() # depends on [control=['while'], data=[]]
if deleted:
self._size -= 1 # depends on [control=['if'], data=[]]
|
def HStruct_selectFields(structT, fieldsToUse):
    """
    Select fields from structure (rest will become spacing)

    :param structT: HStruct type instance
    :param fieldsToUse: dict {name:{...}} or set of names to select,
        dictionary is used to select nested fields
        in HStruct or HUnion fields
        (f.e. {"struct1": {"field1", "field2"}, "field3":{}}
        will select field1 and 2 from struct1 and field3 from root)
    """
    # Removed the no-op self-assignment "fieldsToUse = fieldsToUse" and
    # narrowed the try/except so it only guards the dict lookup that can
    # actually raise KeyError.
    template = []
    foundNames = set()
    for f in structT.fields:
        name = None
        subfields = []
        if f.name is not None:
            if isinstance(fieldsToUse, dict):
                try:
                    subfields = fieldsToUse[f.name]
                    name = f.name
                except KeyError:
                    # Field not requested: keep name None (deselect it).
                    pass
            elif f.name in fieldsToUse:
                name = f.name

        if name is not None and subfields:
            # Recurse so only the requested nested fields survive.
            fields = HStruct_selectFields(f.dtype, subfields)
            template.append(HStructField(fields, name))
        else:
            # Deselected fields keep their type but lose their name,
            # turning them into anonymous spacing.
            template.append(HStructField(f.dtype, name))

        if f.name is not None:
            foundNames.add(f.name)

    if isinstance(fieldsToUse, dict):
        fieldsToUse = set(fieldsToUse.keys())
    # Every requested field must actually exist in the structure.
    assert fieldsToUse.issubset(foundNames)
    return HStruct(*template)
|
def function[HStruct_selectFields, parameter[structT, fieldsToUse]]:
constant[
Select fields from structure (rest will become spacing)
:param structT: HStruct type instance
:param fieldsToUse: dict {name:{...}} or set of names to select,
dictionary is used to select nested fields
in HStruct or HUnion fields
(f.e. {"struct1": {"field1", "field2"}, "field3":{}}
will select field1 and 2 from struct1 and field3 from root)
]
variable[template] assign[=] list[[]]
variable[fieldsToUse] assign[=] name[fieldsToUse]
variable[foundNames] assign[=] call[name[set], parameter[]]
for taget[name[f]] in starred[name[structT].fields] begin[:]
variable[name] assign[=] constant[None]
variable[subfields] assign[=] list[[]]
if compare[name[f].name is_not constant[None]] begin[:]
<ast.Try object at 0x7da1b03e3970>
if <ast.BoolOp object at 0x7da1b03e2020> begin[:]
variable[fields] assign[=] call[name[HStruct_selectFields], parameter[name[f].dtype, name[subfields]]]
call[name[template].append, parameter[call[name[HStructField], parameter[name[fields], name[name]]]]]
if compare[name[f].name is_not constant[None]] begin[:]
call[name[foundNames].add, parameter[name[f].name]]
if call[name[isinstance], parameter[name[fieldsToUse], name[dict]]] begin[:]
variable[fieldsToUse] assign[=] call[name[set], parameter[call[name[fieldsToUse].keys, parameter[]]]]
assert[call[name[fieldsToUse].issubset, parameter[name[foundNames]]]]
return[call[name[HStruct], parameter[<ast.Starred object at 0x7da1b03e28c0>]]]
|
keyword[def] identifier[HStruct_selectFields] ( identifier[structT] , identifier[fieldsToUse] ):
literal[string]
identifier[template] =[]
identifier[fieldsToUse] = identifier[fieldsToUse]
identifier[foundNames] = identifier[set] ()
keyword[for] identifier[f] keyword[in] identifier[structT] . identifier[fields] :
identifier[name] = keyword[None]
identifier[subfields] =[]
keyword[if] identifier[f] . identifier[name] keyword[is] keyword[not] keyword[None] :
keyword[try] :
keyword[if] identifier[isinstance] ( identifier[fieldsToUse] , identifier[dict] ):
identifier[subfields] = identifier[fieldsToUse] [ identifier[f] . identifier[name] ]
identifier[name] = identifier[f] . identifier[name]
keyword[else] :
keyword[if] identifier[f] . identifier[name] keyword[in] identifier[fieldsToUse] :
identifier[name] = identifier[f] . identifier[name]
keyword[except] identifier[KeyError] :
identifier[name] = keyword[None]
keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] keyword[and] identifier[subfields] :
identifier[fields] = identifier[HStruct_selectFields] ( identifier[f] . identifier[dtype] , identifier[subfields] )
identifier[template] . identifier[append] ( identifier[HStructField] ( identifier[fields] , identifier[name] ))
keyword[else] :
identifier[template] . identifier[append] ( identifier[HStructField] ( identifier[f] . identifier[dtype] , identifier[name] ))
keyword[if] identifier[f] . identifier[name] keyword[is] keyword[not] keyword[None] :
identifier[foundNames] . identifier[add] ( identifier[f] . identifier[name] )
keyword[if] identifier[isinstance] ( identifier[fieldsToUse] , identifier[dict] ):
identifier[fieldsToUse] = identifier[set] ( identifier[fieldsToUse] . identifier[keys] ())
keyword[assert] identifier[fieldsToUse] . identifier[issubset] ( identifier[foundNames] )
keyword[return] identifier[HStruct] (* identifier[template] )
|
def HStruct_selectFields(structT, fieldsToUse):
"""
Select fields from structure (rest will become spacing)
:param structT: HStruct type instance
:param fieldsToUse: dict {name:{...}} or set of names to select,
dictionary is used to select nested fields
in HStruct or HUnion fields
(f.e. {"struct1": {"field1", "field2"}, "field3":{}}
will select field1 and 2 from struct1 and field3 from root)
"""
template = []
fieldsToUse = fieldsToUse
foundNames = set()
for f in structT.fields:
name = None
subfields = []
if f.name is not None:
try:
if isinstance(fieldsToUse, dict):
subfields = fieldsToUse[f.name]
name = f.name # depends on [control=['if'], data=[]]
elif f.name in fieldsToUse:
name = f.name # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyError:
name = None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if name is not None and subfields:
fields = HStruct_selectFields(f.dtype, subfields)
template.append(HStructField(fields, name)) # depends on [control=['if'], data=[]]
else:
template.append(HStructField(f.dtype, name))
if f.name is not None:
foundNames.add(f.name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
if isinstance(fieldsToUse, dict):
fieldsToUse = set(fieldsToUse.keys()) # depends on [control=['if'], data=[]]
assert fieldsToUse.issubset(foundNames)
return HStruct(*template)
|
def pause_resume(self):
    """Toggle between pausing or resuming downloading."""
    # Pick the query suffix for the opposite of the current state,
    # then issue a single request.
    suffix = "&mode=resume" if self.is_paused() else "&mode=pause"
    urlopen(self.url + suffix)
|
def function[pause_resume, parameter[self]]:
constant[Toggle between pausing or resuming downloading.]
if call[name[self].is_paused, parameter[]] begin[:]
call[name[urlopen], parameter[binary_operation[name[self].url + constant[&mode=resume]]]]
|
keyword[def] identifier[pause_resume] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[is_paused] ():
identifier[urlopen] ( identifier[self] . identifier[url] + literal[string] )
keyword[else] :
identifier[urlopen] ( identifier[self] . identifier[url] + literal[string] )
|
def pause_resume(self):
"""Toggle between pausing or resuming downloading."""
if self.is_paused():
urlopen(self.url + '&mode=resume') # depends on [control=['if'], data=[]]
else:
urlopen(self.url + '&mode=pause')
|
async def receive_message_batch_async(self, max_batch_size=None, on_message_received=None, timeout=0):
    """Receive a batch of messages asynchronously. This method will return as soon as some
    messages are available rather than waiting to achieve a specific batch size, and
    therefore the number of messages returned per call will vary up to the maximum allowed.

    If the receive client is configured with `auto_complete=True` then the messages received
    in the batch returned by this function will already be settled. Alternatively, if
    `auto_complete=False`, then each message will need to be explicitly settled before
    it expires and is released.

    :param max_batch_size: The maximum number of messages that can be returned in
     one call. This value cannot be larger than the prefetch value, and if not specified,
     the prefetch value will be used.
    :type max_batch_size: int
    :param on_message_received: A callback to process messages as they arrive from the
     service. It takes a single argument, a ~uamqp.message.Message object.
    :type on_message_received: callable[~uamqp.message.Message]
    :param timeout: A timeout in milliseconds for which to wait to receive any messages.
     If no messages are received in this time, an empty list will be returned. If set to
     0, the client will continue to wait until at least one message is received. The
     default is 0.
    :type timeout: int
    """
    self._message_received_callback = on_message_received
    # The batch can never exceed the link credit configured at construction time.
    max_batch_size = max_batch_size or self._prefetch
    if max_batch_size > self._prefetch:
        raise ValueError(
            'Maximum batch size {} cannot be greater than the '
            'connection link credit: {}'.format(max_batch_size, self._prefetch))
    # Convert the relative timeout into an absolute deadline in ms; 0 keeps
    # the "wait until at least one message" behavior (no deadline).
    timeout = self._counter.get_current_ms() + int(timeout) if timeout else 0
    expired = False
    self._received_messages = self._received_messages or queue.Queue()
    await self.open_async()
    receiving = True
    batch = []
    # First drain any messages already buffered from previous pump cycles.
    while not self._received_messages.empty() and len(batch) < max_batch_size:
        batch.append(self._received_messages.get())
        self._received_messages.task_done()
    if len(batch) >= max_batch_size:
        return batch
    # Keep pumping the connection until the batch is full, the deadline
    # passes, or the link stops receiving.
    while receiving and not expired and len(batch) < max_batch_size:
        while receiving and self._received_messages.qsize() < max_batch_size:
            if timeout and self._counter.get_current_ms() > timeout:
                expired = True
                break
            # Compare queue depth before/after one work cycle to detect
            # whether any new messages actually arrived.
            before = self._received_messages.qsize()
            receiving = await self.do_work_async()
            received = self._received_messages.qsize() - before
            if self._received_messages.qsize() > 0 and received == 0:
                # No new messages arrived, but we have some - so return what we have.
                expired = True
                break
        # Move whatever is buffered into the batch, up to the cap.
        while not self._received_messages.empty() and len(batch) < max_batch_size:
            batch.append(self._received_messages.get())
            self._received_messages.task_done()
    return batch
|
<ast.AsyncFunctionDef object at 0x7da20e962dd0>
|
keyword[async] keyword[def] identifier[receive_message_batch_async] ( identifier[self] , identifier[max_batch_size] = keyword[None] , identifier[on_message_received] = keyword[None] , identifier[timeout] = literal[int] ):
literal[string]
identifier[self] . identifier[_message_received_callback] = identifier[on_message_received]
identifier[max_batch_size] = identifier[max_batch_size] keyword[or] identifier[self] . identifier[_prefetch]
keyword[if] identifier[max_batch_size] > identifier[self] . identifier[_prefetch] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] . identifier[format] ( identifier[max_batch_size] , identifier[self] . identifier[_prefetch] ))
identifier[timeout] = identifier[self] . identifier[_counter] . identifier[get_current_ms] ()+ identifier[int] ( identifier[timeout] ) keyword[if] identifier[timeout] keyword[else] literal[int]
identifier[expired] = keyword[False]
identifier[self] . identifier[_received_messages] = identifier[self] . identifier[_received_messages] keyword[or] identifier[queue] . identifier[Queue] ()
keyword[await] identifier[self] . identifier[open_async] ()
identifier[receiving] = keyword[True]
identifier[batch] =[]
keyword[while] keyword[not] identifier[self] . identifier[_received_messages] . identifier[empty] () keyword[and] identifier[len] ( identifier[batch] )< identifier[max_batch_size] :
identifier[batch] . identifier[append] ( identifier[self] . identifier[_received_messages] . identifier[get] ())
identifier[self] . identifier[_received_messages] . identifier[task_done] ()
keyword[if] identifier[len] ( identifier[batch] )>= identifier[max_batch_size] :
keyword[return] identifier[batch]
keyword[while] identifier[receiving] keyword[and] keyword[not] identifier[expired] keyword[and] identifier[len] ( identifier[batch] )< identifier[max_batch_size] :
keyword[while] identifier[receiving] keyword[and] identifier[self] . identifier[_received_messages] . identifier[qsize] ()< identifier[max_batch_size] :
keyword[if] identifier[timeout] keyword[and] identifier[self] . identifier[_counter] . identifier[get_current_ms] ()> identifier[timeout] :
identifier[expired] = keyword[True]
keyword[break]
identifier[before] = identifier[self] . identifier[_received_messages] . identifier[qsize] ()
identifier[receiving] = keyword[await] identifier[self] . identifier[do_work_async] ()
identifier[received] = identifier[self] . identifier[_received_messages] . identifier[qsize] ()- identifier[before]
keyword[if] identifier[self] . identifier[_received_messages] . identifier[qsize] ()> literal[int] keyword[and] identifier[received] == literal[int] :
identifier[expired] = keyword[True]
keyword[break]
keyword[while] keyword[not] identifier[self] . identifier[_received_messages] . identifier[empty] () keyword[and] identifier[len] ( identifier[batch] )< identifier[max_batch_size] :
identifier[batch] . identifier[append] ( identifier[self] . identifier[_received_messages] . identifier[get] ())
identifier[self] . identifier[_received_messages] . identifier[task_done] ()
keyword[return] identifier[batch]
|
async def receive_message_batch_async(self, max_batch_size=None, on_message_received=None, timeout=0):
"""Receive a batch of messages asynchronously. This method will return as soon as some
messages are available rather than waiting to achieve a specific batch size, and
therefore the number of messages returned per call will vary up to the maximum allowed.
If the receive client is configured with `auto_complete=True` then the messages received
in the batch returned by this function will already be settled. Alternatively, if
`auto_complete=False`, then each message will need to be explicitly settled before
it expires and is released.
:param max_batch_size: The maximum number of messages that can be returned in
one call. This value cannot be larger than the prefetch value, and if not specified,
the prefetch value will be used.
:type max_batch_size: int
:param on_message_received: A callback to process messages as they arrive from the
service. It takes a single argument, a ~uamqp.message.Message object.
:type on_message_received: callable[~uamqp.message.Message]
:param timeout: I timeout in milliseconds for which to wait to receive any messages.
If no messages are received in this time, an empty list will be returned. If set to
0, the client will continue to wait until at least one message is received. The
default is 0.
:type timeout: int
"""
self._message_received_callback = on_message_received
max_batch_size = max_batch_size or self._prefetch
if max_batch_size > self._prefetch:
raise ValueError('Maximum batch size {} cannot be greater than the connection link credit: {}'.format(max_batch_size, self._prefetch)) # depends on [control=['if'], data=['max_batch_size']]
timeout = self._counter.get_current_ms() + int(timeout) if timeout else 0
expired = False
self._received_messages = self._received_messages or queue.Queue()
await self.open_async()
receiving = True
batch = []
while not self._received_messages.empty() and len(batch) < max_batch_size:
batch.append(self._received_messages.get())
self._received_messages.task_done() # depends on [control=['while'], data=[]]
if len(batch) >= max_batch_size:
return batch # depends on [control=['if'], data=[]]
while receiving and (not expired) and (len(batch) < max_batch_size):
while receiving and self._received_messages.qsize() < max_batch_size:
if timeout and self._counter.get_current_ms() > timeout:
expired = True
break # depends on [control=['if'], data=[]]
before = self._received_messages.qsize()
receiving = await self.do_work_async()
received = self._received_messages.qsize() - before
if self._received_messages.qsize() > 0 and received == 0:
# No new messages arrived, but we have some - so return what we have.
expired = True
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
while not self._received_messages.empty() and len(batch) < max_batch_size:
batch.append(self._received_messages.get())
self._received_messages.task_done() # depends on [control=['while'], data=[]] # depends on [control=['while'], data=[]]
return batch
|
def validate_group(images):
    """Validates that the combination of folder and name for all images in
    a group is unique. Raises a ValueError exception if uniqueness
    constraint is violated.

    Parameters
    ----------
    images : List(GroupImage)
        List of images in group

    Raises
    ------
    ValueError
        If two images share the same (folder, name) combination.
    """
    image_ids = set()
    for image in images:
        # Use a tuple as the uniqueness key: plain string concatenation
        # would make ('ab', 'c') and ('a', 'bc') collide and falsely
        # report a duplicate.
        key = (image.folder, image.name)
        if key in image_ids:
            raise ValueError('Duplicate images in group: ' + image.folder + image.name)
        image_ids.add(key)
|
def function[validate_group, parameter[images]]:
constant[Validates that the combination of folder and name for all images in
a group is unique. Raises a ValueError exception if uniqueness
constraint is violated.
Parameters
----------
images : List(GroupImage)
List of images in group
]
variable[image_ids] assign[=] call[name[set], parameter[]]
for taget[name[image]] in starred[name[images]] begin[:]
variable[key] assign[=] binary_operation[name[image].folder + name[image].name]
if compare[name[key] in name[image_ids]] begin[:]
<ast.Raise object at 0x7da1b1348340>
|
keyword[def] identifier[validate_group] ( identifier[images] ):
literal[string]
identifier[image_ids] = identifier[set] ()
keyword[for] identifier[image] keyword[in] identifier[images] :
identifier[key] = identifier[image] . identifier[folder] + identifier[image] . identifier[name]
keyword[if] identifier[key] keyword[in] identifier[image_ids] :
keyword[raise] identifier[ValueError] ( literal[string] + identifier[key] )
keyword[else] :
identifier[image_ids] . identifier[add] ( identifier[key] )
|
def validate_group(images):
"""Validates that the combination of folder and name for all images in
a group is unique. Raises a ValueError exception if uniqueness
constraint is violated.
Parameters
----------
images : List(GroupImage)
List of images in group
"""
image_ids = set()
for image in images:
key = image.folder + image.name
if key in image_ids:
raise ValueError('Duplicate images in group: ' + key) # depends on [control=['if'], data=['key']]
else:
image_ids.add(key) # depends on [control=['for'], data=['image']]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.