code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def delete_request(self, user_id, request_id):
"""Deletes the Request with the given ID for the given user."""
return self.request(
"{0}_{1}".format(request_id, user_id), method="DELETE"
) | def function[delete_request, parameter[self, user_id, request_id]]:
constant[Deletes the Request with the given ID for the given user.]
return[call[name[self].request, parameter[call[constant[{0}_{1}].format, parameter[name[request_id], name[user_id]]]]]] | keyword[def] identifier[delete_request] ( identifier[self] , identifier[user_id] , identifier[request_id] ):
literal[string]
keyword[return] identifier[self] . identifier[request] (
literal[string] . identifier[format] ( identifier[request_id] , identifier[user_id] ), identifier[method] = literal[string]
) | def delete_request(self, user_id, request_id):
"""Deletes the Request with the given ID for the given user."""
return self.request('{0}_{1}'.format(request_id, user_id), method='DELETE') |
def _arith_method_SERIES(cls, op, special):
"""
Wrapper function for Series arithmetic operations, to avoid
code duplication.
"""
str_rep = _get_opstr(op, cls)
op_name = _get_op_name(op, special)
eval_kwargs = _gen_eval_kwargs(op_name)
fill_zeros = _gen_fill_zeros(op_name)
construct_result = (_construct_divmod_result
if op in [divmod, rdivmod] else _construct_result)
def na_op(x, y):
import pandas.core.computation.expressions as expressions
try:
result = expressions.evaluate(op, str_rep, x, y, **eval_kwargs)
except TypeError:
result = masked_arith_op(x, y, op)
result = missing.fill_zeros(result, x, y, op_name, fill_zeros)
return result
def safe_na_op(lvalues, rvalues):
"""
return the result of evaluating na_op on the passed in values
try coercion to object type if the native types are not compatible
Parameters
----------
lvalues : array-like
rvalues : array-like
Raises
------
TypeError: invalid operation
"""
try:
with np.errstate(all='ignore'):
return na_op(lvalues, rvalues)
except Exception:
if is_object_dtype(lvalues):
return libalgos.arrmap_object(lvalues,
lambda x: op(x, rvalues))
raise
def wrapper(left, right):
if isinstance(right, ABCDataFrame):
return NotImplemented
left, right = _align_method_SERIES(left, right)
res_name = get_op_result_name(left, right)
right = maybe_upcast_for_op(right)
if is_categorical_dtype(left):
raise TypeError("{typ} cannot perform the operation "
"{op}".format(typ=type(left).__name__, op=str_rep))
elif is_datetime64_dtype(left) or is_datetime64tz_dtype(left):
# Give dispatch_to_index_op a chance for tests like
# test_dt64_series_add_intlike, which the index dispatching handles
# specifically.
result = dispatch_to_index_op(op, left, right, pd.DatetimeIndex)
return construct_result(left, result,
index=left.index, name=res_name,
dtype=result.dtype)
elif (is_extension_array_dtype(left) or
(is_extension_array_dtype(right) and not is_scalar(right))):
# GH#22378 disallow scalar to exclude e.g. "category", "Int64"
return dispatch_to_extension_op(op, left, right)
elif is_timedelta64_dtype(left):
result = dispatch_to_index_op(op, left, right, pd.TimedeltaIndex)
return construct_result(left, result,
index=left.index, name=res_name)
elif is_timedelta64_dtype(right):
# We should only get here with non-scalar or timedelta64('NaT')
# values for right
# Note: we cannot use dispatch_to_index_op because
# that may incorrectly raise TypeError when we
# should get NullFrequencyError
result = op(pd.Index(left), right)
return construct_result(left, result,
index=left.index, name=res_name,
dtype=result.dtype)
lvalues = left.values
rvalues = right
if isinstance(rvalues, ABCSeries):
rvalues = rvalues.values
result = safe_na_op(lvalues, rvalues)
return construct_result(left, result,
index=left.index, name=res_name, dtype=None)
wrapper.__name__ = op_name
return wrapper | def function[_arith_method_SERIES, parameter[cls, op, special]]:
constant[
Wrapper function for Series arithmetic operations, to avoid
code duplication.
]
variable[str_rep] assign[=] call[name[_get_opstr], parameter[name[op], name[cls]]]
variable[op_name] assign[=] call[name[_get_op_name], parameter[name[op], name[special]]]
variable[eval_kwargs] assign[=] call[name[_gen_eval_kwargs], parameter[name[op_name]]]
variable[fill_zeros] assign[=] call[name[_gen_fill_zeros], parameter[name[op_name]]]
variable[construct_result] assign[=] <ast.IfExp object at 0x7da1b2347040>
def function[na_op, parameter[x, y]]:
import module[pandas.core.computation.expressions] as alias[expressions]
<ast.Try object at 0x7da1b2345fc0>
variable[result] assign[=] call[name[missing].fill_zeros, parameter[name[result], name[x], name[y], name[op_name], name[fill_zeros]]]
return[name[result]]
def function[safe_na_op, parameter[lvalues, rvalues]]:
constant[
return the result of evaluating na_op on the passed in values
try coercion to object type if the native types are not compatible
Parameters
----------
lvalues : array-like
rvalues : array-like
Raises
------
TypeError: invalid operation
]
<ast.Try object at 0x7da204346320>
def function[wrapper, parameter[left, right]]:
if call[name[isinstance], parameter[name[right], name[ABCDataFrame]]] begin[:]
return[name[NotImplemented]]
<ast.Tuple object at 0x7da2043463e0> assign[=] call[name[_align_method_SERIES], parameter[name[left], name[right]]]
variable[res_name] assign[=] call[name[get_op_result_name], parameter[name[left], name[right]]]
variable[right] assign[=] call[name[maybe_upcast_for_op], parameter[name[right]]]
if call[name[is_categorical_dtype], parameter[name[left]]] begin[:]
<ast.Raise object at 0x7da1b2347e20>
variable[lvalues] assign[=] name[left].values
variable[rvalues] assign[=] name[right]
if call[name[isinstance], parameter[name[rvalues], name[ABCSeries]]] begin[:]
variable[rvalues] assign[=] name[rvalues].values
variable[result] assign[=] call[name[safe_na_op], parameter[name[lvalues], name[rvalues]]]
return[call[name[construct_result], parameter[name[left], name[result]]]]
name[wrapper].__name__ assign[=] name[op_name]
return[name[wrapper]] | keyword[def] identifier[_arith_method_SERIES] ( identifier[cls] , identifier[op] , identifier[special] ):
literal[string]
identifier[str_rep] = identifier[_get_opstr] ( identifier[op] , identifier[cls] )
identifier[op_name] = identifier[_get_op_name] ( identifier[op] , identifier[special] )
identifier[eval_kwargs] = identifier[_gen_eval_kwargs] ( identifier[op_name] )
identifier[fill_zeros] = identifier[_gen_fill_zeros] ( identifier[op_name] )
identifier[construct_result] =( identifier[_construct_divmod_result]
keyword[if] identifier[op] keyword[in] [ identifier[divmod] , identifier[rdivmod] ] keyword[else] identifier[_construct_result] )
keyword[def] identifier[na_op] ( identifier[x] , identifier[y] ):
keyword[import] identifier[pandas] . identifier[core] . identifier[computation] . identifier[expressions] keyword[as] identifier[expressions]
keyword[try] :
identifier[result] = identifier[expressions] . identifier[evaluate] ( identifier[op] , identifier[str_rep] , identifier[x] , identifier[y] ,** identifier[eval_kwargs] )
keyword[except] identifier[TypeError] :
identifier[result] = identifier[masked_arith_op] ( identifier[x] , identifier[y] , identifier[op] )
identifier[result] = identifier[missing] . identifier[fill_zeros] ( identifier[result] , identifier[x] , identifier[y] , identifier[op_name] , identifier[fill_zeros] )
keyword[return] identifier[result]
keyword[def] identifier[safe_na_op] ( identifier[lvalues] , identifier[rvalues] ):
literal[string]
keyword[try] :
keyword[with] identifier[np] . identifier[errstate] ( identifier[all] = literal[string] ):
keyword[return] identifier[na_op] ( identifier[lvalues] , identifier[rvalues] )
keyword[except] identifier[Exception] :
keyword[if] identifier[is_object_dtype] ( identifier[lvalues] ):
keyword[return] identifier[libalgos] . identifier[arrmap_object] ( identifier[lvalues] ,
keyword[lambda] identifier[x] : identifier[op] ( identifier[x] , identifier[rvalues] ))
keyword[raise]
keyword[def] identifier[wrapper] ( identifier[left] , identifier[right] ):
keyword[if] identifier[isinstance] ( identifier[right] , identifier[ABCDataFrame] ):
keyword[return] identifier[NotImplemented]
identifier[left] , identifier[right] = identifier[_align_method_SERIES] ( identifier[left] , identifier[right] )
identifier[res_name] = identifier[get_op_result_name] ( identifier[left] , identifier[right] )
identifier[right] = identifier[maybe_upcast_for_op] ( identifier[right] )
keyword[if] identifier[is_categorical_dtype] ( identifier[left] ):
keyword[raise] identifier[TypeError] ( literal[string]
literal[string] . identifier[format] ( identifier[typ] = identifier[type] ( identifier[left] ). identifier[__name__] , identifier[op] = identifier[str_rep] ))
keyword[elif] identifier[is_datetime64_dtype] ( identifier[left] ) keyword[or] identifier[is_datetime64tz_dtype] ( identifier[left] ):
identifier[result] = identifier[dispatch_to_index_op] ( identifier[op] , identifier[left] , identifier[right] , identifier[pd] . identifier[DatetimeIndex] )
keyword[return] identifier[construct_result] ( identifier[left] , identifier[result] ,
identifier[index] = identifier[left] . identifier[index] , identifier[name] = identifier[res_name] ,
identifier[dtype] = identifier[result] . identifier[dtype] )
keyword[elif] ( identifier[is_extension_array_dtype] ( identifier[left] ) keyword[or]
( identifier[is_extension_array_dtype] ( identifier[right] ) keyword[and] keyword[not] identifier[is_scalar] ( identifier[right] ))):
keyword[return] identifier[dispatch_to_extension_op] ( identifier[op] , identifier[left] , identifier[right] )
keyword[elif] identifier[is_timedelta64_dtype] ( identifier[left] ):
identifier[result] = identifier[dispatch_to_index_op] ( identifier[op] , identifier[left] , identifier[right] , identifier[pd] . identifier[TimedeltaIndex] )
keyword[return] identifier[construct_result] ( identifier[left] , identifier[result] ,
identifier[index] = identifier[left] . identifier[index] , identifier[name] = identifier[res_name] )
keyword[elif] identifier[is_timedelta64_dtype] ( identifier[right] ):
identifier[result] = identifier[op] ( identifier[pd] . identifier[Index] ( identifier[left] ), identifier[right] )
keyword[return] identifier[construct_result] ( identifier[left] , identifier[result] ,
identifier[index] = identifier[left] . identifier[index] , identifier[name] = identifier[res_name] ,
identifier[dtype] = identifier[result] . identifier[dtype] )
identifier[lvalues] = identifier[left] . identifier[values]
identifier[rvalues] = identifier[right]
keyword[if] identifier[isinstance] ( identifier[rvalues] , identifier[ABCSeries] ):
identifier[rvalues] = identifier[rvalues] . identifier[values]
identifier[result] = identifier[safe_na_op] ( identifier[lvalues] , identifier[rvalues] )
keyword[return] identifier[construct_result] ( identifier[left] , identifier[result] ,
identifier[index] = identifier[left] . identifier[index] , identifier[name] = identifier[res_name] , identifier[dtype] = keyword[None] )
identifier[wrapper] . identifier[__name__] = identifier[op_name]
keyword[return] identifier[wrapper] | def _arith_method_SERIES(cls, op, special):
"""
Wrapper function for Series arithmetic operations, to avoid
code duplication.
"""
str_rep = _get_opstr(op, cls)
op_name = _get_op_name(op, special)
eval_kwargs = _gen_eval_kwargs(op_name)
fill_zeros = _gen_fill_zeros(op_name)
construct_result = _construct_divmod_result if op in [divmod, rdivmod] else _construct_result
def na_op(x, y):
import pandas.core.computation.expressions as expressions
try:
result = expressions.evaluate(op, str_rep, x, y, **eval_kwargs) # depends on [control=['try'], data=[]]
except TypeError:
result = masked_arith_op(x, y, op) # depends on [control=['except'], data=[]]
result = missing.fill_zeros(result, x, y, op_name, fill_zeros)
return result
def safe_na_op(lvalues, rvalues):
"""
return the result of evaluating na_op on the passed in values
try coercion to object type if the native types are not compatible
Parameters
----------
lvalues : array-like
rvalues : array-like
Raises
------
TypeError: invalid operation
"""
try:
with np.errstate(all='ignore'):
return na_op(lvalues, rvalues) # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
except Exception:
if is_object_dtype(lvalues):
return libalgos.arrmap_object(lvalues, lambda x: op(x, rvalues)) # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=[]]
def wrapper(left, right):
if isinstance(right, ABCDataFrame):
return NotImplemented # depends on [control=['if'], data=[]]
(left, right) = _align_method_SERIES(left, right)
res_name = get_op_result_name(left, right)
right = maybe_upcast_for_op(right)
if is_categorical_dtype(left):
raise TypeError('{typ} cannot perform the operation {op}'.format(typ=type(left).__name__, op=str_rep)) # depends on [control=['if'], data=[]]
elif is_datetime64_dtype(left) or is_datetime64tz_dtype(left):
# Give dispatch_to_index_op a chance for tests like
# test_dt64_series_add_intlike, which the index dispatching handles
# specifically.
result = dispatch_to_index_op(op, left, right, pd.DatetimeIndex)
return construct_result(left, result, index=left.index, name=res_name, dtype=result.dtype) # depends on [control=['if'], data=[]]
elif is_extension_array_dtype(left) or (is_extension_array_dtype(right) and (not is_scalar(right))):
# GH#22378 disallow scalar to exclude e.g. "category", "Int64"
return dispatch_to_extension_op(op, left, right) # depends on [control=['if'], data=[]]
elif is_timedelta64_dtype(left):
result = dispatch_to_index_op(op, left, right, pd.TimedeltaIndex)
return construct_result(left, result, index=left.index, name=res_name) # depends on [control=['if'], data=[]]
elif is_timedelta64_dtype(right):
# We should only get here with non-scalar or timedelta64('NaT')
# values for right
# Note: we cannot use dispatch_to_index_op because
# that may incorrectly raise TypeError when we
# should get NullFrequencyError
result = op(pd.Index(left), right)
return construct_result(left, result, index=left.index, name=res_name, dtype=result.dtype) # depends on [control=['if'], data=[]]
lvalues = left.values
rvalues = right
if isinstance(rvalues, ABCSeries):
rvalues = rvalues.values # depends on [control=['if'], data=[]]
result = safe_na_op(lvalues, rvalues)
return construct_result(left, result, index=left.index, name=res_name, dtype=None)
wrapper.__name__ = op_name
return wrapper |
def to_networkx_graph(self, node_attribute_name='bias', edge_attribute_name='bias'):
"""Convert a binary quadratic model to NetworkX graph format.
Args:
node_attribute_name (hashable, optional, default='bias'):
Attribute name for linear biases.
edge_attribute_name (hashable, optional, default='bias'):
Attribute name for quadratic biases.
Returns:
:class:`networkx.Graph`: A NetworkX graph with biases stored as
node/edge attributes.
Examples:
This example converts a binary quadratic model to a NetworkX graph, using first
the default attribute name for quadratic biases then "weight".
>>> import networkx as nx
>>> bqm = dimod.BinaryQuadraticModel({0: 1, 1: -1, 2: .5},
... {(0, 1): .5, (1, 2): 1.5},
... 1.4,
... dimod.SPIN)
>>> BQM = bqm.to_networkx_graph()
>>> BQM[0][1]['bias']
0.5
>>> BQM.node[0]['bias']
1
>>> BQM_w = bqm.to_networkx_graph(edge_attribute_name='weight')
>>> BQM_w[0][1]['weight']
0.5
"""
import networkx as nx
BQM = nx.Graph()
# add the linear biases
BQM.add_nodes_from(((v, {node_attribute_name: bias, 'vartype': self.vartype})
for v, bias in iteritems(self.linear)))
# add the quadratic biases
BQM.add_edges_from(((u, v, {edge_attribute_name: bias}) for (u, v), bias in iteritems(self.quadratic)))
# set the offset and vartype properties for the graph
BQM.offset = self.offset
BQM.vartype = self.vartype
return BQM | def function[to_networkx_graph, parameter[self, node_attribute_name, edge_attribute_name]]:
constant[Convert a binary quadratic model to NetworkX graph format.
Args:
node_attribute_name (hashable, optional, default='bias'):
Attribute name for linear biases.
edge_attribute_name (hashable, optional, default='bias'):
Attribute name for quadratic biases.
Returns:
:class:`networkx.Graph`: A NetworkX graph with biases stored as
node/edge attributes.
Examples:
This example converts a binary quadratic model to a NetworkX graph, using first
the default attribute name for quadratic biases then "weight".
>>> import networkx as nx
>>> bqm = dimod.BinaryQuadraticModel({0: 1, 1: -1, 2: .5},
... {(0, 1): .5, (1, 2): 1.5},
... 1.4,
... dimod.SPIN)
>>> BQM = bqm.to_networkx_graph()
>>> BQM[0][1]['bias']
0.5
>>> BQM.node[0]['bias']
1
>>> BQM_w = bqm.to_networkx_graph(edge_attribute_name='weight')
>>> BQM_w[0][1]['weight']
0.5
]
import module[networkx] as alias[nx]
variable[BQM] assign[=] call[name[nx].Graph, parameter[]]
call[name[BQM].add_nodes_from, parameter[<ast.GeneratorExp object at 0x7da1b07f7df0>]]
call[name[BQM].add_edges_from, parameter[<ast.GeneratorExp object at 0x7da1b07f6920>]]
name[BQM].offset assign[=] name[self].offset
name[BQM].vartype assign[=] name[self].vartype
return[name[BQM]] | keyword[def] identifier[to_networkx_graph] ( identifier[self] , identifier[node_attribute_name] = literal[string] , identifier[edge_attribute_name] = literal[string] ):
literal[string]
keyword[import] identifier[networkx] keyword[as] identifier[nx]
identifier[BQM] = identifier[nx] . identifier[Graph] ()
identifier[BQM] . identifier[add_nodes_from] ((( identifier[v] ,{ identifier[node_attribute_name] : identifier[bias] , literal[string] : identifier[self] . identifier[vartype] })
keyword[for] identifier[v] , identifier[bias] keyword[in] identifier[iteritems] ( identifier[self] . identifier[linear] )))
identifier[BQM] . identifier[add_edges_from] ((( identifier[u] , identifier[v] ,{ identifier[edge_attribute_name] : identifier[bias] }) keyword[for] ( identifier[u] , identifier[v] ), identifier[bias] keyword[in] identifier[iteritems] ( identifier[self] . identifier[quadratic] )))
identifier[BQM] . identifier[offset] = identifier[self] . identifier[offset]
identifier[BQM] . identifier[vartype] = identifier[self] . identifier[vartype]
keyword[return] identifier[BQM] | def to_networkx_graph(self, node_attribute_name='bias', edge_attribute_name='bias'):
"""Convert a binary quadratic model to NetworkX graph format.
Args:
node_attribute_name (hashable, optional, default='bias'):
Attribute name for linear biases.
edge_attribute_name (hashable, optional, default='bias'):
Attribute name for quadratic biases.
Returns:
:class:`networkx.Graph`: A NetworkX graph with biases stored as
node/edge attributes.
Examples:
This example converts a binary quadratic model to a NetworkX graph, using first
the default attribute name for quadratic biases then "weight".
>>> import networkx as nx
>>> bqm = dimod.BinaryQuadraticModel({0: 1, 1: -1, 2: .5},
... {(0, 1): .5, (1, 2): 1.5},
... 1.4,
... dimod.SPIN)
>>> BQM = bqm.to_networkx_graph()
>>> BQM[0][1]['bias']
0.5
>>> BQM.node[0]['bias']
1
>>> BQM_w = bqm.to_networkx_graph(edge_attribute_name='weight')
>>> BQM_w[0][1]['weight']
0.5
"""
import networkx as nx
BQM = nx.Graph()
# add the linear biases
BQM.add_nodes_from(((v, {node_attribute_name: bias, 'vartype': self.vartype}) for (v, bias) in iteritems(self.linear)))
# add the quadratic biases
BQM.add_edges_from(((u, v, {edge_attribute_name: bias}) for ((u, v), bias) in iteritems(self.quadratic)))
# set the offset and vartype properties for the graph
BQM.offset = self.offset
BQM.vartype = self.vartype
return BQM |
def append_records(self, dataset_key, stream_id, body):
"""Append records to a stream.
:param dataset_key: Dataset identifier, in the form of owner/id
:type dataset_key: str
:param stream_id: Stream unique identifier.
:type stream_id: str
:param body: Object body
:type body: obj
:raises RestApiException: If a server error occurs
Examples
--------
>>> import datadotworld as dw
>>> api_client = dw.api_client()
>>> api_client.append_records('username/test-dataset','streamId',
... {'content':'content'}) # doctest: +SKIP
"""
owner_id, dataset_id = parse_dataset_key(dataset_key)
try:
return self._streams_api.append_records(owner_id, dataset_id,
stream_id, body)
except _swagger.rest.ApiException as e:
raise RestApiError(cause=e) | def function[append_records, parameter[self, dataset_key, stream_id, body]]:
constant[Append records to a stream.
:param dataset_key: Dataset identifier, in the form of owner/id
:type dataset_key: str
:param stream_id: Stream unique identifier.
:type stream_id: str
:param body: Object body
:type body: obj
:raises RestApiException: If a server error occurs
Examples
--------
>>> import datadotworld as dw
>>> api_client = dw.api_client()
>>> api_client.append_records('username/test-dataset','streamId',
... {'content':'content'}) # doctest: +SKIP
]
<ast.Tuple object at 0x7da2049608e0> assign[=] call[name[parse_dataset_key], parameter[name[dataset_key]]]
<ast.Try object at 0x7da204960c70> | keyword[def] identifier[append_records] ( identifier[self] , identifier[dataset_key] , identifier[stream_id] , identifier[body] ):
literal[string]
identifier[owner_id] , identifier[dataset_id] = identifier[parse_dataset_key] ( identifier[dataset_key] )
keyword[try] :
keyword[return] identifier[self] . identifier[_streams_api] . identifier[append_records] ( identifier[owner_id] , identifier[dataset_id] ,
identifier[stream_id] , identifier[body] )
keyword[except] identifier[_swagger] . identifier[rest] . identifier[ApiException] keyword[as] identifier[e] :
keyword[raise] identifier[RestApiError] ( identifier[cause] = identifier[e] ) | def append_records(self, dataset_key, stream_id, body):
"""Append records to a stream.
:param dataset_key: Dataset identifier, in the form of owner/id
:type dataset_key: str
:param stream_id: Stream unique identifier.
:type stream_id: str
:param body: Object body
:type body: obj
:raises RestApiException: If a server error occurs
Examples
--------
>>> import datadotworld as dw
>>> api_client = dw.api_client()
>>> api_client.append_records('username/test-dataset','streamId',
... {'content':'content'}) # doctest: +SKIP
"""
(owner_id, dataset_id) = parse_dataset_key(dataset_key)
try:
return self._streams_api.append_records(owner_id, dataset_id, stream_id, body) # depends on [control=['try'], data=[]]
except _swagger.rest.ApiException as e:
raise RestApiError(cause=e) # depends on [control=['except'], data=['e']] |
def serial_number(self, serial_number):
"""
Sets the serial_number of this DeviceDataPostRequest.
The serial number of the device.
:param serial_number: The serial_number of this DeviceDataPostRequest.
:type: str
"""
if serial_number is not None and len(serial_number) > 64:
raise ValueError("Invalid value for `serial_number`, length must be less than or equal to `64`")
self._serial_number = serial_number | def function[serial_number, parameter[self, serial_number]]:
constant[
Sets the serial_number of this DeviceDataPostRequest.
The serial number of the device.
:param serial_number: The serial_number of this DeviceDataPostRequest.
:type: str
]
if <ast.BoolOp object at 0x7da1b04ef730> begin[:]
<ast.Raise object at 0x7da1b04ec550>
name[self]._serial_number assign[=] name[serial_number] | keyword[def] identifier[serial_number] ( identifier[self] , identifier[serial_number] ):
literal[string]
keyword[if] identifier[serial_number] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[serial_number] )> literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[_serial_number] = identifier[serial_number] | def serial_number(self, serial_number):
"""
Sets the serial_number of this DeviceDataPostRequest.
The serial number of the device.
:param serial_number: The serial_number of this DeviceDataPostRequest.
:type: str
"""
if serial_number is not None and len(serial_number) > 64:
raise ValueError('Invalid value for `serial_number`, length must be less than or equal to `64`') # depends on [control=['if'], data=[]]
self._serial_number = serial_number |
def _wait(self):
"""Block until an upload finishes
Raise an exception if that tar volume failed with an error.
"""
val = self.wait_change.get()
if isinstance(val, Exception):
# Don't other uncharging, because execution is going to stop
raise val
else:
# Uncharge for resources.
self.member_burden -= len(val)
self.concurrency_burden -= 1 | def function[_wait, parameter[self]]:
constant[Block until an upload finishes
Raise an exception if that tar volume failed with an error.
]
variable[val] assign[=] call[name[self].wait_change.get, parameter[]]
if call[name[isinstance], parameter[name[val], name[Exception]]] begin[:]
<ast.Raise object at 0x7da20e9b1420> | keyword[def] identifier[_wait] ( identifier[self] ):
literal[string]
identifier[val] = identifier[self] . identifier[wait_change] . identifier[get] ()
keyword[if] identifier[isinstance] ( identifier[val] , identifier[Exception] ):
keyword[raise] identifier[val]
keyword[else] :
identifier[self] . identifier[member_burden] -= identifier[len] ( identifier[val] )
identifier[self] . identifier[concurrency_burden] -= literal[int] | def _wait(self):
"""Block until an upload finishes
Raise an exception if that tar volume failed with an error.
"""
val = self.wait_change.get()
if isinstance(val, Exception):
# Don't other uncharging, because execution is going to stop
raise val # depends on [control=['if'], data=[]]
else:
# Uncharge for resources.
self.member_burden -= len(val)
self.concurrency_burden -= 1 |
def create_datapipeline(self):
"""Creates data pipeline and adds definition"""
utils.banner("Creating Data Pipeline")
dpobj = datapipeline.AWSDataPipeline(app=self.app, env=self.env, region=self.region, prop_path=self.json_path)
dpobj.create_datapipeline()
dpobj.set_pipeline_definition()
if self.configs[self.env].get('datapipeline').get('activate_on_deploy'):
dpobj.activate_pipeline() | def function[create_datapipeline, parameter[self]]:
constant[Creates data pipeline and adds definition]
call[name[utils].banner, parameter[constant[Creating Data Pipeline]]]
variable[dpobj] assign[=] call[name[datapipeline].AWSDataPipeline, parameter[]]
call[name[dpobj].create_datapipeline, parameter[]]
call[name[dpobj].set_pipeline_definition, parameter[]]
if call[call[call[name[self].configs][name[self].env].get, parameter[constant[datapipeline]]].get, parameter[constant[activate_on_deploy]]] begin[:]
call[name[dpobj].activate_pipeline, parameter[]] | keyword[def] identifier[create_datapipeline] ( identifier[self] ):
literal[string]
identifier[utils] . identifier[banner] ( literal[string] )
identifier[dpobj] = identifier[datapipeline] . identifier[AWSDataPipeline] ( identifier[app] = identifier[self] . identifier[app] , identifier[env] = identifier[self] . identifier[env] , identifier[region] = identifier[self] . identifier[region] , identifier[prop_path] = identifier[self] . identifier[json_path] )
identifier[dpobj] . identifier[create_datapipeline] ()
identifier[dpobj] . identifier[set_pipeline_definition] ()
keyword[if] identifier[self] . identifier[configs] [ identifier[self] . identifier[env] ]. identifier[get] ( literal[string] ). identifier[get] ( literal[string] ):
identifier[dpobj] . identifier[activate_pipeline] () | def create_datapipeline(self):
"""Creates data pipeline and adds definition"""
utils.banner('Creating Data Pipeline')
dpobj = datapipeline.AWSDataPipeline(app=self.app, env=self.env, region=self.region, prop_path=self.json_path)
dpobj.create_datapipeline()
dpobj.set_pipeline_definition()
if self.configs[self.env].get('datapipeline').get('activate_on_deploy'):
dpobj.activate_pipeline() # depends on [control=['if'], data=[]] |
def plot_eqarea_mean(self, meanpars, fig):
    """
    Given a dictionary of parameters from pmag.dofisher, pmag.dolnp, or
    pmag.dobingham (meanpars) plots parameters to fig

    Draws each mean direction as a circle symbol (filled when the
    inclination is positive/down, open otherwise) and, when an
    "alpha95" key is present, the projected alpha-95 confidence circle
    around it.  Every artist created here is appended to
    self.displayed_means so it can be removed later.  When the
    interpretation editor is open (self.ie_open) the same artists are
    mirrored onto it.

    NOTE(review): despite the name, ``fig`` is used like a matplotlib
    Axes (get_xlim/get_ylim/scatter/plot) -- confirm with callers.
    """
    mpars_to_plot = []
    # Nothing to draw for an empty result dictionary.
    if meanpars == {}:
        return
    # 'Fisher by polarity' results carry one sub-dict per polarity
    # mode; collect every non-empty dict entry.  Any other calculation
    # type is a single flat parameter dict.
    if meanpars['calculation_type'] == 'Fisher by polarity':
        for mode in list(meanpars.keys()):
            if type(meanpars[mode]) == dict and meanpars[mode] != {}:
                mpars_to_plot.append(meanpars[mode])
    else:
        mpars_to_plot.append(meanpars)
    # Remember the current view limits so they can be restored after
    # plotting (points drawn with clip_on=False could rescale them).
    ymin, ymax = fig.get_ylim()
    xmin, xmax = fig.get_xlim()
    # Optional per-mean color; default is black.
    if 'color' in meanpars:
        color = meanpars['color']
    else:
        color = 'black'
    # Marker size and opacity shared by everything drawn here.
    size, alpha = 30, 1.
    # put on the mean direction
    for mpars in mpars_to_plot:
        # Project (declination, inclination) to equal-area x/y.
        XYM = pmag.dimap(float(mpars["dec"]), float(mpars["inc"]))
        # Positive (down) inclination: filled symbol with black edge;
        # negative (up): open symbol outlined in the mean color.
        if float(mpars["inc"]) > 0:
            FC = color
            EC = 'black'
        else:
            FC = 'white'
            EC = color
        self.displayed_means.append(fig.scatter([XYM[0]], [
            XYM[1]], marker='o', edgecolor=EC, facecolor=FC, s=size, lw=1, clip_on=False, alpha=alpha))
        if "alpha95" in list(mpars.keys()):
            # get the alpha95: sample the confidence-cone outline and
            # project every (dec, inc) point onto the equal-area plot.
            Xcirc, Ycirc = [], []
            Da95, Ia95 = pmag.circ(float(mpars["dec"]), float(
                mpars["inc"]), float(mpars["alpha95"]))
            for k in range(len(Da95)):
                XY = pmag.dimap(Da95[k], Ia95[k])
                Xcirc.append(XY[0])
                Ycirc.append(XY[1])
            self.displayed_means.append(
                fig.plot(Xcirc, Ycirc, color, alpha=alpha))
        # Mirror the same mean (and its alpha95 outline, if any) onto
        # the interpretation editor's equal-area axes.
        if self.ie_open:
            self.displayed_means.append(self.ie.scatter([XYM[0]], [
                XYM[1]], marker='o', edgecolor=EC, facecolor=FC, s=size, lw=1, clip_on=False, alpha=alpha))
            if "alpha95" in list(mpars.keys()):
                self.displayed_means.append(
                    self.ie.plot(Xcirc, Ycirc, color, alpha=alpha))
            self.ie.eqarea.set_xlim(xmin, xmax)
            self.ie.eqarea.set_ylim(ymin, ymax)
    # Restore the original view limits on the main plot.
    fig.set_xlim(xmin, xmax)
    fig.set_ylim(ymin, ymax)
constant[
Given a dictionary of parameters from pmag.dofisher, pmag.dolnp, or
pmag.dobingham (meanpars) plots parameters to fig
]
variable[mpars_to_plot] assign[=] list[[]]
if compare[name[meanpars] equal[==] dictionary[[], []]] begin[:]
return[None]
if compare[call[name[meanpars]][constant[calculation_type]] equal[==] constant[Fisher by polarity]] begin[:]
for taget[name[mode]] in starred[call[name[list], parameter[call[name[meanpars].keys, parameter[]]]]] begin[:]
if <ast.BoolOp object at 0x7da20c6e7fd0> begin[:]
call[name[mpars_to_plot].append, parameter[call[name[meanpars]][name[mode]]]]
<ast.Tuple object at 0x7da20c6e7910> assign[=] call[name[fig].get_ylim, parameter[]]
<ast.Tuple object at 0x7da20c6e7940> assign[=] call[name[fig].get_xlim, parameter[]]
if compare[constant[color] in name[meanpars]] begin[:]
variable[color] assign[=] call[name[meanpars]][constant[color]]
<ast.Tuple object at 0x7da18f09d750> assign[=] tuple[[<ast.Constant object at 0x7da18f09e0b0>, <ast.Constant object at 0x7da18f09d120>]]
for taget[name[mpars]] in starred[name[mpars_to_plot]] begin[:]
variable[XYM] assign[=] call[name[pmag].dimap, parameter[call[name[float], parameter[call[name[mpars]][constant[dec]]]], call[name[float], parameter[call[name[mpars]][constant[inc]]]]]]
if compare[call[name[float], parameter[call[name[mpars]][constant[inc]]]] greater[>] constant[0]] begin[:]
variable[FC] assign[=] name[color]
variable[EC] assign[=] constant[black]
call[name[self].displayed_means.append, parameter[call[name[fig].scatter, parameter[list[[<ast.Subscript object at 0x7da18f09da20>]], list[[<ast.Subscript object at 0x7da18f09dea0>]]]]]]
if compare[constant[alpha95] in call[name[list], parameter[call[name[mpars].keys, parameter[]]]]] begin[:]
<ast.Tuple object at 0x7da18f09c7c0> assign[=] tuple[[<ast.List object at 0x7da18f09eb00>, <ast.List object at 0x7da18f09d660>]]
<ast.Tuple object at 0x7da18f09c070> assign[=] call[name[pmag].circ, parameter[call[name[float], parameter[call[name[mpars]][constant[dec]]]], call[name[float], parameter[call[name[mpars]][constant[inc]]]], call[name[float], parameter[call[name[mpars]][constant[alpha95]]]]]]
for taget[name[k]] in starred[call[name[range], parameter[call[name[len], parameter[name[Da95]]]]]] begin[:]
variable[XY] assign[=] call[name[pmag].dimap, parameter[call[name[Da95]][name[k]], call[name[Ia95]][name[k]]]]
call[name[Xcirc].append, parameter[call[name[XY]][constant[0]]]]
call[name[Ycirc].append, parameter[call[name[XY]][constant[1]]]]
call[name[self].displayed_means.append, parameter[call[name[fig].plot, parameter[name[Xcirc], name[Ycirc], name[color]]]]]
if name[self].ie_open begin[:]
call[name[self].displayed_means.append, parameter[call[name[self].ie.scatter, parameter[list[[<ast.Subscript object at 0x7da18f09f880>]], list[[<ast.Subscript object at 0x7da18f09efe0>]]]]]]
if compare[constant[alpha95] in call[name[list], parameter[call[name[mpars].keys, parameter[]]]]] begin[:]
call[name[self].displayed_means.append, parameter[call[name[self].ie.plot, parameter[name[Xcirc], name[Ycirc], name[color]]]]]
call[name[self].ie.eqarea.set_xlim, parameter[name[xmin], name[xmax]]]
call[name[self].ie.eqarea.set_ylim, parameter[name[ymin], name[ymax]]]
call[name[fig].set_xlim, parameter[name[xmin], name[xmax]]]
call[name[fig].set_ylim, parameter[name[ymin], name[ymax]]] | keyword[def] identifier[plot_eqarea_mean] ( identifier[self] , identifier[meanpars] , identifier[fig] ):
literal[string]
identifier[mpars_to_plot] =[]
keyword[if] identifier[meanpars] =={}:
keyword[return]
keyword[if] identifier[meanpars] [ literal[string] ]== literal[string] :
keyword[for] identifier[mode] keyword[in] identifier[list] ( identifier[meanpars] . identifier[keys] ()):
keyword[if] identifier[type] ( identifier[meanpars] [ identifier[mode] ])== identifier[dict] keyword[and] identifier[meanpars] [ identifier[mode] ]!={}:
identifier[mpars_to_plot] . identifier[append] ( identifier[meanpars] [ identifier[mode] ])
keyword[else] :
identifier[mpars_to_plot] . identifier[append] ( identifier[meanpars] )
identifier[ymin] , identifier[ymax] = identifier[fig] . identifier[get_ylim] ()
identifier[xmin] , identifier[xmax] = identifier[fig] . identifier[get_xlim] ()
keyword[if] literal[string] keyword[in] identifier[meanpars] :
identifier[color] = identifier[meanpars] [ literal[string] ]
keyword[else] :
identifier[color] = literal[string]
identifier[size] , identifier[alpha] = literal[int] , literal[int]
keyword[for] identifier[mpars] keyword[in] identifier[mpars_to_plot] :
identifier[XYM] = identifier[pmag] . identifier[dimap] ( identifier[float] ( identifier[mpars] [ literal[string] ]), identifier[float] ( identifier[mpars] [ literal[string] ]))
keyword[if] identifier[float] ( identifier[mpars] [ literal[string] ])> literal[int] :
identifier[FC] = identifier[color]
identifier[EC] = literal[string]
keyword[else] :
identifier[FC] = literal[string]
identifier[EC] = identifier[color]
identifier[self] . identifier[displayed_means] . identifier[append] ( identifier[fig] . identifier[scatter] ([ identifier[XYM] [ literal[int] ]],[
identifier[XYM] [ literal[int] ]], identifier[marker] = literal[string] , identifier[edgecolor] = identifier[EC] , identifier[facecolor] = identifier[FC] , identifier[s] = identifier[size] , identifier[lw] = literal[int] , identifier[clip_on] = keyword[False] , identifier[alpha] = identifier[alpha] ))
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[mpars] . identifier[keys] ()):
identifier[Xcirc] , identifier[Ycirc] =[],[]
identifier[Da95] , identifier[Ia95] = identifier[pmag] . identifier[circ] ( identifier[float] ( identifier[mpars] [ literal[string] ]), identifier[float] (
identifier[mpars] [ literal[string] ]), identifier[float] ( identifier[mpars] [ literal[string] ]))
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[Da95] )):
identifier[XY] = identifier[pmag] . identifier[dimap] ( identifier[Da95] [ identifier[k] ], identifier[Ia95] [ identifier[k] ])
identifier[Xcirc] . identifier[append] ( identifier[XY] [ literal[int] ])
identifier[Ycirc] . identifier[append] ( identifier[XY] [ literal[int] ])
identifier[self] . identifier[displayed_means] . identifier[append] (
identifier[fig] . identifier[plot] ( identifier[Xcirc] , identifier[Ycirc] , identifier[color] , identifier[alpha] = identifier[alpha] ))
keyword[if] identifier[self] . identifier[ie_open] :
identifier[self] . identifier[displayed_means] . identifier[append] ( identifier[self] . identifier[ie] . identifier[scatter] ([ identifier[XYM] [ literal[int] ]],[
identifier[XYM] [ literal[int] ]], identifier[marker] = literal[string] , identifier[edgecolor] = identifier[EC] , identifier[facecolor] = identifier[FC] , identifier[s] = identifier[size] , identifier[lw] = literal[int] , identifier[clip_on] = keyword[False] , identifier[alpha] = identifier[alpha] ))
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[mpars] . identifier[keys] ()):
identifier[self] . identifier[displayed_means] . identifier[append] (
identifier[self] . identifier[ie] . identifier[plot] ( identifier[Xcirc] , identifier[Ycirc] , identifier[color] , identifier[alpha] = identifier[alpha] ))
identifier[self] . identifier[ie] . identifier[eqarea] . identifier[set_xlim] ( identifier[xmin] , identifier[xmax] )
identifier[self] . identifier[ie] . identifier[eqarea] . identifier[set_ylim] ( identifier[ymin] , identifier[ymax] )
identifier[fig] . identifier[set_xlim] ( identifier[xmin] , identifier[xmax] )
identifier[fig] . identifier[set_ylim] ( identifier[ymin] , identifier[ymax] ) | def plot_eqarea_mean(self, meanpars, fig):
"""
Given a dictionary of parameters from pmag.dofisher, pmag.dolnp, or
pmag.dobingham (meanpars) plots parameters to fig
"""
mpars_to_plot = []
if meanpars == {}:
return # depends on [control=['if'], data=[]]
if meanpars['calculation_type'] == 'Fisher by polarity':
for mode in list(meanpars.keys()):
if type(meanpars[mode]) == dict and meanpars[mode] != {}:
mpars_to_plot.append(meanpars[mode]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['mode']] # depends on [control=['if'], data=[]]
else:
mpars_to_plot.append(meanpars)
(ymin, ymax) = fig.get_ylim()
(xmin, xmax) = fig.get_xlim()
if 'color' in meanpars:
color = meanpars['color'] # depends on [control=['if'], data=['meanpars']]
else:
color = 'black'
(size, alpha) = (30, 1.0)
# put on the mean direction
for mpars in mpars_to_plot:
XYM = pmag.dimap(float(mpars['dec']), float(mpars['inc']))
if float(mpars['inc']) > 0:
FC = color
EC = 'black' # depends on [control=['if'], data=[]]
else:
FC = 'white'
EC = color
self.displayed_means.append(fig.scatter([XYM[0]], [XYM[1]], marker='o', edgecolor=EC, facecolor=FC, s=size, lw=1, clip_on=False, alpha=alpha))
if 'alpha95' in list(mpars.keys()):
# get the alpha95
(Xcirc, Ycirc) = ([], [])
(Da95, Ia95) = pmag.circ(float(mpars['dec']), float(mpars['inc']), float(mpars['alpha95']))
for k in range(len(Da95)):
XY = pmag.dimap(Da95[k], Ia95[k])
Xcirc.append(XY[0])
Ycirc.append(XY[1]) # depends on [control=['for'], data=['k']]
self.displayed_means.append(fig.plot(Xcirc, Ycirc, color, alpha=alpha)) # depends on [control=['if'], data=[]]
if self.ie_open:
self.displayed_means.append(self.ie.scatter([XYM[0]], [XYM[1]], marker='o', edgecolor=EC, facecolor=FC, s=size, lw=1, clip_on=False, alpha=alpha))
if 'alpha95' in list(mpars.keys()):
self.displayed_means.append(self.ie.plot(Xcirc, Ycirc, color, alpha=alpha)) # depends on [control=['if'], data=[]]
self.ie.eqarea.set_xlim(xmin, xmax)
self.ie.eqarea.set_ylim(ymin, ymax) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['mpars']]
fig.set_xlim(xmin, xmax)
fig.set_ylim(ymin, ymax) |
def _check_reply_pending(self, option):
"""Test the status of requested Telnet options."""
if not self.telnet_opt_dict.has_key(option):
self.telnet_opt_dict[option] = TelnetOption()
return self.telnet_opt_dict[option].reply_pending | def function[_check_reply_pending, parameter[self, option]]:
constant[Test the status of requested Telnet options.]
if <ast.UnaryOp object at 0x7da18fe90820> begin[:]
call[name[self].telnet_opt_dict][name[option]] assign[=] call[name[TelnetOption], parameter[]]
return[call[name[self].telnet_opt_dict][name[option]].reply_pending] | keyword[def] identifier[_check_reply_pending] ( identifier[self] , identifier[option] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[telnet_opt_dict] . identifier[has_key] ( identifier[option] ):
identifier[self] . identifier[telnet_opt_dict] [ identifier[option] ]= identifier[TelnetOption] ()
keyword[return] identifier[self] . identifier[telnet_opt_dict] [ identifier[option] ]. identifier[reply_pending] | def _check_reply_pending(self, option):
"""Test the status of requested Telnet options."""
if not self.telnet_opt_dict.has_key(option):
self.telnet_opt_dict[option] = TelnetOption() # depends on [control=['if'], data=[]]
return self.telnet_opt_dict[option].reply_pending |
def set_decimal(self, pos, decimal):
    """Enable or disable the decimal point at a digit position.

    ``pos`` is 0 to 3 with 0 being the left-most digit on the display;
    positions outside that range are silently ignored.  ``decimal``
    True lights the decimal point, False clears it.
    """
    if not 0 <= pos <= 3:
        # Out-of-bounds digit: nothing to do.
        return
    # Digits 2 and 3 sit past the colon character, so shift them by one.
    skip_colon = 1 if pos >= 2 else 0
    # Mirror the position when the display is mounted upside-down.
    index = 4 - (pos + skip_colon) if self.invert else pos + skip_colon
    # Bit 7 of each digit's first buffer byte is the decimal point.
    mask = 1 << 7
    if decimal:
        self.buffer[index * 2] |= mask
    else:
        self.buffer[index * 2] &= ~mask
constant[Turn decimal point on or off at provided position. Position should be
a value 0 to 3 with 0 being the left most digit on the display. Decimal
should be True to turn on the decimal point and False to turn it off.
]
if <ast.BoolOp object at 0x7da1b1042920> begin[:]
return[None]
variable[offset] assign[=] <ast.IfExp object at 0x7da1b1042230>
if name[self].invert begin[:]
variable[pos] assign[=] binary_operation[constant[4] - binary_operation[name[pos] + name[offset]]]
if name[decimal] begin[:]
<ast.AugAssign object at 0x7da1b10419c0> | keyword[def] identifier[set_decimal] ( identifier[self] , identifier[pos] , identifier[decimal] ):
literal[string]
keyword[if] identifier[pos] < literal[int] keyword[or] identifier[pos] > literal[int] :
keyword[return]
identifier[offset] = literal[int] keyword[if] identifier[pos] < literal[int] keyword[else] literal[int]
keyword[if] identifier[self] . identifier[invert] :
identifier[pos] = literal[int] -( identifier[pos] + identifier[offset] )
keyword[else] :
identifier[pos] = identifier[pos] + identifier[offset]
keyword[if] identifier[decimal] :
identifier[self] . identifier[buffer] [ identifier[pos] * literal[int] ]|=( literal[int] << literal[int] )
keyword[else] :
identifier[self] . identifier[buffer] [ identifier[pos] * literal[int] ]&=~( literal[int] << literal[int] ) | def set_decimal(self, pos, decimal):
"""Turn decimal point on or off at provided position. Position should be
a value 0 to 3 with 0 being the left most digit on the display. Decimal
should be True to turn on the decimal point and False to turn it off.
"""
if pos < 0 or pos > 3:
# Ignore out of bounds digits.
return # depends on [control=['if'], data=[]]
# Jump past the colon at position 2 by adding a conditional offset.
offset = 0 if pos < 2 else 1
# Calculate the correct position depending on orientation
if self.invert:
pos = 4 - (pos + offset) # depends on [control=['if'], data=[]]
else:
pos = pos + offset
# Set bit 7 (decimal point) based on provided value.
if decimal:
self.buffer[pos * 2] |= 1 << 7 # depends on [control=['if'], data=[]]
else:
self.buffer[pos * 2] &= ~(1 << 7) |
def parse_device_list(device_list_str, key):
    """Parses a byte string representing a list of devices.

    The string is generated by calling either adb or fastboot; each
    line holds tab-separated tokens, and lines whose second token
    equals ``key`` identify a device.

    Args:
        device_list_str: Output of adb or fastboot.
        key: The token that signifies a device in device_list_str.

    Returns:
        A list of android device serial numbers.
    """
    decoded = new_str(device_list_str, 'utf-8')
    token_rows = (line.strip().split('\t') for line in decoded.strip().split('\n'))
    return [row[0] for row in token_rows if len(row) == 2 and row[1] == key]
constant[Parses a byte string representing a list of devices.
The string is generated by calling either adb or fastboot. The tokens in
each string is tab-separated.
Args:
device_list_str: Output of adb or fastboot.
key: The token that signifies a device in device_list_str.
Returns:
A list of android device serial numbers.
]
variable[clean_lines] assign[=] call[call[call[name[new_str], parameter[name[device_list_str], constant[utf-8]]].strip, parameter[]].split, parameter[constant[
]]]
variable[results] assign[=] list[[]]
for taget[name[line]] in starred[name[clean_lines]] begin[:]
variable[tokens] assign[=] call[call[name[line].strip, parameter[]].split, parameter[constant[ ]]]
if <ast.BoolOp object at 0x7da1b0633460> begin[:]
call[name[results].append, parameter[call[name[tokens]][constant[0]]]]
return[name[results]] | keyword[def] identifier[parse_device_list] ( identifier[device_list_str] , identifier[key] ):
literal[string]
identifier[clean_lines] = identifier[new_str] ( identifier[device_list_str] , literal[string] ). identifier[strip] (). identifier[split] ( literal[string] )
identifier[results] =[]
keyword[for] identifier[line] keyword[in] identifier[clean_lines] :
identifier[tokens] = identifier[line] . identifier[strip] (). identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[tokens] )== literal[int] keyword[and] identifier[tokens] [ literal[int] ]== identifier[key] :
identifier[results] . identifier[append] ( identifier[tokens] [ literal[int] ])
keyword[return] identifier[results] | def parse_device_list(device_list_str, key):
"""Parses a byte string representing a list of devices.
The string is generated by calling either adb or fastboot. The tokens in
each string is tab-separated.
Args:
device_list_str: Output of adb or fastboot.
key: The token that signifies a device in device_list_str.
Returns:
A list of android device serial numbers.
"""
clean_lines = new_str(device_list_str, 'utf-8').strip().split('\n')
results = []
for line in clean_lines:
tokens = line.strip().split('\t')
if len(tokens) == 2 and tokens[1] == key:
results.append(tokens[0]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
return results |
def update(self, auth, resource, desc=None, defer=False):
    """ Updates the description of the resource.

    Args:
        auth: <cik> for authentication
        resource: Resource to be updated
        desc: A Dictionary containing the update for the resource.
            Defaults to an empty update.
        defer: Passed through to _call to defer the request.
    """
    # Avoid a mutable default argument ({}): a shared default dict
    # could be mutated downstream and leak state between calls.
    if desc is None:
        desc = {}
    return self._call('update', auth, [resource, desc], defer)
constant[ Updates the description of the resource.
Args:
auth: <cik> for authentication
resource: Resource to be updated
desc: A Dictionary containing the update for the resource.
]
return[call[name[self]._call, parameter[constant[update], name[auth], list[[<ast.Name object at 0x7da18f00ff10>, <ast.Name object at 0x7da18f00e9e0>]], name[defer]]]] | keyword[def] identifier[update] ( identifier[self] , identifier[auth] , identifier[resource] , identifier[desc] ={}, identifier[defer] = keyword[False] ):
literal[string]
keyword[return] identifier[self] . identifier[_call] ( literal[string] , identifier[auth] ,[ identifier[resource] , identifier[desc] ], identifier[defer] ) | def update(self, auth, resource, desc={}, defer=False):
""" Updates the description of the resource.
Args:
auth: <cik> for authentication
resource: Resource to be updated
desc: A Dictionary containing the update for the resource.
"""
return self._call('update', auth, [resource, desc], defer) |
def get_admin_url(self, query_params=None, use_path=False):
    """Returns the URL for viewing a FileNode in the admin."""
    query_params = query_params or {}
    url = ''
    if self.is_top_node():
        url = reverse('admin:media_tree_filenode_changelist')
    elif use_path and (self.is_folder() or self.pk):
        url = reverse('admin:media_tree_filenode_open_path',
                      args=(self.get_path(),))
    elif self.is_folder():
        url = reverse('admin:media_tree_filenode_changelist')
        query_params['folder_id'] = self.pk
    elif self.pk:
        # Direct change-form URL; query_params are not appended on
        # this branch (mirrors the original control flow).
        return reverse('admin:media_tree_filenode_change', args=(self.pk,))
    if query_params:
        query_string = "&".join(
            '%s=%s' % (key, value) for key, value in query_params.items())
        url = '%s?%s' % (url, query_string)
    return url
constant[Returns the URL for viewing a FileNode in the admin.]
if <ast.UnaryOp object at 0x7da18c4ce860> begin[:]
variable[query_params] assign[=] dictionary[[], []]
variable[url] assign[=] constant[]
if call[name[self].is_top_node, parameter[]] begin[:]
variable[url] assign[=] call[name[reverse], parameter[constant[admin:media_tree_filenode_changelist]]]
if call[name[len], parameter[name[query_params]]] begin[:]
variable[params] assign[=] <ast.ListComp object at 0x7da207f01060>
variable[url] assign[=] binary_operation[constant[%s?%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20cabe0b0>, <ast.Call object at 0x7da20cabc2e0>]]]
return[name[url]] | keyword[def] identifier[get_admin_url] ( identifier[self] , identifier[query_params] = keyword[None] , identifier[use_path] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[query_params] :
identifier[query_params] ={}
identifier[url] = literal[string]
keyword[if] identifier[self] . identifier[is_top_node] ():
identifier[url] = identifier[reverse] ( literal[string] );
keyword[elif] identifier[use_path] keyword[and] ( identifier[self] . identifier[is_folder] () keyword[or] identifier[self] . identifier[pk] ):
identifier[url] = identifier[reverse] ( literal[string] , identifier[args] =( identifier[self] . identifier[get_path] (),));
keyword[elif] identifier[self] . identifier[is_folder] ():
identifier[url] = identifier[reverse] ( literal[string] );
identifier[query_params] [ literal[string] ]= identifier[self] . identifier[pk]
keyword[elif] identifier[self] . identifier[pk] :
keyword[return] identifier[reverse] ( literal[string] , identifier[args] =( identifier[self] . identifier[pk] ,));
keyword[if] identifier[len] ( identifier[query_params] ):
identifier[params] =[ literal[string] %( identifier[key] , identifier[value] ) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[query_params] . identifier[items] ()]
identifier[url] = literal[string] %( identifier[url] , literal[string] . identifier[join] ( identifier[params] ))
keyword[return] identifier[url] | def get_admin_url(self, query_params=None, use_path=False):
"""Returns the URL for viewing a FileNode in the admin."""
if not query_params:
query_params = {} # depends on [control=['if'], data=[]]
url = ''
if self.is_top_node():
url = reverse('admin:media_tree_filenode_changelist') # depends on [control=['if'], data=[]]
elif use_path and (self.is_folder() or self.pk):
url = reverse('admin:media_tree_filenode_open_path', args=(self.get_path(),)) # depends on [control=['if'], data=[]]
elif self.is_folder():
url = reverse('admin:media_tree_filenode_changelist')
query_params['folder_id'] = self.pk # depends on [control=['if'], data=[]]
elif self.pk:
return reverse('admin:media_tree_filenode_change', args=(self.pk,)) # depends on [control=['if'], data=[]]
if len(query_params):
params = ['%s=%s' % (key, value) for (key, value) in query_params.items()]
url = '%s?%s' % (url, '&'.join(params)) # depends on [control=['if'], data=[]]
return url |
def get_fields(self):
    """
    get property instance according self.columns

    Each entry of self.columns may be:
      * a dotted string "model.property" -- resolved via get_model,
      * a plain string -- looked up on self.model,
      * a SQLAlchemy-style Column -- resolved via its table name,
      * anything else -- assumed to already be a property and kept as-is.

    Returns:
        A list of property instances, one per column.
    """
    columns = self.columns
    model = self.model
    fields = []
    for col in columns:
        if isinstance(col, (str, unicode)):
            v = col.split('.')
            if len(v) > 1:
                # BUGFIX: `properties` is used as a mapping in every
                # other branch, but the original code *called* it here
                # (`properties(v[1])`); subscript it consistently.
                field = get_model(v[0], engine_name=self.model.get_engine_name(),
                                  signal=False).properties[v[1]]
            else:
                field = model.properties[col]
        elif isinstance(col, Column):
            field = get_model(col.table.name, engine_name=self.model.get_engine_name(),
                              signal=False).properties[col.name]
        else:
            field = col
        fields.append(field)
    return fields
constant[
get property instance according self.columns
]
variable[columns] assign[=] name[self].columns
variable[model] assign[=] name[self].model
variable[fields] assign[=] list[[]]
for taget[name[col]] in starred[name[columns]] begin[:]
if call[name[isinstance], parameter[name[col], tuple[[<ast.Name object at 0x7da2044c20b0>, <ast.Name object at 0x7da2044c1870>]]]] begin[:]
variable[v] assign[=] call[name[col].split, parameter[constant[.]]]
if compare[call[name[len], parameter[name[v]]] greater[>] constant[1]] begin[:]
variable[field] assign[=] call[call[name[get_model], parameter[call[name[v]][constant[0]]]].properties, parameter[call[name[v]][constant[1]]]]
call[name[fields].append, parameter[name[field]]]
return[name[fields]] | keyword[def] identifier[get_fields] ( identifier[self] ):
literal[string]
identifier[columns] = identifier[self] . identifier[columns]
identifier[model] = identifier[self] . identifier[model]
identifier[fields] =[]
keyword[for] identifier[col] keyword[in] identifier[columns] :
keyword[if] identifier[isinstance] ( identifier[col] ,( identifier[str] , identifier[unicode] )):
identifier[v] = identifier[col] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[v] )> literal[int] :
identifier[field] = identifier[get_model] ( identifier[v] [ literal[int] ], identifier[engine_name] = identifier[self] . identifier[model] . identifier[get_engine_name] (),
identifier[signal] = keyword[False] ). identifier[properties] ( identifier[v] [ literal[int] ])
keyword[else] :
identifier[field] = identifier[model] . identifier[properties] [ identifier[col] ]
keyword[elif] identifier[isinstance] ( identifier[col] , identifier[Column] ):
identifier[field] = identifier[get_model] ( identifier[col] . identifier[table] . identifier[name] , identifier[engine_name] = identifier[self] . identifier[model] . identifier[get_engine_name] (),
identifier[signal] = keyword[False] ). identifier[properties] [ identifier[col] . identifier[name] ]
keyword[else] :
identifier[field] = identifier[col]
identifier[fields] . identifier[append] ( identifier[field] )
keyword[return] identifier[fields] | def get_fields(self):
"""
get property instance according self.columns
"""
columns = self.columns
model = self.model
fields = []
for col in columns:
if isinstance(col, (str, unicode)):
v = col.split('.')
if len(v) > 1:
field = get_model(v[0], engine_name=self.model.get_engine_name(), signal=False).properties(v[1]) # depends on [control=['if'], data=[]]
else:
field = model.properties[col] # depends on [control=['if'], data=[]]
elif isinstance(col, Column):
field = get_model(col.table.name, engine_name=self.model.get_engine_name(), signal=False).properties[col.name] # depends on [control=['if'], data=[]]
else:
field = col
fields.append(field) # depends on [control=['for'], data=['col']]
return fields |
def get_value_from_cfg(cfg_file):
    ''' initial the configuration with file that you specify
    Sample usage:
        config = get_value_from_cfg()
    return:
        return a dict -->config[section][option] such as config["twsm"]["dut_ip"]
        Returns None when the file is missing or cannot be parsed.
    '''
    if not os.path.isfile(cfg_file):
        return
    parser = ConfigParser.RawConfigParser()
    try:
        parser.read(cfg_file)
    except Exception:
        # Best-effort: an unparseable file is treated like a missing one.
        # raise Exception("\n\tcommon exception 1.2: Not a well format configuration file. error: '%s'" %(e))
        return
    # Build {section: {option: value}} for every section in the file.
    return dict(
        (section, dict((option, parser.get(section, option))
                       for option in parser.options(section)))
        for section in parser.sections()
    )
constant[ initial the configuration with file that you specify
Sample usage:
config = get_value_from_cfg()
return:
return a dict -->config[section][option] such as config["twsm"]["dut_ip"]
]
if <ast.UnaryOp object at 0x7da1b1044ac0> begin[:]
return[None]
variable[cfg] assign[=] dictionary[[], []]
variable[config] assign[=] call[name[ConfigParser].RawConfigParser, parameter[]]
<ast.Try object at 0x7da1b11c3130>
for taget[name[section]] in starred[call[name[config].sections, parameter[]]] begin[:]
call[name[cfg]][name[section]] assign[=] dictionary[[], []]
for taget[name[option]] in starred[call[name[config].options, parameter[name[section]]]] begin[:]
call[call[name[cfg]][name[section]]][name[option]] assign[=] call[name[config].get, parameter[name[section], name[option]]]
return[name[cfg]] | keyword[def] identifier[get_value_from_cfg] ( identifier[cfg_file] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[cfg_file] ):
keyword[return]
identifier[cfg] ={}
identifier[config] = identifier[ConfigParser] . identifier[RawConfigParser] ()
keyword[try] :
identifier[config] . identifier[read] ( identifier[cfg_file] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[return]
keyword[for] identifier[section] keyword[in] identifier[config] . identifier[sections] ():
identifier[cfg] [ identifier[section] ]={}
keyword[for] identifier[option] keyword[in] identifier[config] . identifier[options] ( identifier[section] ):
identifier[cfg] [ identifier[section] ][ identifier[option] ]= identifier[config] . identifier[get] ( identifier[section] , identifier[option] )
keyword[return] identifier[cfg] | def get_value_from_cfg(cfg_file):
""" initial the configuration with file that you specify
Sample usage:
config = get_value_from_cfg()
return:
return a dict -->config[section][option] such as config["twsm"]["dut_ip"]
"""
if not os.path.isfile(cfg_file):
return # depends on [control=['if'], data=[]]
cfg = {}
config = ConfigParser.RawConfigParser()
try:
config.read(cfg_file) # depends on [control=['try'], data=[]]
except Exception as e: # raise Exception("\n\tcommon exception 1.2: Not a well format configuration file. error: '%s'" %(e))
return # depends on [control=['except'], data=[]]
for section in config.sections():
cfg[section] = {}
for option in config.options(section):
cfg[section][option] = config.get(section, option) # depends on [control=['for'], data=['option']] # depends on [control=['for'], data=['section']]
return cfg |
def setparents(self):
    """Recursively correct all parent relations for elements within this scope.

    There is usually no need to call this directly; it is invoked
    implicitly by :meth:`copy`.
    """
    for child in self:
        if not isinstance(child, AbstractElement):
            continue
        child.parent = self
        child.setparents()
constant[Correct all parent relations for elements within the scop. There is sually no need to call this directly, invoked implicitly by :meth:`copy`]
for taget[name[c]] in starred[name[self]] begin[:]
if call[name[isinstance], parameter[name[c], name[AbstractElement]]] begin[:]
name[c].parent assign[=] name[self]
call[name[c].setparents, parameter[]] | keyword[def] identifier[setparents] ( identifier[self] ):
literal[string]
keyword[for] identifier[c] keyword[in] identifier[self] :
keyword[if] identifier[isinstance] ( identifier[c] , identifier[AbstractElement] ):
identifier[c] . identifier[parent] = identifier[self]
identifier[c] . identifier[setparents] () | def setparents(self):
"""Correct all parent relations for elements within the scop. There is sually no need to call this directly, invoked implicitly by :meth:`copy`"""
for c in self:
if isinstance(c, AbstractElement):
c.parent = self
c.setparents() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] |
def _set_index_map(self, atom_mapping):
    """T(r_i) in Eq.(3) is given as permutation of atom indices.

    Builds and stores two arrays on ``self``:

    _index_map_inv : ndarray
        For each translation (shift), atomic indices of the positions
        (_ideal_positions - shift) are searched and stored. The indices
        are used to select eigenvectors, by which T(r_i)|KJ> is
        represented.
        shape=(num_trans, num_sites), dtype='intc'
    _atom_mapping : ndarray
        Integer copy of ``atom_mapping`` where ``None`` entries are
        replaced by -1 (sentinel for "unmapped atom").
        shape=(len(atom_mapping),), dtype='int'
    """
    lattice = self._phonon.supercell.get_cell()
    natom = len(self._ideal_positions)
    index_map_inv = np.zeros((self._N, natom), dtype='intc')
    for i, shift in enumerate(self._trans_s):
        for j, p in enumerate(self._ideal_positions - shift): # minus r_i
            # Fractional-coordinate difference, wrapped to the minimum
            # image by subtracting the nearest integer.
            diff = self._ideal_positions - p
            diff -= np.rint(diff)
            # Cartesian distances from p to every ideal position.
            dist = np.sqrt((np.dot(diff, lattice) ** 2).sum(axis=1))
            # k is index in _ideal_positions.
            # NOTE(review): assumes exactly one position matches within
            # _symprec; an empty match would raise IndexError here.
            k = np.where(dist < self._symprec)[0][0]
            index_map_inv[i, j] = k
    self._index_map_inv = index_map_inv
    # Encode the (possibly partial) atom mapping as integers, -1 meaning
    # "no counterpart".
    self._atom_mapping = np.zeros(len(atom_mapping), dtype='int')
    for i, idx in enumerate(atom_mapping):
        if idx is None:
            self._atom_mapping[i] = -1
        else:
            self._atom_mapping[i] = idx
constant[T(r_i) in Eq.(3) is given as permutation of atom indices.
_index_set : ndarray
For each translation (shift), atomic indices of the positions
(_ideal_positions - shift) are searched and stored. The indices
are used to select eigenvectors, by which T(r_i)|KJ> is
represented.
shape=(num_trans, num_sites), dtype='intc'
]
variable[lattice] assign[=] call[name[self]._phonon.supercell.get_cell, parameter[]]
variable[natom] assign[=] call[name[len], parameter[name[self]._ideal_positions]]
variable[index_map_inv] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Attribute object at 0x7da207f03b50>, <ast.Name object at 0x7da207f02cb0>]]]]
for taget[tuple[[<ast.Name object at 0x7da207f003d0>, <ast.Name object at 0x7da207f00460>]]] in starred[call[name[enumerate], parameter[name[self]._trans_s]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da207f00dc0>, <ast.Name object at 0x7da207f02ef0>]]] in starred[call[name[enumerate], parameter[binary_operation[name[self]._ideal_positions - name[shift]]]]] begin[:]
variable[diff] assign[=] binary_operation[name[self]._ideal_positions - name[p]]
<ast.AugAssign object at 0x7da207f02710>
variable[dist] assign[=] call[name[np].sqrt, parameter[call[binary_operation[call[name[np].dot, parameter[name[diff], name[lattice]]] ** constant[2]].sum, parameter[]]]]
variable[k] assign[=] call[call[call[name[np].where, parameter[compare[name[dist] less[<] name[self]._symprec]]]][constant[0]]][constant[0]]
call[name[index_map_inv]][tuple[[<ast.Name object at 0x7da18ede7490>, <ast.Name object at 0x7da18ede6b00>]]] assign[=] name[k]
name[self]._index_map_inv assign[=] name[index_map_inv]
name[self]._atom_mapping assign[=] call[name[np].zeros, parameter[call[name[len], parameter[name[atom_mapping]]]]]
for taget[tuple[[<ast.Name object at 0x7da18ede7e20>, <ast.Name object at 0x7da18ede4df0>]]] in starred[call[name[enumerate], parameter[name[atom_mapping]]]] begin[:]
if compare[name[idx] is constant[None]] begin[:]
call[name[self]._atom_mapping][name[i]] assign[=] <ast.UnaryOp object at 0x7da18ede6b60> | keyword[def] identifier[_set_index_map] ( identifier[self] , identifier[atom_mapping] ):
literal[string]
identifier[lattice] = identifier[self] . identifier[_phonon] . identifier[supercell] . identifier[get_cell] ()
identifier[natom] = identifier[len] ( identifier[self] . identifier[_ideal_positions] )
identifier[index_map_inv] = identifier[np] . identifier[zeros] (( identifier[self] . identifier[_N] , identifier[natom] ), identifier[dtype] = literal[string] )
keyword[for] identifier[i] , identifier[shift] keyword[in] identifier[enumerate] ( identifier[self] . identifier[_trans_s] ):
keyword[for] identifier[j] , identifier[p] keyword[in] identifier[enumerate] ( identifier[self] . identifier[_ideal_positions] - identifier[shift] ):
identifier[diff] = identifier[self] . identifier[_ideal_positions] - identifier[p]
identifier[diff] -= identifier[np] . identifier[rint] ( identifier[diff] )
identifier[dist] = identifier[np] . identifier[sqrt] (( identifier[np] . identifier[dot] ( identifier[diff] , identifier[lattice] )** literal[int] ). identifier[sum] ( identifier[axis] = literal[int] ))
identifier[k] = identifier[np] . identifier[where] ( identifier[dist] < identifier[self] . identifier[_symprec] )[ literal[int] ][ literal[int] ]
identifier[index_map_inv] [ identifier[i] , identifier[j] ]= identifier[k]
identifier[self] . identifier[_index_map_inv] = identifier[index_map_inv]
identifier[self] . identifier[_atom_mapping] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[atom_mapping] ), identifier[dtype] = literal[string] )
keyword[for] identifier[i] , identifier[idx] keyword[in] identifier[enumerate] ( identifier[atom_mapping] ):
keyword[if] identifier[idx] keyword[is] keyword[None] :
identifier[self] . identifier[_atom_mapping] [ identifier[i] ]=- literal[int]
keyword[else] :
identifier[self] . identifier[_atom_mapping] [ identifier[i] ]= identifier[idx] | def _set_index_map(self, atom_mapping):
"""T(r_i) in Eq.(3) is given as permutation of atom indices.
_index_set : ndarray
For each translation (shift), atomic indices of the positions
(_ideal_positions - shift) are searched and stored. The indices
are used to select eigenvectors, by which T(r_i)|KJ> is
represented.
shape=(num_trans, num_sites), dtype='intc'
"""
lattice = self._phonon.supercell.get_cell()
natom = len(self._ideal_positions)
index_map_inv = np.zeros((self._N, natom), dtype='intc')
for (i, shift) in enumerate(self._trans_s):
for (j, p) in enumerate(self._ideal_positions - shift): # minus r_i
diff = self._ideal_positions - p
diff -= np.rint(diff)
dist = np.sqrt((np.dot(diff, lattice) ** 2).sum(axis=1))
# k is index in _ideal_positions.
k = np.where(dist < self._symprec)[0][0]
index_map_inv[i, j] = k # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
self._index_map_inv = index_map_inv
self._atom_mapping = np.zeros(len(atom_mapping), dtype='int')
for (i, idx) in enumerate(atom_mapping):
if idx is None:
self._atom_mapping[i] = -1 # depends on [control=['if'], data=[]]
else:
self._atom_mapping[i] = idx # depends on [control=['for'], data=[]] |
def has_permission(self, request):
    """
    Returns True if the given HttpRequest has permission to view
    *at least one* page in the admin site.
    """
    # Require the base admin permission first; only then demand that the
    # user has completed two-factor verification.
    base_allowed = super(AdminSiteOTPRequiredMixin, self).has_permission(request)
    if base_allowed:
        return request.user.is_verified()
    return False
constant[
Returns True if the given HttpRequest has permission to view
*at least one* page in the admin site.
]
if <ast.UnaryOp object at 0x7da1b1e7d300> begin[:]
return[constant[False]]
return[call[name[request].user.is_verified, parameter[]]] | keyword[def] identifier[has_permission] ( identifier[self] , identifier[request] ):
literal[string]
keyword[if] keyword[not] identifier[super] ( identifier[AdminSiteOTPRequiredMixin] , identifier[self] ). identifier[has_permission] ( identifier[request] ):
keyword[return] keyword[False]
keyword[return] identifier[request] . identifier[user] . identifier[is_verified] () | def has_permission(self, request):
"""
Returns True if the given HttpRequest has permission to view
*at least one* page in the admin site.
"""
if not super(AdminSiteOTPRequiredMixin, self).has_permission(request):
return False # depends on [control=['if'], data=[]]
return request.user.is_verified() |
def _textOutput(self, gaObjects):
    """
    Prints out the specified Variant objects in a VCF-like form.
    """
    for variant in gaObjects:
        # Fixed VCF-style columns, tab-separated.
        fixed_fields = (
            variant.id, variant.variant_set_id, variant.names,
            variant.reference_name, variant.start, variant.end,
            variant.reference_bases, variant.alternate_bases)
        print(*fixed_fields, sep="\t", end="\t")
        # INFO-like key=value pairs, semicolon-terminated.
        for attr_key, attr_value in variant.attributes.attr.items():
            print(attr_key, attr_value.values[0].string_value,
                  sep="=", end=";")
        print("\t", end="")
        # One colon-joined field group per call.
        for call in variant.calls:
            genotype_text = str(call.genotype).replace('\n', '')
            print(call.call_set_id, genotype_text,
                  call.genotype_likelihood, call.attributes,
                  call.phaseset, sep=":", end="\t")
        print()
constant[
Prints out the specified Variant objects in a VCF-like form.
]
for taget[name[variant]] in starred[name[gaObjects]] begin[:]
call[name[print], parameter[name[variant].id, name[variant].variant_set_id, name[variant].names, name[variant].reference_name, name[variant].start, name[variant].end, name[variant].reference_bases, name[variant].alternate_bases]]
for taget[tuple[[<ast.Name object at 0x7da18fe910f0>, <ast.Name object at 0x7da18fe920e0>]]] in starred[call[name[variant].attributes.attr.items, parameter[]]] begin[:]
variable[val] assign[=] call[name[value].values][constant[0]].string_value
call[name[print], parameter[name[key], name[val]]]
call[name[print], parameter[constant[ ]]]
for taget[name[c]] in starred[name[variant].calls] begin[:]
call[name[print], parameter[name[c].call_set_id, call[call[name[c].genotype.__str__, parameter[]].replace, parameter[constant[
], constant[]]], name[c].genotype_likelihood, name[c].attributes, name[c].phaseset]]
call[name[print], parameter[]] | keyword[def] identifier[_textOutput] ( identifier[self] , identifier[gaObjects] ):
literal[string]
keyword[for] identifier[variant] keyword[in] identifier[gaObjects] :
identifier[print] (
identifier[variant] . identifier[id] , identifier[variant] . identifier[variant_set_id] , identifier[variant] . identifier[names] ,
identifier[variant] . identifier[reference_name] , identifier[variant] . identifier[start] , identifier[variant] . identifier[end] ,
identifier[variant] . identifier[reference_bases] , identifier[variant] . identifier[alternate_bases] ,
identifier[sep] = literal[string] , identifier[end] = literal[string] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[variant] . identifier[attributes] . identifier[attr] . identifier[items] ():
identifier[val] = identifier[value] . identifier[values] [ literal[int] ]. identifier[string_value]
identifier[print] ( identifier[key] , identifier[val] , identifier[sep] = literal[string] , identifier[end] = literal[string] )
identifier[print] ( literal[string] , identifier[end] = literal[string] )
keyword[for] identifier[c] keyword[in] identifier[variant] . identifier[calls] :
identifier[print] (
identifier[c] . identifier[call_set_id] ,
identifier[c] . identifier[genotype] . identifier[__str__] (). identifier[replace] ( literal[string] , literal[string] ),
identifier[c] . identifier[genotype_likelihood] , identifier[c] . identifier[attributes] ,
identifier[c] . identifier[phaseset] , identifier[sep] = literal[string] , identifier[end] = literal[string] )
identifier[print] () | def _textOutput(self, gaObjects):
"""
Prints out the specified Variant objects in a VCF-like form.
"""
for variant in gaObjects:
print(variant.id, variant.variant_set_id, variant.names, variant.reference_name, variant.start, variant.end, variant.reference_bases, variant.alternate_bases, sep='\t', end='\t')
for (key, value) in variant.attributes.attr.items():
val = value.values[0].string_value
print(key, val, sep='=', end=';') # depends on [control=['for'], data=[]]
print('\t', end='')
for c in variant.calls:
print(c.call_set_id, c.genotype.__str__().replace('\n', ''), c.genotype_likelihood, c.attributes, c.phaseset, sep=':', end='\t') # depends on [control=['for'], data=['c']]
print() # depends on [control=['for'], data=['variant']] |
def dict_diff(d1: Dict[Any, Any], d2: Dict[Any, Any],
              deleted_value: Any = None) -> Dict[Any, Any]:
    """
    Returns a representation of the changes that need to be made to ``d1`` to
    create ``d2``.

    Args:
        d1: a dictionary
        d2: another dictionary
        deleted_value: value to use for deleted keys; see below

    Returns:
        dict: a dictionary of the format ``{k: v}`` where the ``k``/``v`` pairs
        are key/value pairs that are absent from ``d1`` and present in ``d2``,
        or present in both but with different values (in which case the ``d2``
        value is shown). If a key ``k`` is present in ``d1`` but absent in
        ``d2``, the result dictionary has the entry ``{k: deleted_value}``.
    """
    changes: Dict[Any, Any] = {}
    # Keys that are new in d2, or whose value changed: report the d2 value.
    for key, new_value in d2.items():
        if key not in d1 or d1[key] != new_value:
            changes[key] = new_value
    # Keys removed going from d1 to d2: report the deletion sentinel.
    for key in d1:
        if key not in d2:
            changes[key] = deleted_value
    return changes
constant[
Returns a representation of the changes that need to be made to ``d1`` to
create ``d2``.
Args:
d1: a dictionary
d2: another dictionary
deleted_value: value to use for deleted keys; see below
Returns:
dict: a dictionary of the format ``{k: v}`` where the ``k``/``v`` pairs
are key/value pairs that are absent from ``d1`` and present in ``d2``,
or present in both but with different values (in which case the ``d2``
value is shown). If a key ``k`` is present in ``d1`` but absent in
``d2``, the result dictionary has the entry ``{k: deleted_value}``.
]
variable[changes] assign[=] <ast.DictComp object at 0x7da1b190c040>
for taget[name[k]] in starred[call[name[d1].keys, parameter[]]] begin[:]
if compare[name[k] <ast.NotIn object at 0x7da2590d7190> name[d2]] begin[:]
call[name[changes]][name[k]] assign[=] name[deleted_value]
return[name[changes]] | keyword[def] identifier[dict_diff] ( identifier[d1] : identifier[Dict] [ identifier[Any] , identifier[Any] ], identifier[d2] : identifier[Dict] [ identifier[Any] , identifier[Any] ],
identifier[deleted_value] : identifier[Any] = keyword[None] )-> identifier[Dict] [ identifier[Any] , identifier[Any] ]:
literal[string]
identifier[changes] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[d2] . identifier[items] ()
keyword[if] identifier[k] keyword[not] keyword[in] identifier[d1] keyword[or] identifier[d2] [ identifier[k] ]!= identifier[d1] [ identifier[k] ]}
keyword[for] identifier[k] keyword[in] identifier[d1] . identifier[keys] ():
keyword[if] identifier[k] keyword[not] keyword[in] identifier[d2] :
identifier[changes] [ identifier[k] ]= identifier[deleted_value]
keyword[return] identifier[changes] | def dict_diff(d1: Dict[Any, Any], d2: Dict[Any, Any], deleted_value: Any=None) -> Dict[Any, Any]:
"""
Returns a representation of the changes that need to be made to ``d1`` to
create ``d2``.
Args:
d1: a dictionary
d2: another dictionary
deleted_value: value to use for deleted keys; see below
Returns:
dict: a dictionary of the format ``{k: v}`` where the ``k``/``v`` pairs
are key/value pairs that are absent from ``d1`` and present in ``d2``,
or present in both but with different values (in which case the ``d2``
value is shown). If a key ``k`` is present in ``d1`` but absent in
``d2``, the result dictionary has the entry ``{k: deleted_value}``.
"""
changes = {k: v for (k, v) in d2.items() if k not in d1 or d2[k] != d1[k]}
for k in d1.keys():
if k not in d2:
changes[k] = deleted_value # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=['k']]
return changes |
def count_variants(graph):
    """Count how many of each type of variant a graph has.

    :param pybel.BELGraph graph: A BEL graph
    :rtype: Counter
    """
    counts = Counter()
    for node_data in graph:
        if not has_variant(graph, node_data):
            continue
        for variant_data in node_data[VARIANTS]:
            counts[variant_data[KIND]] += 1
    return counts
constant[Count how many of each type of variant a graph has.
:param pybel.BELGraph graph: A BEL graph
:rtype: Counter
]
return[call[name[Counter], parameter[<ast.GeneratorExp object at 0x7da1b0cb7d90>]]] | keyword[def] identifier[count_variants] ( identifier[graph] ):
literal[string]
keyword[return] identifier[Counter] (
identifier[variant_data] [ identifier[KIND] ]
keyword[for] identifier[data] keyword[in] identifier[graph]
keyword[if] identifier[has_variant] ( identifier[graph] , identifier[data] )
keyword[for] identifier[variant_data] keyword[in] identifier[data] [ identifier[VARIANTS] ]
) | def count_variants(graph):
"""Count how many of each type of variant a graph has.
:param pybel.BELGraph graph: A BEL graph
:rtype: Counter
"""
return Counter((variant_data[KIND] for data in graph if has_variant(graph, data) for variant_data in data[VARIANTS])) |
def dir():
    """Return the list of patched function names. Used for patching
    functions imported from the module.
    """
    # Local name deliberately avoids shadowing the builtin ``dir`` inside
    # the body; the public function name is fixed by callers.
    names = [
        'access', 'chdir', 'chmod', 'chown', 'close', 'fstat', 'fsync',
        'getcwd', 'lchmod', 'link', 'listdir', 'lstat', 'makedirs',
        'mkdir', 'mknod', 'open', 'read', 'readlink', 'remove',
        'removedirs', 'rename', 'rmdir', 'stat', 'symlink', 'umask',
        'unlink', 'utime', 'walk', 'write',
    ]
    if IS_PY2:
        names.append('getcwdu')
    else:
        names.extend(['getcwdb', 'replace'])
    if sys.platform.startswith('linux'):
        names.extend([
            'fdatasync', 'getxattr', 'listxattr',
            'removexattr', 'setxattr',
        ])
    if use_scandir:
        names.append('scandir')
    return names
constant[Return the list of patched function names. Used for patching
functions imported from the module.
]
variable[dir] assign[=] list[[<ast.Constant object at 0x7da20c794430>, <ast.Constant object at 0x7da20c795570>, <ast.Constant object at 0x7da20c794e20>, <ast.Constant object at 0x7da20c7955a0>, <ast.Constant object at 0x7da20c794250>, <ast.Constant object at 0x7da20c795750>, <ast.Constant object at 0x7da20c795b10>, <ast.Constant object at 0x7da20c794cd0>, <ast.Constant object at 0x7da20c796410>, <ast.Constant object at 0x7da20c7969e0>, <ast.Constant object at 0x7da20c796860>, <ast.Constant object at 0x7da20c795f00>, <ast.Constant object at 0x7da20c794460>, <ast.Constant object at 0x7da20c7942b0>, <ast.Constant object at 0x7da20c796470>, <ast.Constant object at 0x7da20c795060>, <ast.Constant object at 0x7da20c7952a0>, <ast.Constant object at 0x7da20c794ee0>, <ast.Constant object at 0x7da20c795930>, <ast.Constant object at 0x7da20c7952d0>, <ast.Constant object at 0x7da20c7960b0>, <ast.Constant object at 0x7da20c795c60>, <ast.Constant object at 0x7da20c796290>, <ast.Constant object at 0x7da20c795000>, <ast.Constant object at 0x7da20c794eb0>, <ast.Constant object at 0x7da20c795c30>, <ast.Constant object at 0x7da20c796b00>, <ast.Constant object at 0x7da20c794700>, <ast.Constant object at 0x7da20c794a60>]]
if name[IS_PY2] begin[:]
<ast.AugAssign object at 0x7da20c796a10>
if name[use_scandir] begin[:]
<ast.AugAssign object at 0x7da20c7966b0>
return[name[dir]] | keyword[def] identifier[dir] ():
literal[string]
identifier[dir] =[
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string]
]
keyword[if] identifier[IS_PY2] :
identifier[dir] +=[ literal[string] ]
keyword[else] :
identifier[dir] +=[ literal[string] , literal[string] ]
keyword[if] identifier[sys] . identifier[platform] . identifier[startswith] ( literal[string] ):
identifier[dir] +=[
literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string]
]
keyword[if] identifier[use_scandir] :
identifier[dir] +=[ literal[string] ]
keyword[return] identifier[dir] | def dir():
"""Return the list of patched function names. Used for patching
functions imported from the module.
"""
dir = ['access', 'chdir', 'chmod', 'chown', 'close', 'fstat', 'fsync', 'getcwd', 'lchmod', 'link', 'listdir', 'lstat', 'makedirs', 'mkdir', 'mknod', 'open', 'read', 'readlink', 'remove', 'removedirs', 'rename', 'rmdir', 'stat', 'symlink', 'umask', 'unlink', 'utime', 'walk', 'write']
if IS_PY2:
dir += ['getcwdu'] # depends on [control=['if'], data=[]]
else:
dir += ['getcwdb', 'replace']
if sys.platform.startswith('linux'):
dir += ['fdatasync', 'getxattr', 'listxattr', 'removexattr', 'setxattr'] # depends on [control=['if'], data=[]]
if use_scandir:
dir += ['scandir'] # depends on [control=['if'], data=[]]
return dir |
def do_loop_turn(self):  # pylint: disable=too-many-branches
    """Satellite main loop, one iteration::

    * Check and delete zombies actions / modules
    * Get returns from queues
    * Adjust worker number
    * Get new actions

    :return: None
    """
    # Try to see if one of my modules is dead, and restart previously dead modules
    self.check_and_del_zombie_modules()
    # Also reap any zombie worker processes
    self.check_and_del_zombie_workers()
    # Call modules that manage a starting tick pass
    self.hook_point('tick')
    # Log per-worker queue depths and push them as gauges (debug/metrics only)
    for _, sched in self.schedulers.items():
        for mod in self.q_by_mod:
            # In workers we've got actions sent to queue - queue size
            for (worker_id, queue) in list(self.q_by_mod[mod].items()):
                try:
                    actions_count = queue.qsize()
                    results_count = self.returns_queue.qsize()
                    logger.debug("[%s][%s][%s] actions queued: %d, results queued: %d",
                                 sched.name, mod, worker_id, actions_count, results_count)
                    # Update the statistics
                    statsmgr.gauge('worker.%s.actions-queue-size' % worker_id,
                                   actions_count)
                    statsmgr.gauge('worker.%s.results-queue-size' % worker_id,
                                   results_count)
                except (IOError, EOFError):
                    # Queue endpoint vanished (worker died) - stats are best-effort
                    pass
    # todo: temporarily deactivate all this stuff!
    # Before return or get new actions, see how we managed
    # the former ones: are they still in queue(s)? If so, we
    # must wait more or at least have more workers
    # wait_ratio = self.wait_ratio.get_load()
    # total_q = 0
    # try:
    #     for mod in self.q_by_mod:
    #         for queue in list(self.q_by_mod[mod].values()):
    #             total_q += queue.qsize()
    # except (IOError, EOFError):
    #     pass
    # if total_q != 0 and wait_ratio < 2 * self.worker_polling_interval:
    #     logger.debug("I decide to increase the wait ratio")
    #     self.wait_ratio.update_load(wait_ratio * 2)
    #     # self.wait_ratio.update_load(self.worker_polling_interval)
    # else:
    #     # Go to self.worker_polling_interval on normal run, if wait_ratio
    #     # was >2*self.worker_polling_interval,
    #     # it make it come near 2 because if < 2, go up :)
    #     self.wait_ratio.update_load(self.worker_polling_interval)
    # wait_ratio = self.wait_ratio.get_load()
    # statsmgr.timer('core.wait-ratio', wait_ratio)
    # if self.log_loop:
    #     logger.debug("[%s] wait ratio: %f", self.name, wait_ratio)
    # Maybe we do not have enough workers, we check for it
    # and launch the new ones if needed
    self.adjust_worker_number_by_load()
    # Drain every message received since the last turn
    # for queue in self.return_messages:
    try:
        logger.debug("[%s] manage action results: %d results",
                     self.name, self.returns_queue.qsize())
        while self.returns_queue.qsize():
            msg = self.returns_queue.get_nowait()
            if msg is None:
                continue
            if not isinstance(msg, Message):
                logger.warning("Should have received a Message, got a %s!", type(msg))
                continue
            logger.debug("Got a message: %s", msg)
            # 'Done' carries a finished action result; 'Stats' carries
            # per-worker statistics. Anything else is ignored.
            if msg.get_type() == 'Done':
                logger.debug("Got (from %s) an action result: %s",
                             msg.get_source(), msg.get_data())
                self.manage_action_return(msg.get_data())
            elif msg.get_type() == 'Stats':
                logger.debug("Got (from %s) stats: %s",
                             msg.get_source(), msg.get_data())
                if msg.get_source() in self.workers:
                    self.workers[msg.get_source()].stats = msg.get_data()
            else:
                logger.warning("Ignoring message of type: %s", msg.get_type())
    except Full:
        logger.warning("Returns queue is full")
    except Empty:
        logger.debug("Returns queue is empty")
    except (IOError, EOFError) as exp:
        logger.warning("My returns queue is no more available: %s", str(exp))
    except Exception as exp:  # pylint: disable=broad-except
        # Last-resort guard: keep the loop alive, log the full traceback
        logger.error("Failed getting messages in returns queue: %s", str(exp))
        logger.error(traceback.format_exc())
    for _, sched in self.schedulers.items():
        if sched.wait_homerun:
            logger.debug("scheduler home run: %d results", len(sched.wait_homerun))
    if not self.passive:
        # If we are an active satellite, we do not initiate the check getting
        # and return
        try:
            # We send to our schedulers the results of all finished checks
            logger.debug("pushing results...")
            self.push_results()
        except LinkError as exp:
            logger.warning("Scheduler connection failed, I could not send my results!")
        try:
            # And we get the new actions from our schedulers
            logger.debug("getting new actions...")
            self.get_new_actions()
        except LinkError as exp:
            logger.warning("Scheduler connection failed, I could not get new actions!")
    # Get objects from our modules that are not Worker based
    if self.log_loop:
        logger.debug("[%s] get objects from queues", self.name)
    self.get_objects_from_from_queues()
    statsmgr.gauge('external-commands.count', len(self.external_commands))
    statsmgr.gauge('broks.count', len(self.broks))
    statsmgr.gauge('events.count', len(self.events))
constant[Satellite main loop::
* Check and delete zombies actions / modules
* Get returns from queues
* Adjust worker number
* Get new actions
:return: None
]
call[name[self].check_and_del_zombie_modules, parameter[]]
call[name[self].check_and_del_zombie_workers, parameter[]]
call[name[self].hook_point, parameter[constant[tick]]]
for taget[tuple[[<ast.Name object at 0x7da18dc9a4d0>, <ast.Name object at 0x7da18dc9ae30>]]] in starred[call[name[self].schedulers.items, parameter[]]] begin[:]
for taget[name[mod]] in starred[name[self].q_by_mod] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18dc98af0>, <ast.Name object at 0x7da18dc99210>]]] in starred[call[name[list], parameter[call[call[name[self].q_by_mod][name[mod]].items, parameter[]]]]] begin[:]
<ast.Try object at 0x7da18dc98550>
call[name[self].adjust_worker_number_by_load, parameter[]]
<ast.Try object at 0x7da20c794220>
for taget[tuple[[<ast.Name object at 0x7da18dc06770>, <ast.Name object at 0x7da18dc06b60>]]] in starred[call[name[self].schedulers.items, parameter[]]] begin[:]
if name[sched].wait_homerun begin[:]
call[name[logger].debug, parameter[constant[scheduler home run: %d results], call[name[len], parameter[name[sched].wait_homerun]]]]
if <ast.UnaryOp object at 0x7da18bc71ea0> begin[:]
<ast.Try object at 0x7da18bc724a0>
<ast.Try object at 0x7da18bc73700>
if name[self].log_loop begin[:]
call[name[logger].debug, parameter[constant[[%s] get objects from queues], name[self].name]]
call[name[self].get_objects_from_from_queues, parameter[]]
call[name[statsmgr].gauge, parameter[constant[external-commands.count], call[name[len], parameter[name[self].external_commands]]]]
call[name[statsmgr].gauge, parameter[constant[broks.count], call[name[len], parameter[name[self].broks]]]]
call[name[statsmgr].gauge, parameter[constant[events.count], call[name[len], parameter[name[self].events]]]] | keyword[def] identifier[do_loop_turn] ( identifier[self] ):
literal[string]
identifier[self] . identifier[check_and_del_zombie_modules] ()
identifier[self] . identifier[check_and_del_zombie_workers] ()
identifier[self] . identifier[hook_point] ( literal[string] )
keyword[for] identifier[_] , identifier[sched] keyword[in] identifier[self] . identifier[schedulers] . identifier[items] ():
keyword[for] identifier[mod] keyword[in] identifier[self] . identifier[q_by_mod] :
keyword[for] ( identifier[worker_id] , identifier[queue] ) keyword[in] identifier[list] ( identifier[self] . identifier[q_by_mod] [ identifier[mod] ]. identifier[items] ()):
keyword[try] :
identifier[actions_count] = identifier[queue] . identifier[qsize] ()
identifier[results_count] = identifier[self] . identifier[returns_queue] . identifier[qsize] ()
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[sched] . identifier[name] , identifier[mod] , identifier[worker_id] , identifier[actions_count] , identifier[results_count] )
identifier[statsmgr] . identifier[gauge] ( literal[string] % identifier[worker_id] ,
identifier[actions_count] )
identifier[statsmgr] . identifier[gauge] ( literal[string] % identifier[worker_id] ,
identifier[results_count] )
keyword[except] ( identifier[IOError] , identifier[EOFError] ):
keyword[pass]
identifier[self] . identifier[adjust_worker_number_by_load] ()
keyword[try] :
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[self] . identifier[name] , identifier[self] . identifier[returns_queue] . identifier[qsize] ())
keyword[while] identifier[self] . identifier[returns_queue] . identifier[qsize] ():
identifier[msg] = identifier[self] . identifier[returns_queue] . identifier[get_nowait] ()
keyword[if] identifier[msg] keyword[is] keyword[None] :
keyword[continue]
keyword[if] keyword[not] identifier[isinstance] ( identifier[msg] , identifier[Message] ):
identifier[logger] . identifier[warning] ( literal[string] , identifier[type] ( identifier[msg] ))
keyword[continue]
identifier[logger] . identifier[debug] ( literal[string] , identifier[msg] )
keyword[if] identifier[msg] . identifier[get_type] ()== literal[string] :
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[msg] . identifier[get_source] (), identifier[msg] . identifier[get_data] ())
identifier[self] . identifier[manage_action_return] ( identifier[msg] . identifier[get_data] ())
keyword[elif] identifier[msg] . identifier[get_type] ()== literal[string] :
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[msg] . identifier[get_source] (), identifier[msg] . identifier[get_data] ())
keyword[if] identifier[msg] . identifier[get_source] () keyword[in] identifier[self] . identifier[workers] :
identifier[self] . identifier[workers] [ identifier[msg] . identifier[get_source] ()]. identifier[stats] = identifier[msg] . identifier[get_data] ()
keyword[else] :
identifier[logger] . identifier[warning] ( literal[string] , identifier[msg] . identifier[get_type] ())
keyword[except] identifier[Full] :
identifier[logger] . identifier[warning] ( literal[string] )
keyword[except] identifier[Empty] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[except] ( identifier[IOError] , identifier[EOFError] ) keyword[as] identifier[exp] :
identifier[logger] . identifier[warning] ( literal[string] , identifier[str] ( identifier[exp] ))
keyword[except] identifier[Exception] keyword[as] identifier[exp] :
identifier[logger] . identifier[error] ( literal[string] , identifier[str] ( identifier[exp] ))
identifier[logger] . identifier[error] ( identifier[traceback] . identifier[format_exc] ())
keyword[for] identifier[_] , identifier[sched] keyword[in] identifier[self] . identifier[schedulers] . identifier[items] ():
keyword[if] identifier[sched] . identifier[wait_homerun] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[len] ( identifier[sched] . identifier[wait_homerun] ))
keyword[if] keyword[not] identifier[self] . identifier[passive] :
keyword[try] :
identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[push_results] ()
keyword[except] identifier[LinkError] keyword[as] identifier[exp] :
identifier[logger] . identifier[warning] ( literal[string] )
keyword[try] :
identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[get_new_actions] ()
keyword[except] identifier[LinkError] keyword[as] identifier[exp] :
identifier[logger] . identifier[warning] ( literal[string] )
keyword[if] identifier[self] . identifier[log_loop] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] )
identifier[self] . identifier[get_objects_from_from_queues] ()
identifier[statsmgr] . identifier[gauge] ( literal[string] , identifier[len] ( identifier[self] . identifier[external_commands] ))
identifier[statsmgr] . identifier[gauge] ( literal[string] , identifier[len] ( identifier[self] . identifier[broks] ))
identifier[statsmgr] . identifier[gauge] ( literal[string] , identifier[len] ( identifier[self] . identifier[events] )) | def do_loop_turn(self): # pylint: disable=too-many-branches
'Satellite main loop::\n\n * Check and delete zombies actions / modules\n * Get returns from queues\n * Adjust worker number\n * Get new actions\n\n :return: None\n '
# Try to see if one of my module is dead, and restart previously dead modules
self.check_and_del_zombie_modules()
# Also if some zombie workers exist...
self.check_and_del_zombie_workers()
# Call modules that manage a starting tick pass
self.hook_point('tick')
# Print stats for debug
for (_, sched) in self.schedulers.items():
for mod in self.q_by_mod:
# In workers we've got actions sent to queue - queue size
for (worker_id, queue) in list(self.q_by_mod[mod].items()):
try:
actions_count = queue.qsize()
results_count = self.returns_queue.qsize()
logger.debug('[%s][%s][%s] actions queued: %d, results queued: %d', sched.name, mod, worker_id, actions_count, results_count)
# Update the statistics
statsmgr.gauge('worker.%s.actions-queue-size' % worker_id, actions_count)
statsmgr.gauge('worker.%s.results-queue-size' % worker_id, results_count) # depends on [control=['try'], data=[]]
except (IOError, EOFError):
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['mod']] # depends on [control=['for'], data=[]]
# todo temporaray deactivate all this stuff!
# Before return or get new actions, see how we managed
# the former ones: are they still in queue(s)? If so, we
# must wait more or at least have more workers
# wait_ratio = self.wait_ratio.get_load()
# total_q = 0
# try:
# for mod in self.q_by_mod:
# for queue in list(self.q_by_mod[mod].values()):
# total_q += queue.qsize()
# except (IOError, EOFError):
# pass
# if total_q != 0 and wait_ratio < 2 * self.worker_polling_interval:
# logger.debug("I decide to increase the wait ratio")
# self.wait_ratio.update_load(wait_ratio * 2)
# # self.wait_ratio.update_load(self.worker_polling_interval)
# else:
# # Go to self.worker_polling_interval on normal run, if wait_ratio
# # was >2*self.worker_polling_interval,
# # it make it come near 2 because if < 2, go up :)
# self.wait_ratio.update_load(self.worker_polling_interval)
# wait_ratio = self.wait_ratio.get_load()
# statsmgr.timer('core.wait-ratio', wait_ratio)
# if self.log_loop:
# logger.debug("[%s] wait ratio: %f", self.name, wait_ratio)
# Maybe we do not have enough workers, we check for it
# and launch the new ones if needed
self.adjust_worker_number_by_load()
# Manage all messages we've got in the last timeout
# for queue in self.return_messages:
try:
logger.debug('[%s] manage action results: %d results', self.name, self.returns_queue.qsize())
while self.returns_queue.qsize():
msg = self.returns_queue.get_nowait()
if msg is None:
continue # depends on [control=['if'], data=[]]
if not isinstance(msg, Message):
logger.warning('Should have received a Message, got a %s!', type(msg))
continue # depends on [control=['if'], data=[]]
logger.debug('Got a message: %s', msg)
if msg.get_type() == 'Done':
logger.debug('Got (from %s) an action result: %s', msg.get_source(), msg.get_data())
self.manage_action_return(msg.get_data()) # depends on [control=['if'], data=[]]
elif msg.get_type() == 'Stats':
logger.debug('Got (from %s) stats: %s', msg.get_source(), msg.get_data())
if msg.get_source() in self.workers:
self.workers[msg.get_source()].stats = msg.get_data() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
logger.warning('Ignoring message of type: %s', msg.get_type()) # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
except Full:
logger.warning('Returns queue is full') # depends on [control=['except'], data=[]]
except Empty:
logger.debug('Returns queue is empty') # depends on [control=['except'], data=[]]
except (IOError, EOFError) as exp:
logger.warning('My returns queue is no more available: %s', str(exp)) # depends on [control=['except'], data=['exp']]
except Exception as exp: # pylint: disable=broad-except
logger.error('Failed getting messages in returns queue: %s', str(exp))
logger.error(traceback.format_exc()) # depends on [control=['except'], data=['exp']]
for (_, sched) in self.schedulers.items():
if sched.wait_homerun:
logger.debug('scheduler home run: %d results', len(sched.wait_homerun)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if not self.passive:
# If we are an active satellite, we do not initiate the check getting
# and return
try:
# We send to our schedulers the results of all finished checks
logger.debug('pushing results...')
self.push_results() # depends on [control=['try'], data=[]]
except LinkError as exp:
logger.warning('Scheduler connection failed, I could not send my results!') # depends on [control=['except'], data=[]]
try:
# And we get the new actions from our schedulers
logger.debug('getting new actions...')
self.get_new_actions() # depends on [control=['try'], data=[]]
except LinkError as exp:
logger.warning('Scheduler connection failed, I could not get new actions!') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# Get objects from our modules that are not Worker based
if self.log_loop:
logger.debug('[%s] get objects from queues', self.name) # depends on [control=['if'], data=[]]
self.get_objects_from_from_queues()
statsmgr.gauge('external-commands.count', len(self.external_commands))
statsmgr.gauge('broks.count', len(self.broks))
statsmgr.gauge('events.count', len(self.events)) |
def make_definition(name, base, schema):
"""
Create a new definition.
"""
class_name = make_class_name(name)
cls = register(make(class_name, base, schema))
globals()[class_name] = cls | def function[make_definition, parameter[name, base, schema]]:
constant[
Create a new definition.
]
variable[class_name] assign[=] call[name[make_class_name], parameter[name[name]]]
variable[cls] assign[=] call[name[register], parameter[call[name[make], parameter[name[class_name], name[base], name[schema]]]]]
call[call[name[globals], parameter[]]][name[class_name]] assign[=] name[cls] | keyword[def] identifier[make_definition] ( identifier[name] , identifier[base] , identifier[schema] ):
literal[string]
identifier[class_name] = identifier[make_class_name] ( identifier[name] )
identifier[cls] = identifier[register] ( identifier[make] ( identifier[class_name] , identifier[base] , identifier[schema] ))
identifier[globals] ()[ identifier[class_name] ]= identifier[cls] | def make_definition(name, base, schema):
"""
Create a new definition.
"""
class_name = make_class_name(name)
cls = register(make(class_name, base, schema))
globals()[class_name] = cls |
def footprint(self,nside):
"""
Download the survey footprint for HEALpix pixels.
"""
import healpy
import ugali.utils.projector
if nside > 2**9: raise Exception("Overflow error: nside must be <=2**9")
pix = np.arange(healpy.nside2npix(nside),dtype='int')
footprint = np.zeros(healpy.nside2npix(nside),dtype='bool')
ra,dec = ugali.utils.projector.pixToAng(nside,pix)
table_name = 'Pix%i'%nside
self.upload(np.array([pix,ra,dec]).T, ['pix','ra','dec'], name=table_name)
radius = healpy.nside2resol(nside_superpix,arcmin=True)
query="""
SELECT t.pix, dbo.fInFootprintEq(t.ra, t.dec, %g)
FROM %s AS t
"""%(radius, table_name) | def function[footprint, parameter[self, nside]]:
constant[
Download the survey footprint for HEALpix pixels.
]
import module[healpy]
import module[ugali.utils.projector]
if compare[name[nside] greater[>] binary_operation[constant[2] ** constant[9]]] begin[:]
<ast.Raise object at 0x7da2054a5480>
variable[pix] assign[=] call[name[np].arange, parameter[call[name[healpy].nside2npix, parameter[name[nside]]]]]
variable[footprint] assign[=] call[name[np].zeros, parameter[call[name[healpy].nside2npix, parameter[name[nside]]]]]
<ast.Tuple object at 0x7da2054a63b0> assign[=] call[name[ugali].utils.projector.pixToAng, parameter[name[nside], name[pix]]]
variable[table_name] assign[=] binary_operation[constant[Pix%i] <ast.Mod object at 0x7da2590d6920> name[nside]]
call[name[self].upload, parameter[call[name[np].array, parameter[list[[<ast.Name object at 0x7da2054a5b10>, <ast.Name object at 0x7da2054a63e0>, <ast.Name object at 0x7da2054a5150>]]]].T, list[[<ast.Constant object at 0x7da2054a5ab0>, <ast.Constant object at 0x7da2054a5ed0>, <ast.Constant object at 0x7da2054a7a00>]]]]
variable[radius] assign[=] call[name[healpy].nside2resol, parameter[name[nside_superpix]]]
variable[query] assign[=] binary_operation[constant[
SELECT t.pix, dbo.fInFootprintEq(t.ra, t.dec, %g)
FROM %s AS t
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2054a7e20>, <ast.Name object at 0x7da2054a6980>]]] | keyword[def] identifier[footprint] ( identifier[self] , identifier[nside] ):
literal[string]
keyword[import] identifier[healpy]
keyword[import] identifier[ugali] . identifier[utils] . identifier[projector]
keyword[if] identifier[nside] > literal[int] ** literal[int] : keyword[raise] identifier[Exception] ( literal[string] )
identifier[pix] = identifier[np] . identifier[arange] ( identifier[healpy] . identifier[nside2npix] ( identifier[nside] ), identifier[dtype] = literal[string] )
identifier[footprint] = identifier[np] . identifier[zeros] ( identifier[healpy] . identifier[nside2npix] ( identifier[nside] ), identifier[dtype] = literal[string] )
identifier[ra] , identifier[dec] = identifier[ugali] . identifier[utils] . identifier[projector] . identifier[pixToAng] ( identifier[nside] , identifier[pix] )
identifier[table_name] = literal[string] % identifier[nside]
identifier[self] . identifier[upload] ( identifier[np] . identifier[array] ([ identifier[pix] , identifier[ra] , identifier[dec] ]). identifier[T] ,[ literal[string] , literal[string] , literal[string] ], identifier[name] = identifier[table_name] )
identifier[radius] = identifier[healpy] . identifier[nside2resol] ( identifier[nside_superpix] , identifier[arcmin] = keyword[True] )
identifier[query] = literal[string] %( identifier[radius] , identifier[table_name] ) | def footprint(self, nside):
"""
Download the survey footprint for HEALpix pixels.
"""
import healpy
import ugali.utils.projector
if nside > 2 ** 9:
raise Exception('Overflow error: nside must be <=2**9') # depends on [control=['if'], data=[]]
pix = np.arange(healpy.nside2npix(nside), dtype='int')
footprint = np.zeros(healpy.nside2npix(nside), dtype='bool')
(ra, dec) = ugali.utils.projector.pixToAng(nside, pix)
table_name = 'Pix%i' % nside
self.upload(np.array([pix, ra, dec]).T, ['pix', 'ra', 'dec'], name=table_name)
radius = healpy.nside2resol(nside_superpix, arcmin=True)
query = '\n SELECT t.pix, dbo.fInFootprintEq(t.ra, t.dec, %g)\n FROM %s AS t\n ' % (radius, table_name) |
def clear_reply_handlers_by_message_id(self, message_id):
"""
Clears all callback functions registered by register_for_reply() and register_for_reply_by_message_id().
:param message_id: The message id for which we want to clear reply handlers
"""
self.reply_handlers[message_id] = []
if self.reply_saver is not None:
self.reply_saver.start_save_timer() | def function[clear_reply_handlers_by_message_id, parameter[self, message_id]]:
constant[
Clears all callback functions registered by register_for_reply() and register_for_reply_by_message_id().
:param message_id: The message id for which we want to clear reply handlers
]
call[name[self].reply_handlers][name[message_id]] assign[=] list[[]]
if compare[name[self].reply_saver is_not constant[None]] begin[:]
call[name[self].reply_saver.start_save_timer, parameter[]] | keyword[def] identifier[clear_reply_handlers_by_message_id] ( identifier[self] , identifier[message_id] ):
literal[string]
identifier[self] . identifier[reply_handlers] [ identifier[message_id] ]=[]
keyword[if] identifier[self] . identifier[reply_saver] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[reply_saver] . identifier[start_save_timer] () | def clear_reply_handlers_by_message_id(self, message_id):
"""
Clears all callback functions registered by register_for_reply() and register_for_reply_by_message_id().
:param message_id: The message id for which we want to clear reply handlers
"""
self.reply_handlers[message_id] = []
if self.reply_saver is not None:
self.reply_saver.start_save_timer() # depends on [control=['if'], data=[]] |
def align(args):
"""
%prog align database.fasta read1.fq [read2.fq]
Wrapper for `bowtie2` single-end or paired-end, depending on the number of args.
"""
from jcvi.formats.fastq import guessoffset
p = OptionParser(align.__doc__)
p.set_firstN(firstN=0)
p.add_option("--full", default=False, action="store_true",
help="Enforce end-to-end alignment [default: local]")
p.add_option("--reorder", default=False, action="store_true",
help="Keep the input read order [default: %default]")
p.add_option("--null", default=False, action="store_true",
help="Do not write to SAM/BAM output")
p.add_option("--fasta", default=False, action="store_true",
help="Query reads are FASTA")
p.set_cutoff(cutoff=800)
p.set_mateorientation(mateorientation="+-")
p.set_sam_options(bowtie=True)
opts, args = p.parse_args(args)
extra = opts.extra
mo = opts.mateorientation
if mo == '+-':
extra += ""
elif mo == '-+':
extra += "--rf"
else:
extra += "--ff"
PE = True
if len(args) == 2:
logging.debug("Single-end alignment")
PE = False
elif len(args) == 3:
logging.debug("Paired-end alignment")
else:
sys.exit(not p.print_help())
firstN = opts.firstN
mapped = opts.mapped
unmapped = opts.unmapped
fasta = opts.fasta
gl = "--end-to-end" if opts.full else "--local"
dbfile, readfile = args[0:2]
dbfile = check_index(dbfile)
prefix = get_prefix(readfile, dbfile)
samfile, mapped, unmapped = get_samfile(readfile, dbfile, bowtie=True,
mapped=mapped, unmapped=unmapped,
bam=opts.bam)
logfile = prefix + ".log"
if not fasta:
offset = guessoffset([readfile])
if not need_update(dbfile, samfile):
logging.error("`{0}` exists. `bowtie2` already run.".format(samfile))
return samfile, logfile
cmd = "bowtie2 -x {0}".format(dbfile)
if PE:
r1, r2 = args[1:3]
cmd += " -1 {0} -2 {1}".format(r1, r2)
cmd += " --maxins {0}".format(opts.cutoff)
mtag, utag = "--al-conc", "--un-conc"
else:
cmd += " -U {0}".format(readfile)
mtag, utag = "--al", "--un"
if mapped:
cmd += " {0} {1}".format(mtag, mapped)
if unmapped:
cmd += " {0} {1}".format(utag, unmapped)
if firstN:
cmd += " --upto {0}".format(firstN)
cmd += " -p {0}".format(opts.cpus)
if fasta:
cmd += " -f"
else:
cmd += " --phred{0}".format(offset)
cmd += " {0}".format(gl)
if opts.reorder:
cmd += " --reorder"
cmd += " {0}".format(extra)
# Finally the log
cmd += " 2> {0}".format(logfile)
if opts.null:
samfile = "/dev/null"
cmd = output_bam(cmd, samfile)
sh(cmd)
print(open(logfile).read(), file=sys.stderr)
return samfile, logfile | def function[align, parameter[args]]:
constant[
%prog align database.fasta read1.fq [read2.fq]
Wrapper for `bowtie2` single-end or paired-end, depending on the number of args.
]
from relative_module[jcvi.formats.fastq] import module[guessoffset]
variable[p] assign[=] call[name[OptionParser], parameter[name[align].__doc__]]
call[name[p].set_firstN, parameter[]]
call[name[p].add_option, parameter[constant[--full]]]
call[name[p].add_option, parameter[constant[--reorder]]]
call[name[p].add_option, parameter[constant[--null]]]
call[name[p].add_option, parameter[constant[--fasta]]]
call[name[p].set_cutoff, parameter[]]
call[name[p].set_mateorientation, parameter[]]
call[name[p].set_sam_options, parameter[]]
<ast.Tuple object at 0x7da1b0902320> assign[=] call[name[p].parse_args, parameter[name[args]]]
variable[extra] assign[=] name[opts].extra
variable[mo] assign[=] name[opts].mateorientation
if compare[name[mo] equal[==] constant[+-]] begin[:]
<ast.AugAssign object at 0x7da1b09038b0>
variable[PE] assign[=] constant[True]
if compare[call[name[len], parameter[name[args]]] equal[==] constant[2]] begin[:]
call[name[logging].debug, parameter[constant[Single-end alignment]]]
variable[PE] assign[=] constant[False]
variable[firstN] assign[=] name[opts].firstN
variable[mapped] assign[=] name[opts].mapped
variable[unmapped] assign[=] name[opts].unmapped
variable[fasta] assign[=] name[opts].fasta
variable[gl] assign[=] <ast.IfExp object at 0x7da18c4cd3c0>
<ast.Tuple object at 0x7da18c4cfaf0> assign[=] call[name[args]][<ast.Slice object at 0x7da18c4ce920>]
variable[dbfile] assign[=] call[name[check_index], parameter[name[dbfile]]]
variable[prefix] assign[=] call[name[get_prefix], parameter[name[readfile], name[dbfile]]]
<ast.Tuple object at 0x7da18c4cc520> assign[=] call[name[get_samfile], parameter[name[readfile], name[dbfile]]]
variable[logfile] assign[=] binary_operation[name[prefix] + constant[.log]]
if <ast.UnaryOp object at 0x7da18f810430> begin[:]
variable[offset] assign[=] call[name[guessoffset], parameter[list[[<ast.Name object at 0x7da18f810280>]]]]
if <ast.UnaryOp object at 0x7da18f812f50> begin[:]
call[name[logging].error, parameter[call[constant[`{0}` exists. `bowtie2` already run.].format, parameter[name[samfile]]]]]
return[tuple[[<ast.Name object at 0x7da18f810e50>, <ast.Name object at 0x7da18f810f10>]]]
variable[cmd] assign[=] call[constant[bowtie2 -x {0}].format, parameter[name[dbfile]]]
if name[PE] begin[:]
<ast.Tuple object at 0x7da18f813f70> assign[=] call[name[args]][<ast.Slice object at 0x7da18f811b70>]
<ast.AugAssign object at 0x7da18f812b30>
<ast.AugAssign object at 0x7da18f810880>
<ast.Tuple object at 0x7da18f8136d0> assign[=] tuple[[<ast.Constant object at 0x7da18f8136a0>, <ast.Constant object at 0x7da18f812c20>]]
if name[mapped] begin[:]
<ast.AugAssign object at 0x7da18f810190>
if name[unmapped] begin[:]
<ast.AugAssign object at 0x7da18f812c80>
if name[firstN] begin[:]
<ast.AugAssign object at 0x7da18f811ed0>
<ast.AugAssign object at 0x7da18f812410>
if name[fasta] begin[:]
<ast.AugAssign object at 0x7da18f811f30>
<ast.AugAssign object at 0x7da18f810250>
if name[opts].reorder begin[:]
<ast.AugAssign object at 0x7da18f811c60>
<ast.AugAssign object at 0x7da18f810b50>
<ast.AugAssign object at 0x7da18f810610>
if name[opts].null begin[:]
variable[samfile] assign[=] constant[/dev/null]
variable[cmd] assign[=] call[name[output_bam], parameter[name[cmd], name[samfile]]]
call[name[sh], parameter[name[cmd]]]
call[name[print], parameter[call[call[name[open], parameter[name[logfile]]].read, parameter[]]]]
return[tuple[[<ast.Name object at 0x7da18f811de0>, <ast.Name object at 0x7da18f811810>]]] | keyword[def] identifier[align] ( identifier[args] ):
literal[string]
keyword[from] identifier[jcvi] . identifier[formats] . identifier[fastq] keyword[import] identifier[guessoffset]
identifier[p] = identifier[OptionParser] ( identifier[align] . identifier[__doc__] )
identifier[p] . identifier[set_firstN] ( identifier[firstN] = literal[int] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[set_cutoff] ( identifier[cutoff] = literal[int] )
identifier[p] . identifier[set_mateorientation] ( identifier[mateorientation] = literal[string] )
identifier[p] . identifier[set_sam_options] ( identifier[bowtie] = keyword[True] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
identifier[extra] = identifier[opts] . identifier[extra]
identifier[mo] = identifier[opts] . identifier[mateorientation]
keyword[if] identifier[mo] == literal[string] :
identifier[extra] += literal[string]
keyword[elif] identifier[mo] == literal[string] :
identifier[extra] += literal[string]
keyword[else] :
identifier[extra] += literal[string]
identifier[PE] = keyword[True]
keyword[if] identifier[len] ( identifier[args] )== literal[int] :
identifier[logging] . identifier[debug] ( literal[string] )
identifier[PE] = keyword[False]
keyword[elif] identifier[len] ( identifier[args] )== literal[int] :
identifier[logging] . identifier[debug] ( literal[string] )
keyword[else] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[firstN] = identifier[opts] . identifier[firstN]
identifier[mapped] = identifier[opts] . identifier[mapped]
identifier[unmapped] = identifier[opts] . identifier[unmapped]
identifier[fasta] = identifier[opts] . identifier[fasta]
identifier[gl] = literal[string] keyword[if] identifier[opts] . identifier[full] keyword[else] literal[string]
identifier[dbfile] , identifier[readfile] = identifier[args] [ literal[int] : literal[int] ]
identifier[dbfile] = identifier[check_index] ( identifier[dbfile] )
identifier[prefix] = identifier[get_prefix] ( identifier[readfile] , identifier[dbfile] )
identifier[samfile] , identifier[mapped] , identifier[unmapped] = identifier[get_samfile] ( identifier[readfile] , identifier[dbfile] , identifier[bowtie] = keyword[True] ,
identifier[mapped] = identifier[mapped] , identifier[unmapped] = identifier[unmapped] ,
identifier[bam] = identifier[opts] . identifier[bam] )
identifier[logfile] = identifier[prefix] + literal[string]
keyword[if] keyword[not] identifier[fasta] :
identifier[offset] = identifier[guessoffset] ([ identifier[readfile] ])
keyword[if] keyword[not] identifier[need_update] ( identifier[dbfile] , identifier[samfile] ):
identifier[logging] . identifier[error] ( literal[string] . identifier[format] ( identifier[samfile] ))
keyword[return] identifier[samfile] , identifier[logfile]
identifier[cmd] = literal[string] . identifier[format] ( identifier[dbfile] )
keyword[if] identifier[PE] :
identifier[r1] , identifier[r2] = identifier[args] [ literal[int] : literal[int] ]
identifier[cmd] += literal[string] . identifier[format] ( identifier[r1] , identifier[r2] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[opts] . identifier[cutoff] )
identifier[mtag] , identifier[utag] = literal[string] , literal[string]
keyword[else] :
identifier[cmd] += literal[string] . identifier[format] ( identifier[readfile] )
identifier[mtag] , identifier[utag] = literal[string] , literal[string]
keyword[if] identifier[mapped] :
identifier[cmd] += literal[string] . identifier[format] ( identifier[mtag] , identifier[mapped] )
keyword[if] identifier[unmapped] :
identifier[cmd] += literal[string] . identifier[format] ( identifier[utag] , identifier[unmapped] )
keyword[if] identifier[firstN] :
identifier[cmd] += literal[string] . identifier[format] ( identifier[firstN] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[opts] . identifier[cpus] )
keyword[if] identifier[fasta] :
identifier[cmd] += literal[string]
keyword[else] :
identifier[cmd] += literal[string] . identifier[format] ( identifier[offset] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[gl] )
keyword[if] identifier[opts] . identifier[reorder] :
identifier[cmd] += literal[string]
identifier[cmd] += literal[string] . identifier[format] ( identifier[extra] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[logfile] )
keyword[if] identifier[opts] . identifier[null] :
identifier[samfile] = literal[string]
identifier[cmd] = identifier[output_bam] ( identifier[cmd] , identifier[samfile] )
identifier[sh] ( identifier[cmd] )
identifier[print] ( identifier[open] ( identifier[logfile] ). identifier[read] (), identifier[file] = identifier[sys] . identifier[stderr] )
keyword[return] identifier[samfile] , identifier[logfile] | def align(args):
"""
%prog align database.fasta read1.fq [read2.fq]
Wrapper for `bowtie2` single-end or paired-end, depending on the number of args.
"""
from jcvi.formats.fastq import guessoffset
p = OptionParser(align.__doc__)
p.set_firstN(firstN=0)
p.add_option('--full', default=False, action='store_true', help='Enforce end-to-end alignment [default: local]')
p.add_option('--reorder', default=False, action='store_true', help='Keep the input read order [default: %default]')
p.add_option('--null', default=False, action='store_true', help='Do not write to SAM/BAM output')
p.add_option('--fasta', default=False, action='store_true', help='Query reads are FASTA')
p.set_cutoff(cutoff=800)
p.set_mateorientation(mateorientation='+-')
p.set_sam_options(bowtie=True)
(opts, args) = p.parse_args(args)
extra = opts.extra
mo = opts.mateorientation
if mo == '+-':
extra += '' # depends on [control=['if'], data=[]]
elif mo == '-+':
extra += '--rf' # depends on [control=['if'], data=[]]
else:
extra += '--ff'
PE = True
if len(args) == 2:
logging.debug('Single-end alignment')
PE = False # depends on [control=['if'], data=[]]
elif len(args) == 3:
logging.debug('Paired-end alignment') # depends on [control=['if'], data=[]]
else:
sys.exit(not p.print_help())
firstN = opts.firstN
mapped = opts.mapped
unmapped = opts.unmapped
fasta = opts.fasta
gl = '--end-to-end' if opts.full else '--local'
(dbfile, readfile) = args[0:2]
dbfile = check_index(dbfile)
prefix = get_prefix(readfile, dbfile)
(samfile, mapped, unmapped) = get_samfile(readfile, dbfile, bowtie=True, mapped=mapped, unmapped=unmapped, bam=opts.bam)
logfile = prefix + '.log'
if not fasta:
offset = guessoffset([readfile]) # depends on [control=['if'], data=[]]
if not need_update(dbfile, samfile):
logging.error('`{0}` exists. `bowtie2` already run.'.format(samfile))
return (samfile, logfile) # depends on [control=['if'], data=[]]
cmd = 'bowtie2 -x {0}'.format(dbfile)
if PE:
(r1, r2) = args[1:3]
cmd += ' -1 {0} -2 {1}'.format(r1, r2)
cmd += ' --maxins {0}'.format(opts.cutoff)
(mtag, utag) = ('--al-conc', '--un-conc') # depends on [control=['if'], data=[]]
else:
cmd += ' -U {0}'.format(readfile)
(mtag, utag) = ('--al', '--un')
if mapped:
cmd += ' {0} {1}'.format(mtag, mapped) # depends on [control=['if'], data=[]]
if unmapped:
cmd += ' {0} {1}'.format(utag, unmapped) # depends on [control=['if'], data=[]]
if firstN:
cmd += ' --upto {0}'.format(firstN) # depends on [control=['if'], data=[]]
cmd += ' -p {0}'.format(opts.cpus)
if fasta:
cmd += ' -f' # depends on [control=['if'], data=[]]
else:
cmd += ' --phred{0}'.format(offset)
cmd += ' {0}'.format(gl)
if opts.reorder:
cmd += ' --reorder' # depends on [control=['if'], data=[]]
cmd += ' {0}'.format(extra)
# Finally the log
cmd += ' 2> {0}'.format(logfile)
if opts.null:
samfile = '/dev/null' # depends on [control=['if'], data=[]]
cmd = output_bam(cmd, samfile)
sh(cmd)
print(open(logfile).read(), file=sys.stderr)
return (samfile, logfile) |
def _outside_contraction_fn(objective_function,
simplex,
objective_values,
face_centroid,
best_index,
worst_index,
reflected,
objective_at_reflected,
contraction,
shrinkage,
batch_evaluate_objective):
"""Creates the condition function pair for an outside contraction."""
def _contraction():
"""Performs a contraction."""
contracted = face_centroid + contraction * (reflected - face_centroid)
objective_at_contracted = objective_function(contracted)
is_contracted_acceptable = objective_at_contracted <= objective_at_reflected
def _accept_contraction():
next_simplex = _replace_at_index(simplex, worst_index, contracted)
objective_at_next_simplex = _replace_at_index(
objective_values,
worst_index,
objective_at_contracted)
return (False,
next_simplex,
objective_at_next_simplex,
1)
def _reject_contraction():
return _shrink_towards_best(objective_function,
simplex,
best_index,
shrinkage,
batch_evaluate_objective)
return prefer_static.cond(is_contracted_acceptable,
_accept_contraction,
_reject_contraction)
return _contraction | def function[_outside_contraction_fn, parameter[objective_function, simplex, objective_values, face_centroid, best_index, worst_index, reflected, objective_at_reflected, contraction, shrinkage, batch_evaluate_objective]]:
constant[Creates the condition function pair for an outside contraction.]
def function[_contraction, parameter[]]:
constant[Performs a contraction.]
variable[contracted] assign[=] binary_operation[name[face_centroid] + binary_operation[name[contraction] * binary_operation[name[reflected] - name[face_centroid]]]]
variable[objective_at_contracted] assign[=] call[name[objective_function], parameter[name[contracted]]]
variable[is_contracted_acceptable] assign[=] compare[name[objective_at_contracted] less_or_equal[<=] name[objective_at_reflected]]
def function[_accept_contraction, parameter[]]:
variable[next_simplex] assign[=] call[name[_replace_at_index], parameter[name[simplex], name[worst_index], name[contracted]]]
variable[objective_at_next_simplex] assign[=] call[name[_replace_at_index], parameter[name[objective_values], name[worst_index], name[objective_at_contracted]]]
return[tuple[[<ast.Constant object at 0x7da1b02fc610>, <ast.Name object at 0x7da1b02fc640>, <ast.Name object at 0x7da1b02fc670>, <ast.Constant object at 0x7da1b02fc6a0>]]]
def function[_reject_contraction, parameter[]]:
return[call[name[_shrink_towards_best], parameter[name[objective_function], name[simplex], name[best_index], name[shrinkage], name[batch_evaluate_objective]]]]
return[call[name[prefer_static].cond, parameter[name[is_contracted_acceptable], name[_accept_contraction], name[_reject_contraction]]]]
return[name[_contraction]] | keyword[def] identifier[_outside_contraction_fn] ( identifier[objective_function] ,
identifier[simplex] ,
identifier[objective_values] ,
identifier[face_centroid] ,
identifier[best_index] ,
identifier[worst_index] ,
identifier[reflected] ,
identifier[objective_at_reflected] ,
identifier[contraction] ,
identifier[shrinkage] ,
identifier[batch_evaluate_objective] ):
literal[string]
keyword[def] identifier[_contraction] ():
literal[string]
identifier[contracted] = identifier[face_centroid] + identifier[contraction] *( identifier[reflected] - identifier[face_centroid] )
identifier[objective_at_contracted] = identifier[objective_function] ( identifier[contracted] )
identifier[is_contracted_acceptable] = identifier[objective_at_contracted] <= identifier[objective_at_reflected]
keyword[def] identifier[_accept_contraction] ():
identifier[next_simplex] = identifier[_replace_at_index] ( identifier[simplex] , identifier[worst_index] , identifier[contracted] )
identifier[objective_at_next_simplex] = identifier[_replace_at_index] (
identifier[objective_values] ,
identifier[worst_index] ,
identifier[objective_at_contracted] )
keyword[return] ( keyword[False] ,
identifier[next_simplex] ,
identifier[objective_at_next_simplex] ,
literal[int] )
keyword[def] identifier[_reject_contraction] ():
keyword[return] identifier[_shrink_towards_best] ( identifier[objective_function] ,
identifier[simplex] ,
identifier[best_index] ,
identifier[shrinkage] ,
identifier[batch_evaluate_objective] )
keyword[return] identifier[prefer_static] . identifier[cond] ( identifier[is_contracted_acceptable] ,
identifier[_accept_contraction] ,
identifier[_reject_contraction] )
keyword[return] identifier[_contraction] | def _outside_contraction_fn(objective_function, simplex, objective_values, face_centroid, best_index, worst_index, reflected, objective_at_reflected, contraction, shrinkage, batch_evaluate_objective):
"""Creates the condition function pair for an outside contraction."""
def _contraction():
"""Performs a contraction."""
contracted = face_centroid + contraction * (reflected - face_centroid)
objective_at_contracted = objective_function(contracted)
is_contracted_acceptable = objective_at_contracted <= objective_at_reflected
def _accept_contraction():
next_simplex = _replace_at_index(simplex, worst_index, contracted)
objective_at_next_simplex = _replace_at_index(objective_values, worst_index, objective_at_contracted)
return (False, next_simplex, objective_at_next_simplex, 1)
def _reject_contraction():
return _shrink_towards_best(objective_function, simplex, best_index, shrinkage, batch_evaluate_objective)
return prefer_static.cond(is_contracted_acceptable, _accept_contraction, _reject_contraction)
return _contraction |
def radio_status_send(self, rssi, remrssi, txbuf, noise, remnoise, rxerrors, fixed, force_mavlink1=False):
                '''
                Status generated by radio and injected into MAVLink stream.

                Encodes a RADIO_STATUS message (via radio_status_encode) and
                transmits it on this MAVLink connection.

                rssi              : Local signal strength (uint8_t)
                remrssi           : Remote signal strength (uint8_t)
                txbuf             : Remaining free buffer space in percent. (uint8_t)
                noise             : Background noise level (uint8_t)
                remnoise          : Remote background noise level (uint8_t)
                rxerrors          : Receive errors (uint16_t)
                fixed             : Count of error corrected packets (uint16_t)
                force_mavlink1    : Passed through to send(); presumably forces
                                    MAVLink-v1 framing -- confirm in send()
                '''
return self.send(self.radio_status_encode(rssi, remrssi, txbuf, noise, remnoise, rxerrors, fixed), force_mavlink1=force_mavlink1) | def function[radio_status_send, parameter[self, rssi, remrssi, txbuf, noise, remnoise, rxerrors, fixed, force_mavlink1]]:
constant[
Status generated by radio and injected into MAVLink stream.
rssi : Local signal strength (uint8_t)
remrssi : Remote signal strength (uint8_t)
txbuf : Remaining free buffer space in percent. (uint8_t)
noise : Background noise level (uint8_t)
remnoise : Remote background noise level (uint8_t)
rxerrors : Receive errors (uint16_t)
fixed : Count of error corrected packets (uint16_t)
]
return[call[name[self].send, parameter[call[name[self].radio_status_encode, parameter[name[rssi], name[remrssi], name[txbuf], name[noise], name[remnoise], name[rxerrors], name[fixed]]]]]] | keyword[def] identifier[radio_status_send] ( identifier[self] , identifier[rssi] , identifier[remrssi] , identifier[txbuf] , identifier[noise] , identifier[remnoise] , identifier[rxerrors] , identifier[fixed] , identifier[force_mavlink1] = keyword[False] ):
literal[string]
keyword[return] identifier[self] . identifier[send] ( identifier[self] . identifier[radio_status_encode] ( identifier[rssi] , identifier[remrssi] , identifier[txbuf] , identifier[noise] , identifier[remnoise] , identifier[rxerrors] , identifier[fixed] ), identifier[force_mavlink1] = identifier[force_mavlink1] ) | def radio_status_send(self, rssi, remrssi, txbuf, noise, remnoise, rxerrors, fixed, force_mavlink1=False):
"""
Status generated by radio and injected into MAVLink stream.
rssi : Local signal strength (uint8_t)
remrssi : Remote signal strength (uint8_t)
txbuf : Remaining free buffer space in percent. (uint8_t)
noise : Background noise level (uint8_t)
remnoise : Remote background noise level (uint8_t)
rxerrors : Receive errors (uint16_t)
fixed : Count of error corrected packets (uint16_t)
"""
return self.send(self.radio_status_encode(rssi, remrssi, txbuf, noise, remnoise, rxerrors, fixed), force_mavlink1=force_mavlink1) |
def transaction(self):
        """
        Sets up a context where all the statements within it are run within
        a single database transaction. For internal use only.

        Implemented as a generator: the caller enters the transaction,
        receives this object at the ``yield``, and the commit/rollback
        bookkeeping runs when control returns here.
        """
        # The idea here is to fake the nesting of transactions: _depth counts
        # how many transaction contexts are currently stacked. Only when
        # we've gotten back to the topmost transaction context (_depth == 0)
        # do we actually commit or rollback.
        with self.mdr:
            try:
                self._depth += 1
                yield self
                self._depth -= 1
            except self.mdr.OperationalError:
                # We've lost the connection, so there's no sense in
                # attempting to roll back the transaction.
                self._depth -= 1
                raise
            except:
                # Bare except is deliberate: any other failure (including
                # non-Exception raises) undoes one nesting level, rolls back
                # at the outermost context, and is re-raised to the caller.
                self._depth -= 1
                if self._depth == 0:
                    self.mdr.rollback()
                raise
            # Success path: commit only once the outermost context unwinds.
            if self._depth == 0:
self.mdr.commit() | def function[transaction, parameter[self]]:
constant[
Sets up a context where all the statements within it are ran within
a single database transaction. For internal use only.
]
with name[self].mdr begin[:]
<ast.Try object at 0x7da18fe92d70>
if compare[name[self]._depth equal[==] constant[0]] begin[:]
call[name[self].mdr.commit, parameter[]] | keyword[def] identifier[transaction] ( identifier[self] ):
literal[string]
keyword[with] identifier[self] . identifier[mdr] :
keyword[try] :
identifier[self] . identifier[_depth] += literal[int]
keyword[yield] identifier[self]
identifier[self] . identifier[_depth] -= literal[int]
keyword[except] identifier[self] . identifier[mdr] . identifier[OperationalError] :
identifier[self] . identifier[_depth] -= literal[int]
keyword[raise]
keyword[except] :
identifier[self] . identifier[_depth] -= literal[int]
keyword[if] identifier[self] . identifier[_depth] == literal[int] :
identifier[self] . identifier[mdr] . identifier[rollback] ()
keyword[raise]
keyword[if] identifier[self] . identifier[_depth] == literal[int] :
identifier[self] . identifier[mdr] . identifier[commit] () | def transaction(self):
"""
Sets up a context where all the statements within it are ran within
a single database transaction. For internal use only.
"""
# The idea here is to fake the nesting of transactions. Only when
# we've gotten back to the topmost transaction context do we actually
# commit or rollback.
with self.mdr:
try:
self._depth += 1
yield self
self._depth -= 1 # depends on [control=['try'], data=[]]
except self.mdr.OperationalError:
# We've lost the connection, so there's no sense in
# attempting to roll back back the transaction.
self._depth -= 1
raise # depends on [control=['except'], data=[]]
except:
self._depth -= 1
if self._depth == 0:
self.mdr.rollback() # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=[]]
if self._depth == 0:
self.mdr.commit() # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]] |
def rename_page(self, old_slug, new_title):
        '''
        Load the page identified by *old_slug* and rename it to *new_title*.

        old_slug  : slug of the existing page (looked up with isslug=True)
        new_title : new title for the page; p.rename presumably also updates
                    the stored slug -- confirm in s2page.Page.rename
        '''
        # Load the page by slug (isslug=True) rather than by id/title.
        p = s2page.Page(self, old_slug, isslug=True)
p.rename(new_title) | def function[rename_page, parameter[self, old_slug, new_title]]:
constant[Load the page corresponding to the slug, and rename it.]
variable[p] assign[=] call[name[s2page].Page, parameter[name[self], name[old_slug]]]
call[name[p].rename, parameter[name[new_title]]] | keyword[def] identifier[rename_page] ( identifier[self] , identifier[old_slug] , identifier[new_title] ):
literal[string]
identifier[p] = identifier[s2page] . identifier[Page] ( identifier[self] , identifier[old_slug] , identifier[isslug] = keyword[True] )
identifier[p] . identifier[rename] ( identifier[new_title] ) | def rename_page(self, old_slug, new_title):
"""Load the page corresponding to the slug, and rename it."""
#load page
p = s2page.Page(self, old_slug, isslug=True)
p.rename(new_title) |
def lognet_vxlan_walker(prepush = True):
    """
    Return a walker function to retrieve necessary information from ObjectDB

    The returned callable ``_walk_lognet(key, value, walk, save)`` saves the
    logical network itself, and -- when its physical network is of type
    'vxlan' -- the associated VXLANEndpointSet. With ``prepush`` it also
    saves the LogicalNetworkMap, each logical port, and each port's
    LogicalPortVXLANInfo.

    :param prepush: if True, additionally walk and save all logical ports of
                    the network (for pre-pushing state).
    """
    def _walk_lognet(key, value, walk, save):
        # Always record the logical network key itself.
        save(key)
        if value is None:
            return
        # NOTE(review): KeyError from walk() appears to mean the referenced
        # object is not retrieved yet; each such case is skipped and only the
        # successfully walked keys are saved -- confirm against the ObjectDB
        # walker contract.
        try:
            phynet = walk(value.physicalnetwork.getkey())
        except KeyError:
            pass
        else:
            if phynet is not None and getattr(phynet, 'type') == 'vxlan':
                # VXLAN networks also need their endpoint set.
                try:
                    vxlan_endpoint_key = VXLANEndpointSet.default_key(value.id)
                    walk(vxlan_endpoint_key)
                except KeyError:
                    pass
                else:
                    save(vxlan_endpoint_key)
                if prepush:
                    # Acquire all logical ports
                    try:
                        netmap = walk(LogicalNetworkMap.default_key(value.id))
                    except KeyError:
                        pass
                    else:
                        save(netmap.getkey())
                        for logport in netmap.ports.dataset():
                            try:
                                _ = walk(logport.getkey())
                            except KeyError:
                                pass
                            else:
                                save(logport.getkey())
                            # Derive the port id from the key indices and
                            # fetch the per-port VXLAN info as well.
                            try:
                                _, (portid,) = LogicalPort._getIndices(logport.getkey())
                                portinfokey = LogicalPortVXLANInfo.default_key(portid)
                                _ = walk(portinfokey)
                            except KeyError:
                                pass
                            else:
                                save(portinfokey)
return _walk_lognet | def function[lognet_vxlan_walker, parameter[prepush]]:
constant[
Return a walker function to retrieve necessary information from ObjectDB
]
def function[_walk_lognet, parameter[key, value, walk, save]]:
call[name[save], parameter[name[key]]]
if compare[name[value] is constant[None]] begin[:]
return[None]
<ast.Try object at 0x7da20c6e54e0>
return[name[_walk_lognet]] | keyword[def] identifier[lognet_vxlan_walker] ( identifier[prepush] = keyword[True] ):
literal[string]
keyword[def] identifier[_walk_lognet] ( identifier[key] , identifier[value] , identifier[walk] , identifier[save] ):
identifier[save] ( identifier[key] )
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return]
keyword[try] :
identifier[phynet] = identifier[walk] ( identifier[value] . identifier[physicalnetwork] . identifier[getkey] ())
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[else] :
keyword[if] identifier[phynet] keyword[is] keyword[not] keyword[None] keyword[and] identifier[getattr] ( identifier[phynet] , literal[string] )== literal[string] :
keyword[try] :
identifier[vxlan_endpoint_key] = identifier[VXLANEndpointSet] . identifier[default_key] ( identifier[value] . identifier[id] )
identifier[walk] ( identifier[vxlan_endpoint_key] )
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[else] :
identifier[save] ( identifier[vxlan_endpoint_key] )
keyword[if] identifier[prepush] :
keyword[try] :
identifier[netmap] = identifier[walk] ( identifier[LogicalNetworkMap] . identifier[default_key] ( identifier[value] . identifier[id] ))
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[else] :
identifier[save] ( identifier[netmap] . identifier[getkey] ())
keyword[for] identifier[logport] keyword[in] identifier[netmap] . identifier[ports] . identifier[dataset] ():
keyword[try] :
identifier[_] = identifier[walk] ( identifier[logport] . identifier[getkey] ())
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[else] :
identifier[save] ( identifier[logport] . identifier[getkey] ())
keyword[try] :
identifier[_] ,( identifier[portid] ,)= identifier[LogicalPort] . identifier[_getIndices] ( identifier[logport] . identifier[getkey] ())
identifier[portinfokey] = identifier[LogicalPortVXLANInfo] . identifier[default_key] ( identifier[portid] )
identifier[_] = identifier[walk] ( identifier[portinfokey] )
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[else] :
identifier[save] ( identifier[portinfokey] )
keyword[return] identifier[_walk_lognet] | def lognet_vxlan_walker(prepush=True):
"""
Return a walker function to retrieve necessary information from ObjectDB
"""
def _walk_lognet(key, value, walk, save):
save(key)
if value is None:
return # depends on [control=['if'], data=[]]
try:
phynet = walk(value.physicalnetwork.getkey()) # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
else:
if phynet is not None and getattr(phynet, 'type') == 'vxlan':
try:
vxlan_endpoint_key = VXLANEndpointSet.default_key(value.id)
walk(vxlan_endpoint_key) # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
else:
save(vxlan_endpoint_key)
if prepush:
# Acquire all logical ports
try:
netmap = walk(LogicalNetworkMap.default_key(value.id)) # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
else:
save(netmap.getkey())
for logport in netmap.ports.dataset():
try:
_ = walk(logport.getkey()) # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
else:
save(logport.getkey())
try:
(_, (portid,)) = LogicalPort._getIndices(logport.getkey())
portinfokey = LogicalPortVXLANInfo.default_key(portid)
_ = walk(portinfokey) # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
else:
save(portinfokey) # depends on [control=['for'], data=['logport']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return _walk_lognet |
def _transfer_str(self, conn, tmp, name, data):
        ''' transfer string to remote file

        Serializes *data* (dicts are jsonified first) into a local temp file,
        uploads it to ``<tmp>/<name>`` over *conn*, removes the temp file,
        and returns the remote path.
        '''
        if type(data) == dict:
            data = utils.jsonify(data)
        # Write the payload to a local temporary file first.
        afd, afile = tempfile.mkstemp()
        afo = os.fdopen(afd, 'w')
        try:
            # NOTE(review): writes encoded bytes to a text-mode file -- valid
            # on Python 2, would raise TypeError on Python 3. Also, the bare
            # except reports any write failure as an encoding failure.
            afo.write(data.encode('utf8'))
        except:
            raise errors.AnsibleError("failure encoding into utf-8")
        afo.flush()
        afo.close()
        remote = os.path.join(tmp, name)
        # Upload, then always remove the local temp file.
        try:
            conn.put_file(afile, remote)
        finally:
            os.unlink(afile)
return remote | def function[_transfer_str, parameter[self, conn, tmp, name, data]]:
constant[ transfer string to remote file ]
if compare[call[name[type], parameter[name[data]]] equal[==] name[dict]] begin[:]
variable[data] assign[=] call[name[utils].jsonify, parameter[name[data]]]
<ast.Tuple object at 0x7da18f721a80> assign[=] call[name[tempfile].mkstemp, parameter[]]
variable[afo] assign[=] call[name[os].fdopen, parameter[name[afd], constant[w]]]
<ast.Try object at 0x7da18f722ef0>
call[name[afo].flush, parameter[]]
call[name[afo].close, parameter[]]
variable[remote] assign[=] call[name[os].path.join, parameter[name[tmp], name[name]]]
<ast.Try object at 0x7da18f723910>
return[name[remote]] | keyword[def] identifier[_transfer_str] ( identifier[self] , identifier[conn] , identifier[tmp] , identifier[name] , identifier[data] ):
literal[string]
keyword[if] identifier[type] ( identifier[data] )== identifier[dict] :
identifier[data] = identifier[utils] . identifier[jsonify] ( identifier[data] )
identifier[afd] , identifier[afile] = identifier[tempfile] . identifier[mkstemp] ()
identifier[afo] = identifier[os] . identifier[fdopen] ( identifier[afd] , literal[string] )
keyword[try] :
identifier[afo] . identifier[write] ( identifier[data] . identifier[encode] ( literal[string] ))
keyword[except] :
keyword[raise] identifier[errors] . identifier[AnsibleError] ( literal[string] )
identifier[afo] . identifier[flush] ()
identifier[afo] . identifier[close] ()
identifier[remote] = identifier[os] . identifier[path] . identifier[join] ( identifier[tmp] , identifier[name] )
keyword[try] :
identifier[conn] . identifier[put_file] ( identifier[afile] , identifier[remote] )
keyword[finally] :
identifier[os] . identifier[unlink] ( identifier[afile] )
keyword[return] identifier[remote] | def _transfer_str(self, conn, tmp, name, data):
""" transfer string to remote file """
if type(data) == dict:
data = utils.jsonify(data) # depends on [control=['if'], data=[]]
(afd, afile) = tempfile.mkstemp()
afo = os.fdopen(afd, 'w')
try:
afo.write(data.encode('utf8')) # depends on [control=['try'], data=[]]
except:
raise errors.AnsibleError('failure encoding into utf-8') # depends on [control=['except'], data=[]]
afo.flush()
afo.close()
remote = os.path.join(tmp, name)
try:
conn.put_file(afile, remote) # depends on [control=['try'], data=[]]
finally:
os.unlink(afile)
return remote |
def generate_hosts(config, app):
    '''
    Append an /etc/hosts entry for every container of *app* to every
    container's hosts file, so the containers can resolve each other by name.

    app: {
        "container id": container details
        ...
    }
    '''
    # hosts = {app[container]["NetworkSettings"]["IPAddress"]:app[container]["Name"].replace('/','') for container in app}
    # Map container IP -> [container name] (leading '/' stripped from names).
    hosts = {}
    for container in app:
        hosts[app[container]["NetworkSettings"]["IPAddress"]] = [app[container]["Name"].replace('/','')]
    for container in app:
        # Under boot2docker the hosts file lives under the local config path
        # instead of the path Docker reports in HostsPath.
        path = (app[container]["HostsPath"]
                if boot2docker.has() is not True
                else os.path.join(config["config_path"],"docker","containers",app[container]["Id"],"hosts"))
        try:
            with open(path, 'r+') as f:
                for host in hosts:
                    f.write("%s\t%s\n"%(host," ".join(hosts[host])))
        except Exception as e:
            # Best-effort: report the failure and continue with the rest.
            utils.error(e)
utils.error(e) | def function[generate_hosts, parameter[config, app]]:
constant[
app: {
"container id": continer details
...
}
]
variable[hosts] assign[=] dictionary[[], []]
for taget[name[container]] in starred[name[app]] begin[:]
call[name[hosts]][call[call[call[name[app]][name[container]]][constant[NetworkSettings]]][constant[IPAddress]]] assign[=] list[[<ast.Call object at 0x7da1b09e9a80>]]
for taget[name[container]] in starred[name[app]] begin[:]
variable[path] assign[=] <ast.IfExp object at 0x7da1b09e8910>
<ast.Try object at 0x7da1b09e8130> | keyword[def] identifier[generate_hosts] ( identifier[config] , identifier[app] ):
literal[string]
identifier[hosts] ={}
keyword[for] identifier[container] keyword[in] identifier[app] :
identifier[hosts] [ identifier[app] [ identifier[container] ][ literal[string] ][ literal[string] ]]=[ identifier[app] [ identifier[container] ][ literal[string] ]. identifier[replace] ( literal[string] , literal[string] )]
keyword[for] identifier[container] keyword[in] identifier[app] :
identifier[path] =( identifier[app] [ identifier[container] ][ literal[string] ]
keyword[if] identifier[boot2docker] . identifier[has] () keyword[is] keyword[not] keyword[True]
keyword[else] identifier[os] . identifier[path] . identifier[join] ( identifier[config] [ literal[string] ], literal[string] , literal[string] , identifier[app] [ identifier[container] ][ literal[string] ], literal[string] ))
keyword[try] :
keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] :
keyword[for] identifier[host] keyword[in] identifier[hosts] :
identifier[f] . identifier[write] ( literal[string] %( identifier[host] , literal[string] . identifier[join] ( identifier[hosts] [ identifier[host] ])))
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[utils] . identifier[error] ( identifier[e] ) | def generate_hosts(config, app):
"""
app: {
"container id": continer details
...
}
"""
# hosts = {app[container]["NetworkSettings"]["IPAddress"]:app[container]["Name"].replace('/','') for container in app}
hosts = {}
for container in app:
hosts[app[container]['NetworkSettings']['IPAddress']] = [app[container]['Name'].replace('/', '')] # depends on [control=['for'], data=['container']]
for container in app:
path = app[container]['HostsPath'] if boot2docker.has() is not True else os.path.join(config['config_path'], 'docker', 'containers', app[container]['Id'], 'hosts')
try:
with open(path, 'r+') as f:
for host in hosts:
f.write('%s\t%s\n' % (host, ' '.join(hosts[host]))) # depends on [control=['for'], data=['host']] # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
except Exception as e:
utils.error(e) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['container']] |
def parse_keyvalue(self, tup_tree):
        """
        Parse a KEYVALUE element and return the keybinding value as a CIM data
        type object, based upon the type information in its VALUETYPE and TYPE
        attributes, if present.
        If TYPE is specified, its value is used to create the corresponding CIM
        data type object. in this case, VALUETYPE is ignored and may be
        omitted. Discrepancies between TYPE and VALUETYPE are not checked.
        Note that DSP0201 does not detail how such discrepancies should be
        resolved, including the precedence of the DTD-defined default for
        VALUETYPE over a specified TYPE value.
        If TYPE is not specified but VALUETYPE is specified, the CIM type is
        defaulted for a VALUETYPE of 'string' and 'boolean'. For a VALUETYPE of
        'numeric', the CIM type remains undetermined and the numeric values are
        returned as Python int/long or float objects.
        ::
            <!ELEMENT KEYVALUE (#PCDATA)>
            <!ATTLIST KEYVALUE
                VALUETYPE (string | boolean | numeric) "string"
                %CIMType;  #IMPLIED>

        :raises CIMXMLParseError: if VALUETYPE has an invalid value.
        """
        # KEYVALUE carries no child elements; its value is PCDATA only.
        self.check_node(tup_tree, 'KEYVALUE', (), ('VALUETYPE', 'TYPE'), (),
                        allow_pcdata=True)
        data = self.pcdata(tup_tree)
        attrl = attrs(tup_tree)
        valuetype = attrl.get('VALUETYPE', None)
        cimtype = attrl.get('TYPE', None)
        # Tolerate that some WBEM servers return TYPE="" instead of omitting
        # TYPE (e.g. the WBEM Solutions server).
        if cimtype == '':
            cimtype = None
        # Default the CIM type from VALUETYPE if not specified in TYPE
        if cimtype is None:
            if valuetype is None or valuetype == 'string':
                # DTD default for VALUETYPE is 'string'.
                cimtype = 'string'
            elif valuetype == 'boolean':
                cimtype = 'boolean'
            elif valuetype == 'numeric':
                # Leave cimtype as None: numeric values are unpacked as
                # plain Python numbers.
                pass
            else:
                raise CIMXMLParseError(
                    _format("Element {0!A} has invalid 'VALUETYPE' attribute "
                            "value {1!A}", name(tup_tree), valuetype),
                    conn_id=self.conn_id)
return self.unpack_single_value(data, cimtype) | def function[parse_keyvalue, parameter[self, tup_tree]]:
constant[
Parse a KEYVALUE element and return the keybinding value as a CIM data
type object, based upon the type information in its VALUETYPE and TYPE
attributes, if present.
If TYPE is specified, its value is used to create the corresponding CIM
data type object. in this case, VALUETYPE is ignored and may be
omitted. Discrepancies between TYPE and VALUETYPE are not checked.
Note that DSP0201 does not detail how such discrepancies should be
resolved, including the precedence of the DTD-defined default for
VALUETYPE over a specified TYPE value.
If TYPE is not specified but VALUETYPE is specified, the CIM type is
defaulted for a VALUETYPE of 'string' and 'boolean'. For a VALUETYPE of
'numeric', the CIM type remains undetermined and the numeric values are
returned as Python int/long or float objects.
::
<!ELEMENT KEYVALUE (#PCDATA)>
<!ATTLIST KEYVALUE
VALUETYPE (string | boolean | numeric) "string"
%CIMType; #IMPLIED>
]
call[name[self].check_node, parameter[name[tup_tree], constant[KEYVALUE], tuple[[]], tuple[[<ast.Constant object at 0x7da2041d8100>, <ast.Constant object at 0x7da1b0b11cf0>]], tuple[[]]]]
variable[data] assign[=] call[name[self].pcdata, parameter[name[tup_tree]]]
variable[attrl] assign[=] call[name[attrs], parameter[name[tup_tree]]]
variable[valuetype] assign[=] call[name[attrl].get, parameter[constant[VALUETYPE], constant[None]]]
variable[cimtype] assign[=] call[name[attrl].get, parameter[constant[TYPE], constant[None]]]
if compare[name[cimtype] equal[==] constant[]] begin[:]
variable[cimtype] assign[=] constant[None]
if compare[name[cimtype] is constant[None]] begin[:]
if <ast.BoolOp object at 0x7da1b0b125f0> begin[:]
variable[cimtype] assign[=] constant[string]
return[call[name[self].unpack_single_value, parameter[name[data], name[cimtype]]]] | keyword[def] identifier[parse_keyvalue] ( identifier[self] , identifier[tup_tree] ):
literal[string]
identifier[self] . identifier[check_node] ( identifier[tup_tree] , literal[string] ,(),( literal[string] , literal[string] ),(),
identifier[allow_pcdata] = keyword[True] )
identifier[data] = identifier[self] . identifier[pcdata] ( identifier[tup_tree] )
identifier[attrl] = identifier[attrs] ( identifier[tup_tree] )
identifier[valuetype] = identifier[attrl] . identifier[get] ( literal[string] , keyword[None] )
identifier[cimtype] = identifier[attrl] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[cimtype] == literal[string] :
identifier[cimtype] = keyword[None]
keyword[if] identifier[cimtype] keyword[is] keyword[None] :
keyword[if] identifier[valuetype] keyword[is] keyword[None] keyword[or] identifier[valuetype] == literal[string] :
identifier[cimtype] = literal[string]
keyword[elif] identifier[valuetype] == literal[string] :
identifier[cimtype] = literal[string]
keyword[elif] identifier[valuetype] == literal[string] :
keyword[pass]
keyword[else] :
keyword[raise] identifier[CIMXMLParseError] (
identifier[_format] ( literal[string]
literal[string] , identifier[name] ( identifier[tup_tree] ), identifier[valuetype] ),
identifier[conn_id] = identifier[self] . identifier[conn_id] )
keyword[return] identifier[self] . identifier[unpack_single_value] ( identifier[data] , identifier[cimtype] ) | def parse_keyvalue(self, tup_tree):
"""
Parse a KEYVALUE element and return the keybinding value as a CIM data
type object, based upon the type information in its VALUETYPE and TYPE
attributes, if present.
If TYPE is specified, its value is used to create the corresponding CIM
data type object. in this case, VALUETYPE is ignored and may be
omitted. Discrepancies between TYPE and VALUETYPE are not checked.
Note that DSP0201 does not detail how such discrepancies should be
resolved, including the precedence of the DTD-defined default for
VALUETYPE over a specified TYPE value.
If TYPE is not specified but VALUETYPE is specified, the CIM type is
defaulted for a VALUETYPE of 'string' and 'boolean'. For a VALUETYPE of
'numeric', the CIM type remains undetermined and the numeric values are
returned as Python int/long or float objects.
::
<!ELEMENT KEYVALUE (#PCDATA)>
<!ATTLIST KEYVALUE
VALUETYPE (string | boolean | numeric) "string"
%CIMType; #IMPLIED>
"""
self.check_node(tup_tree, 'KEYVALUE', (), ('VALUETYPE', 'TYPE'), (), allow_pcdata=True)
data = self.pcdata(tup_tree)
attrl = attrs(tup_tree)
valuetype = attrl.get('VALUETYPE', None)
cimtype = attrl.get('TYPE', None)
# Tolerate that some WBEM servers return TYPE="" instead of omitting
# TYPE (e.g. the WBEM Solutions server).
if cimtype == '':
cimtype = None # depends on [control=['if'], data=['cimtype']]
# Default the CIM type from VALUETYPE if not specified in TYPE
if cimtype is None:
if valuetype is None or valuetype == 'string':
cimtype = 'string' # depends on [control=['if'], data=[]]
elif valuetype == 'boolean':
cimtype = 'boolean' # depends on [control=['if'], data=[]]
elif valuetype == 'numeric':
pass # depends on [control=['if'], data=[]]
else:
raise CIMXMLParseError(_format("Element {0!A} has invalid 'VALUETYPE' attribute value {1!A}", name(tup_tree), valuetype), conn_id=self.conn_id) # depends on [control=['if'], data=['cimtype']]
return self.unpack_single_value(data, cimtype) |
def _bin_exp(self, n_bin, scale=1.0):
        """ Calculate the bin locations to approximate exponential distribution.
            It breaks the cumulative probability of exponential distribution
            into n_bin equal bins, each covering 1 / n_bin probability. Then it
            calculates the center of mass in each bins and returns the
            centers of mass. So, it approximates the exponential distribution
            with n_bin of Delta function weighted by 1 / n_bin, at the
            locations of these centers of mass.
            Parameters:
            -----------
            n_bin: int
                The number of bins to approximate the exponential distribution
            scale: float.
                The scale parameter of the exponential distribution, defined in
                the same way as scipy.stats. It does not influence the ratios
                between the bins, but just controls the spacing between the bins.
                So generally users should not change its default.
            Returns:
            --------
            bins: numpy array of size [n_bin,]
                The centers of mass for each segment of the
                exponential distribution.
        """
        # isf of equally spaced probabilities gives the bin boundaries;
        # flipping puts them in ascending order (isf is decreasing in p).
        # The first boundary is isf(1) = 0 and the last is isf(0) = inf.
        boundaries = np.flip(scipy.stats.expon.isf(
            np.linspace(0, 1, n_bin + 1),
            scale=scale), axis=0)
        bins = np.empty(n_bin)
        # Center of mass of the exponential density restricted to each segment.
        for i in np.arange(n_bin):
            bins[i] = utils.center_mass_exp(
                (boundaries[i], boundaries[i + 1]), scale=scale)
return bins | def function[_bin_exp, parameter[self, n_bin, scale]]:
constant[ Calculate the bin locations to approximate exponential distribution.
It breaks the cumulative probability of exponential distribution
into n_bin equal bins, each covering 1 / n_bin probability. Then it
calculates the center of mass in each bins and returns the
centers of mass. So, it approximates the exponential distribution
with n_bin of Delta function weighted by 1 / n_bin, at the
locations of these centers of mass.
Parameters:
-----------
n_bin: int
The number of bins to approximate the exponential distribution
scale: float.
The scale parameter of the exponential distribution, defined in
the same way as scipy.stats. It does not influence the ratios
between the bins, but just controls the spacing between the bins.
So generally users should not change its default.
Returns:
--------
bins: numpy array of size [n_bin,]
The centers of mass for each segment of the
exponential distribution.
]
variable[boundaries] assign[=] call[name[np].flip, parameter[call[name[scipy].stats.expon.isf, parameter[call[name[np].linspace, parameter[constant[0], constant[1], binary_operation[name[n_bin] + constant[1]]]]]]]]
variable[bins] assign[=] call[name[np].empty, parameter[name[n_bin]]]
for taget[name[i]] in starred[call[name[np].arange, parameter[name[n_bin]]]] begin[:]
call[name[bins]][name[i]] assign[=] call[name[utils].center_mass_exp, parameter[tuple[[<ast.Subscript object at 0x7da1b08e8370>, <ast.Subscript object at 0x7da1b08e8280>]]]]
return[name[bins]] | keyword[def] identifier[_bin_exp] ( identifier[self] , identifier[n_bin] , identifier[scale] = literal[int] ):
literal[string]
identifier[boundaries] = identifier[np] . identifier[flip] ( identifier[scipy] . identifier[stats] . identifier[expon] . identifier[isf] (
identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[n_bin] + literal[int] ),
identifier[scale] = identifier[scale] ), identifier[axis] = literal[int] )
identifier[bins] = identifier[np] . identifier[empty] ( identifier[n_bin] )
keyword[for] identifier[i] keyword[in] identifier[np] . identifier[arange] ( identifier[n_bin] ):
identifier[bins] [ identifier[i] ]= identifier[utils] . identifier[center_mass_exp] (
( identifier[boundaries] [ identifier[i] ], identifier[boundaries] [ identifier[i] + literal[int] ]), identifier[scale] = identifier[scale] )
keyword[return] identifier[bins] | def _bin_exp(self, n_bin, scale=1.0):
""" Calculate the bin locations to approximate exponential distribution.
It breaks the cumulative probability of exponential distribution
into n_bin equal bins, each covering 1 / n_bin probability. Then it
calculates the center of mass in each bins and returns the
centers of mass. So, it approximates the exponential distribution
with n_bin of Delta function weighted by 1 / n_bin, at the
locations of these centers of mass.
Parameters:
-----------
n_bin: int
The number of bins to approximate the exponential distribution
scale: float.
The scale parameter of the exponential distribution, defined in
the same way as scipy.stats. It does not influence the ratios
between the bins, but just controls the spacing between the bins.
So generally users should not change its default.
Returns:
--------
bins: numpy array of size [n_bin,]
The centers of mass for each segment of the
exponential distribution.
"""
boundaries = np.flip(scipy.stats.expon.isf(np.linspace(0, 1, n_bin + 1), scale=scale), axis=0)
bins = np.empty(n_bin)
for i in np.arange(n_bin):
bins[i] = utils.center_mass_exp((boundaries[i], boundaries[i + 1]), scale=scale) # depends on [control=['for'], data=['i']]
return bins |
def open_channel(self):
        """Open a new channel with RabbitMQ by issuing the Channel.Open RPC
        command. When RabbitMQ responds that the channel is open, the
        on_channel_open callback will be invoked by pika.

        This call is asynchronous: it only requests the channel; the channel
        object is delivered later to on_channel_open.
        """
        _logger.info('Creating a new channel')
self._connection.channel(on_open_callback=self.on_channel_open) | def function[open_channel, parameter[self]]:
constant[Open a new channel with RabbitMQ by issuing the Channel.Open RPC
command. When RabbitMQ responds that the channel is open, the
on_channel_open callback will be invoked by pika.
]
call[name[_logger].info, parameter[constant[Creating a new channel]]]
call[name[self]._connection.channel, parameter[]] | keyword[def] identifier[open_channel] ( identifier[self] ):
literal[string]
identifier[_logger] . identifier[info] ( literal[string] )
identifier[self] . identifier[_connection] . identifier[channel] ( identifier[on_open_callback] = identifier[self] . identifier[on_channel_open] ) | def open_channel(self):
"""Open a new channel with RabbitMQ by issuing the Channel.Open RPC
command. When RabbitMQ responds that the channel is open, the
on_channel_open callback will be invoked by pika.
"""
_logger.info('Creating a new channel')
self._connection.channel(on_open_callback=self.on_channel_open) |
def _get_securitygroupname_id(securitygroupname_list):
    '''
    Returns the SecurityGroupIds of the given SecurityGroupNames to use.

    Accepts a single group name or a list of names; queries AWS
    DescribeSecurityGroups and returns the list of matching group IDs
    (deduplicated, in no particular order).
    '''
    securitygroupid_set = set()
    # Normalize a single name into a one-element list.
    if not isinstance(securitygroupname_list, list):
        securitygroupname_list = [securitygroupname_list]
    params = {'Action': 'DescribeSecurityGroups'}
    # Scan all security groups in the configured location/provider and keep
    # the IDs whose names were requested.
    for sg in aws.query(params, location=get_location(),
                        provider=get_provider(), opts=__opts__, sigver='4'):
        if sg['groupName'] in securitygroupname_list:
            log.debug(
                'AWS SecurityGroup ID of %s is %s',
                sg['groupName'], sg['groupId']
            )
            securitygroupid_set.add(sg['groupId'])
return list(securitygroupid_set) | def function[_get_securitygroupname_id, parameter[securitygroupname_list]]:
constant[
Returns the SecurityGroupId of a SecurityGroupName to use
]
variable[securitygroupid_set] assign[=] call[name[set], parameter[]]
if <ast.UnaryOp object at 0x7da1b1c66f20> begin[:]
variable[securitygroupname_list] assign[=] list[[<ast.Name object at 0x7da20cabcd00>]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20cabcbe0>], [<ast.Constant object at 0x7da20cabe5f0>]]
for taget[name[sg]] in starred[call[name[aws].query, parameter[name[params]]]] begin[:]
if compare[call[name[sg]][constant[groupName]] in name[securitygroupname_list]] begin[:]
call[name[log].debug, parameter[constant[AWS SecurityGroup ID of %s is %s], call[name[sg]][constant[groupName]], call[name[sg]][constant[groupId]]]]
call[name[securitygroupid_set].add, parameter[call[name[sg]][constant[groupId]]]]
return[call[name[list], parameter[name[securitygroupid_set]]]] | keyword[def] identifier[_get_securitygroupname_id] ( identifier[securitygroupname_list] ):
literal[string]
identifier[securitygroupid_set] = identifier[set] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[securitygroupname_list] , identifier[list] ):
identifier[securitygroupname_list] =[ identifier[securitygroupname_list] ]
identifier[params] ={ literal[string] : literal[string] }
keyword[for] identifier[sg] keyword[in] identifier[aws] . identifier[query] ( identifier[params] , identifier[location] = identifier[get_location] (),
identifier[provider] = identifier[get_provider] (), identifier[opts] = identifier[__opts__] , identifier[sigver] = literal[string] ):
keyword[if] identifier[sg] [ literal[string] ] keyword[in] identifier[securitygroupname_list] :
identifier[log] . identifier[debug] (
literal[string] ,
identifier[sg] [ literal[string] ], identifier[sg] [ literal[string] ]
)
identifier[securitygroupid_set] . identifier[add] ( identifier[sg] [ literal[string] ])
keyword[return] identifier[list] ( identifier[securitygroupid_set] ) | def _get_securitygroupname_id(securitygroupname_list):
"""
Returns the SecurityGroupId of a SecurityGroupName to use
"""
securitygroupid_set = set()
if not isinstance(securitygroupname_list, list):
securitygroupname_list = [securitygroupname_list] # depends on [control=['if'], data=[]]
params = {'Action': 'DescribeSecurityGroups'}
for sg in aws.query(params, location=get_location(), provider=get_provider(), opts=__opts__, sigver='4'):
if sg['groupName'] in securitygroupname_list:
log.debug('AWS SecurityGroup ID of %s is %s', sg['groupName'], sg['groupId'])
securitygroupid_set.add(sg['groupId']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sg']]
return list(securitygroupid_set) |
def binary_gas_search(state: BaseState, transaction: BaseTransaction, tolerance: int=1) -> int:
    """
    Estimate the minimum gas required for ``transaction`` to run without an
    OutOfGas exception, by bisecting over candidate gas limits.

    The search interval starts as [transaction.intrinsic_gas, state.gas_limit]
    and narrows on each probe: a failing probe raises the lower bound, a
    succeeding probe lowers the upper bound.

    :param int tolerance: stop once the search interval is no wider than this,
        and return the interval's upper end (the smallest confirmed success).
    :returns int: the smallest gas limit confirmed not to raise OutOfGas,
        subject to ``tolerance``; the block gas limit itself if that is the
        smallest confirmed success.
    :raises VMError: if the computation fails even at the block gas_limit
    """
    # A sender is required to build the spoofed probe transactions.
    if not hasattr(transaction, 'sender'):
        raise TypeError(
            "Transaction is missing attribute sender.",
            "If sending an unsigned transaction, use SpoofTransaction and provide the",
            "sender using the 'from' parameter")

    # Fast path: if the intrinsic gas alone suffices, no search is needed.
    cheapest_probe = SpoofTransaction(
        transaction,
        gas=transaction.intrinsic_gas,
        gas_price=0,
    )
    if _get_computation_error(state, cheapest_probe) is None:
        return transaction.intrinsic_gas

    # Sanity check: the transaction must at least succeed at the block limit,
    # otherwise no gas amount can make it pass and we surface that error.
    richest_probe = SpoofTransaction(
        transaction,
        gas=state.gas_limit,
        gas_price=0,
    )
    failure = _get_computation_error(state, richest_probe)
    if failure is not None:
        raise failure

    # Invariants: `known_failure` always runs out of gas,
    # `known_success` always completes.
    known_success = state.gas_limit
    known_failure = transaction.intrinsic_gas
    while known_success - known_failure > tolerance:
        probe_gas = (known_success + known_failure) // 2
        probe = SpoofTransaction(transaction, gas=probe_gas)
        if _get_computation_error(state, probe) is None:
            known_success = probe_gas
        else:
            known_failure = probe_gas
    return known_success
constant[
Run the transaction with various gas limits, progressively
approaching the minimum needed to succeed without an OutOfGas exception.
The starting range of possible estimates is:
[transaction.intrinsic_gas, state.gas_limit].
After the first OutOfGas exception, the range is: (largest_limit_out_of_gas, state.gas_limit].
After the first run not out of gas, the range is: (largest_limit_out_of_gas, smallest_success].
:param int tolerance: When the range of estimates is less than tolerance,
return the top of the range.
:returns int: The smallest confirmed gas to not throw an OutOfGas exception,
subject to tolerance. If OutOfGas is thrown at block limit, return block limit.
:raises VMError: if the computation fails even when given the block gas_limit to complete
]
if <ast.UnaryOp object at 0x7da1b1644bb0> begin[:]
<ast.Raise object at 0x7da1b1644940>
variable[minimum_transaction] assign[=] call[name[SpoofTransaction], parameter[name[transaction]]]
if compare[call[name[_get_computation_error], parameter[name[state], name[minimum_transaction]]] is constant[None]] begin[:]
return[name[transaction].intrinsic_gas]
variable[maximum_transaction] assign[=] call[name[SpoofTransaction], parameter[name[transaction]]]
variable[error] assign[=] call[name[_get_computation_error], parameter[name[state], name[maximum_transaction]]]
if compare[name[error] is_not constant[None]] begin[:]
<ast.Raise object at 0x7da1b1647e80>
variable[minimum_viable] assign[=] name[state].gas_limit
variable[maximum_out_of_gas] assign[=] name[transaction].intrinsic_gas
while compare[binary_operation[name[minimum_viable] - name[maximum_out_of_gas]] greater[>] name[tolerance]] begin[:]
variable[midpoint] assign[=] binary_operation[binary_operation[name[minimum_viable] + name[maximum_out_of_gas]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]
variable[test_transaction] assign[=] call[name[SpoofTransaction], parameter[name[transaction]]]
if compare[call[name[_get_computation_error], parameter[name[state], name[test_transaction]]] is constant[None]] begin[:]
variable[minimum_viable] assign[=] name[midpoint]
return[name[minimum_viable]] | keyword[def] identifier[binary_gas_search] ( identifier[state] : identifier[BaseState] , identifier[transaction] : identifier[BaseTransaction] , identifier[tolerance] : identifier[int] = literal[int] )-> identifier[int] :
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[transaction] , literal[string] ):
keyword[raise] identifier[TypeError] (
literal[string] ,
literal[string] ,
literal[string] )
identifier[minimum_transaction] = identifier[SpoofTransaction] (
identifier[transaction] ,
identifier[gas] = identifier[transaction] . identifier[intrinsic_gas] ,
identifier[gas_price] = literal[int] ,
)
keyword[if] identifier[_get_computation_error] ( identifier[state] , identifier[minimum_transaction] ) keyword[is] keyword[None] :
keyword[return] identifier[transaction] . identifier[intrinsic_gas]
identifier[maximum_transaction] = identifier[SpoofTransaction] (
identifier[transaction] ,
identifier[gas] = identifier[state] . identifier[gas_limit] ,
identifier[gas_price] = literal[int] ,
)
identifier[error] = identifier[_get_computation_error] ( identifier[state] , identifier[maximum_transaction] )
keyword[if] identifier[error] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[error]
identifier[minimum_viable] = identifier[state] . identifier[gas_limit]
identifier[maximum_out_of_gas] = identifier[transaction] . identifier[intrinsic_gas]
keyword[while] identifier[minimum_viable] - identifier[maximum_out_of_gas] > identifier[tolerance] :
identifier[midpoint] =( identifier[minimum_viable] + identifier[maximum_out_of_gas] )// literal[int]
identifier[test_transaction] = identifier[SpoofTransaction] ( identifier[transaction] , identifier[gas] = identifier[midpoint] )
keyword[if] identifier[_get_computation_error] ( identifier[state] , identifier[test_transaction] ) keyword[is] keyword[None] :
identifier[minimum_viable] = identifier[midpoint]
keyword[else] :
identifier[maximum_out_of_gas] = identifier[midpoint]
keyword[return] identifier[minimum_viable] | def binary_gas_search(state: BaseState, transaction: BaseTransaction, tolerance: int=1) -> int:
"""
Run the transaction with various gas limits, progressively
approaching the minimum needed to succeed without an OutOfGas exception.
The starting range of possible estimates is:
[transaction.intrinsic_gas, state.gas_limit].
After the first OutOfGas exception, the range is: (largest_limit_out_of_gas, state.gas_limit].
After the first run not out of gas, the range is: (largest_limit_out_of_gas, smallest_success].
:param int tolerance: When the range of estimates is less than tolerance,
return the top of the range.
:returns int: The smallest confirmed gas to not throw an OutOfGas exception,
subject to tolerance. If OutOfGas is thrown at block limit, return block limit.
:raises VMError: if the computation fails even when given the block gas_limit to complete
"""
if not hasattr(transaction, 'sender'):
raise TypeError('Transaction is missing attribute sender.', 'If sending an unsigned transaction, use SpoofTransaction and provide the', "sender using the 'from' parameter") # depends on [control=['if'], data=[]]
minimum_transaction = SpoofTransaction(transaction, gas=transaction.intrinsic_gas, gas_price=0)
if _get_computation_error(state, minimum_transaction) is None:
return transaction.intrinsic_gas # depends on [control=['if'], data=[]]
maximum_transaction = SpoofTransaction(transaction, gas=state.gas_limit, gas_price=0)
error = _get_computation_error(state, maximum_transaction)
if error is not None:
raise error # depends on [control=['if'], data=['error']]
minimum_viable = state.gas_limit
maximum_out_of_gas = transaction.intrinsic_gas
while minimum_viable - maximum_out_of_gas > tolerance:
midpoint = (minimum_viable + maximum_out_of_gas) // 2
test_transaction = SpoofTransaction(transaction, gas=midpoint)
if _get_computation_error(state, test_transaction) is None:
minimum_viable = midpoint # depends on [control=['if'], data=[]]
else:
maximum_out_of_gas = midpoint # depends on [control=['while'], data=[]]
return minimum_viable |
def compcor_variance_plot(metadata_files, metadata_sources=None,
                          output_file=None, varexp_thresh=(0.5, 0.7, 0.9),
                          fig=None):
    """
    Plot the cumulative variance explained by CompCor components.

    One subplot is drawn per (source, mask) decomposition found in the
    metadata, showing cumulative variance explained as a function of the
    number of components retained, annotated at each threshold in
    `varexp_thresh`.

    Parameters
    ----------
    metadata_files: list
        List of paths to files containing component metadata. If more than one
        decomposition has been performed (e.g., anatomical and temporal
        CompCor decompositions), then all metadata files can be provided in
        the list. However, each metadata file should have a corresponding
        entry in `metadata_sources`.
    metadata_sources: list or None
        List of source names (e.g., ['aCompCor']) for decompositions. This
        list should be of the same length as `metadata_files`.
    output_file: str or None
        Path where the output figure should be saved. If this is not defined,
        then the plotting axes will be returned instead of the saved figure
        path.
    varexp_thresh: tuple
        Set of variance thresholds to include in the plot (default 0.5, 0.7,
        0.9).
    fig: figure or None
        Existing figure on which to plot.
    Returns
    -------
    ax: axes
        Plotting axes. Returned only if the `output_file` parameter is None.
    output_file: str
        The file where the figure is saved.
    """
    metadata = {}
    # Default source labels: a single file is just "CompCor", several files
    # get numbered "Decomposition {i}" labels in input order.
    if metadata_sources is None:
        if len(metadata_files) == 1:
            metadata_sources = ['CompCor']
        else:
            metadata_sources = ['Decomposition {:d}'.format(i)
                                for i in range(len(metadata_files))]
    # Load each TSV and tag its rows with the source it came from, then
    # concatenate everything into one long table.
    for file, source in zip(metadata_files, metadata_sources):
        metadata[source] = pd.read_table(str(file))
        metadata[source]['source'] = source

    metadata = pd.concat(list(metadata.values()))

    # Shared text-box style for all threshold annotations.
    bbox_txt = {
        'boxstyle': 'round',
        'fc': 'white',
        'ec': 'none',
        'color': 'none',
        'linewidth': 0,
        'alpha': 0.8
    }

    decompositions = []
    data_sources = list(metadata.groupby(['source', 'mask']).groups.keys())
    # Keep only (source, mask) pairs that actually carry decomposition data:
    # rows whose first singular_value is NaN are placeholders with nothing
    # to plot.
    for source, mask in data_sources:
        if not np.isnan(
                metadata.loc[
                    (metadata['source'] == source)
                    & (metadata['mask'] == mask)
                ]['singular_value'].values[0]):
            decompositions.append((source, mask))

    # One axes per decomposition: reuse a caller-supplied figure if given,
    # otherwise create a row of subplots (or a single bare axes).
    if fig is not None:
        ax = [fig.add_subplot(1, len(decompositions), i+1)
              for i in range(len(decompositions))]
    elif len(decompositions) > 1:
        fig, ax = plt.subplots(1, len(decompositions),
                               figsize=(5*len(decompositions), 5))
    else:
        ax = [plt.axes()]

    for m, (source, mask) in enumerate(decompositions):
        components = metadata[(metadata['mask'] == mask)
                              & (metadata['source'] == source)]
        # Only disambiguate by mask in the title when this source appears
        # with more than one mask.
        if len([m for s, m in decompositions if s == source]) > 1:
            title_mask = ' ({} mask)'.format(mask)
        else:
            title_mask = ''
        fig_title = '{}{}'.format(source, title_mask)

        # Curve starts at (0, 0%): zero components explain no variance.
        ax[m].plot(np.arange(components.shape[0]+1),
                   [0] + list(
                       100*components['cumulative_variance_explained']),
                   color='purple',
                   linewidth=2.5)
        ax[m].grid(False)
        ax[m].set_xlabel('number of components in model')
        ax[m].set_ylabel('cumulative variance explained (%)')
        ax[m].set_title(fig_title)

        varexp = {}

        for i, thr in enumerate(varexp_thresh):
            # Number of components needed to reach this variance threshold
            # (searchsorted over the monotone cumulative curve, +1 because
            # components are 1-indexed on the x axis).
            varexp[thr] = np.searchsorted(
                components['cumulative_variance_explained'], thr) + 1
            ax[m].axhline(y=100*thr, color='lightgrey', linewidth=0.25)
            ax[m].axvline(x=varexp[thr], color='C{}'.format(i),
                          linewidth=2, linestyle=':')
            ax[m].text(0, 100*thr, '{:.0f}'.format(100*thr),
                       fontsize='x-small', bbox=bbox_txt)
            # NOTE(review): varexp[thr] is indexed with [0] here but passed
            # as a scalar to axvline above — this assumes searchsorted on the
            # Series returns an array; verify against the installed pandas
            # version.
            ax[m].text(varexp[thr][0], 25,
                       '{} components explain\n{:.0f}% of variance'.format(
                           varexp[thr][0], 100*thr),
                       rotation=90,
                       horizontalalignment='center',
                       fontsize='xx-small',
                       bbox=bbox_txt)

        # Strip y tick marks/labels and all spines except the bottom one.
        ax[m].set_yticks([])
        ax[m].set_yticklabels([])
        for tick in ax[m].xaxis.get_major_ticks():
            tick.label.set_fontsize('x-small')
            tick.label.set_rotation('vertical')
        for side in ['top', 'right', 'left']:
            ax[m].spines[side].set_color('none')
            ax[m].spines[side].set_visible(False)

    if output_file is not None:
        figure = plt.gcf()
        figure.savefig(output_file, bbox_inches='tight')
        plt.close(figure)
        # Drop the local reference after closing the figure.
        figure = None
        return output_file
    return ax
constant[
Parameters
----------
metadata_files: list
List of paths to files containing component metadata. If more than one
decomposition has been performed (e.g., anatomical and temporal
CompCor decompositions), then all metadata files can be provided in
the list. However, each metadata file should have a corresponding
entry in `metadata_sources`.
metadata_sources: list or None
List of source names (e.g., ['aCompCor']) for decompositions. This
list should be of the same length as `metadata_files`.
output_file: str or None
Path where the output figure should be saved. If this is not defined,
then the plotting axes will be returned instead of the saved figure
path.
varexp_thresh: tuple
Set of variance thresholds to include in the plot (default 0.5, 0.7,
0.9).
fig: figure or None
Existing figure on which to plot.
Returns
-------
ax: axes
Plotting axes. Returned only if the `output_file` parameter is None.
output_file: str
The file where the figure is saved.
]
variable[metadata] assign[=] dictionary[[], []]
if compare[name[metadata_sources] is constant[None]] begin[:]
if compare[call[name[len], parameter[name[metadata_files]]] equal[==] constant[1]] begin[:]
variable[metadata_sources] assign[=] list[[<ast.Constant object at 0x7da20e9b1210>]]
for taget[tuple[[<ast.Name object at 0x7da20e9b1d50>, <ast.Name object at 0x7da20e9b1c90>]]] in starred[call[name[zip], parameter[name[metadata_files], name[metadata_sources]]]] begin[:]
call[name[metadata]][name[source]] assign[=] call[name[pd].read_table, parameter[call[name[str], parameter[name[file]]]]]
call[call[name[metadata]][name[source]]][constant[source]] assign[=] name[source]
variable[metadata] assign[=] call[name[pd].concat, parameter[call[name[list], parameter[call[name[metadata].values, parameter[]]]]]]
variable[bbox_txt] assign[=] dictionary[[<ast.Constant object at 0x7da20e9b14e0>, <ast.Constant object at 0x7da20e9b35e0>, <ast.Constant object at 0x7da20e9b2560>, <ast.Constant object at 0x7da20e9b2ce0>, <ast.Constant object at 0x7da20e9b2980>, <ast.Constant object at 0x7da20e9b0490>], [<ast.Constant object at 0x7da20e9b34c0>, <ast.Constant object at 0x7da20e9b2650>, <ast.Constant object at 0x7da20e9b02e0>, <ast.Constant object at 0x7da20e9b2a10>, <ast.Constant object at 0x7da20e9b0460>, <ast.Constant object at 0x7da20e9b24d0>]]
variable[decompositions] assign[=] list[[]]
variable[data_sources] assign[=] call[name[list], parameter[call[call[name[metadata].groupby, parameter[list[[<ast.Constant object at 0x7da18f00fc10>, <ast.Constant object at 0x7da18f00c760>]]]].groups.keys, parameter[]]]]
for taget[tuple[[<ast.Name object at 0x7da18f00d840>, <ast.Name object at 0x7da18f00fb80>]]] in starred[name[data_sources]] begin[:]
if <ast.UnaryOp object at 0x7da18f00c490> begin[:]
call[name[decompositions].append, parameter[tuple[[<ast.Name object at 0x7da18f00e0e0>, <ast.Name object at 0x7da18f00df30>]]]]
if compare[name[fig] is_not constant[None]] begin[:]
variable[ax] assign[=] <ast.ListComp object at 0x7da18f00eb60>
for taget[tuple[[<ast.Name object at 0x7da18f00cf10>, <ast.Tuple object at 0x7da18f00e5c0>]]] in starred[call[name[enumerate], parameter[name[decompositions]]]] begin[:]
variable[components] assign[=] call[name[metadata]][binary_operation[compare[call[name[metadata]][constant[mask]] equal[==] name[mask]] <ast.BitAnd object at 0x7da2590d6b60> compare[call[name[metadata]][constant[source]] equal[==] name[source]]]]
if compare[call[name[len], parameter[<ast.ListComp object at 0x7da18f00f7f0>]] greater[>] constant[1]] begin[:]
variable[title_mask] assign[=] call[constant[ ({} mask)].format, parameter[name[mask]]]
variable[fig_title] assign[=] call[constant[{}{}].format, parameter[name[source], name[title_mask]]]
call[call[name[ax]][name[m]].plot, parameter[call[name[np].arange, parameter[binary_operation[call[name[components].shape][constant[0]] + constant[1]]]], binary_operation[list[[<ast.Constant object at 0x7da18f00fd90>]] + call[name[list], parameter[binary_operation[constant[100] * call[name[components]][constant[cumulative_variance_explained]]]]]]]]
call[call[name[ax]][name[m]].grid, parameter[constant[False]]]
call[call[name[ax]][name[m]].set_xlabel, parameter[constant[number of components in model]]]
call[call[name[ax]][name[m]].set_ylabel, parameter[constant[cumulative variance explained (%)]]]
call[call[name[ax]][name[m]].set_title, parameter[name[fig_title]]]
variable[varexp] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da18f00fbe0>, <ast.Name object at 0x7da18f00e290>]]] in starred[call[name[enumerate], parameter[name[varexp_thresh]]]] begin[:]
call[name[varexp]][name[thr]] assign[=] binary_operation[call[name[np].searchsorted, parameter[call[name[components]][constant[cumulative_variance_explained]], name[thr]]] + constant[1]]
call[call[name[ax]][name[m]].axhline, parameter[]]
call[call[name[ax]][name[m]].axvline, parameter[]]
call[call[name[ax]][name[m]].text, parameter[constant[0], binary_operation[constant[100] * name[thr]], call[constant[{:.0f}].format, parameter[binary_operation[constant[100] * name[thr]]]]]]
call[call[name[ax]][name[m]].text, parameter[call[call[name[varexp]][name[thr]]][constant[0]], constant[25], call[constant[{} components explain
{:.0f}% of variance].format, parameter[call[call[name[varexp]][name[thr]]][constant[0]], binary_operation[constant[100] * name[thr]]]]]]
call[call[name[ax]][name[m]].set_yticks, parameter[list[[]]]]
call[call[name[ax]][name[m]].set_yticklabels, parameter[list[[]]]]
for taget[name[tick]] in starred[call[call[name[ax]][name[m]].xaxis.get_major_ticks, parameter[]]] begin[:]
call[name[tick].label.set_fontsize, parameter[constant[x-small]]]
call[name[tick].label.set_rotation, parameter[constant[vertical]]]
for taget[name[side]] in starred[list[[<ast.Constant object at 0x7da18f58c550>, <ast.Constant object at 0x7da18f58fdf0>, <ast.Constant object at 0x7da18f58ca60>]]] begin[:]
call[call[call[name[ax]][name[m]].spines][name[side]].set_color, parameter[constant[none]]]
call[call[call[name[ax]][name[m]].spines][name[side]].set_visible, parameter[constant[False]]]
if compare[name[output_file] is_not constant[None]] begin[:]
variable[figure] assign[=] call[name[plt].gcf, parameter[]]
call[name[figure].savefig, parameter[name[output_file]]]
call[name[plt].close, parameter[name[figure]]]
variable[figure] assign[=] constant[None]
return[name[output_file]]
return[name[ax]] | keyword[def] identifier[compcor_variance_plot] ( identifier[metadata_files] , identifier[metadata_sources] = keyword[None] ,
identifier[output_file] = keyword[None] , identifier[varexp_thresh] =( literal[int] , literal[int] , literal[int] ),
identifier[fig] = keyword[None] ):
literal[string]
identifier[metadata] ={}
keyword[if] identifier[metadata_sources] keyword[is] keyword[None] :
keyword[if] identifier[len] ( identifier[metadata_files] )== literal[int] :
identifier[metadata_sources] =[ literal[string] ]
keyword[else] :
identifier[metadata_sources] =[ literal[string] . identifier[format] ( identifier[i] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[metadata_files] ))]
keyword[for] identifier[file] , identifier[source] keyword[in] identifier[zip] ( identifier[metadata_files] , identifier[metadata_sources] ):
identifier[metadata] [ identifier[source] ]= identifier[pd] . identifier[read_table] ( identifier[str] ( identifier[file] ))
identifier[metadata] [ identifier[source] ][ literal[string] ]= identifier[source]
identifier[metadata] = identifier[pd] . identifier[concat] ( identifier[list] ( identifier[metadata] . identifier[values] ()))
identifier[bbox_txt] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[int] ,
literal[string] : literal[int]
}
identifier[decompositions] =[]
identifier[data_sources] = identifier[list] ( identifier[metadata] . identifier[groupby] ([ literal[string] , literal[string] ]). identifier[groups] . identifier[keys] ())
keyword[for] identifier[source] , identifier[mask] keyword[in] identifier[data_sources] :
keyword[if] keyword[not] identifier[np] . identifier[isnan] (
identifier[metadata] . identifier[loc] [
( identifier[metadata] [ literal[string] ]== identifier[source] )
&( identifier[metadata] [ literal[string] ]== identifier[mask] )
][ literal[string] ]. identifier[values] [ literal[int] ]):
identifier[decompositions] . identifier[append] (( identifier[source] , identifier[mask] ))
keyword[if] identifier[fig] keyword[is] keyword[not] keyword[None] :
identifier[ax] =[ identifier[fig] . identifier[add_subplot] ( literal[int] , identifier[len] ( identifier[decompositions] ), identifier[i] + literal[int] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[decompositions] ))]
keyword[elif] identifier[len] ( identifier[decompositions] )> literal[int] :
identifier[fig] , identifier[ax] = identifier[plt] . identifier[subplots] ( literal[int] , identifier[len] ( identifier[decompositions] ),
identifier[figsize] =( literal[int] * identifier[len] ( identifier[decompositions] ), literal[int] ))
keyword[else] :
identifier[ax] =[ identifier[plt] . identifier[axes] ()]
keyword[for] identifier[m] ,( identifier[source] , identifier[mask] ) keyword[in] identifier[enumerate] ( identifier[decompositions] ):
identifier[components] = identifier[metadata] [( identifier[metadata] [ literal[string] ]== identifier[mask] )
&( identifier[metadata] [ literal[string] ]== identifier[source] )]
keyword[if] identifier[len] ([ identifier[m] keyword[for] identifier[s] , identifier[m] keyword[in] identifier[decompositions] keyword[if] identifier[s] == identifier[source] ])> literal[int] :
identifier[title_mask] = literal[string] . identifier[format] ( identifier[mask] )
keyword[else] :
identifier[title_mask] = literal[string]
identifier[fig_title] = literal[string] . identifier[format] ( identifier[source] , identifier[title_mask] )
identifier[ax] [ identifier[m] ]. identifier[plot] ( identifier[np] . identifier[arange] ( identifier[components] . identifier[shape] [ literal[int] ]+ literal[int] ),
[ literal[int] ]+ identifier[list] (
literal[int] * identifier[components] [ literal[string] ]),
identifier[color] = literal[string] ,
identifier[linewidth] = literal[int] )
identifier[ax] [ identifier[m] ]. identifier[grid] ( keyword[False] )
identifier[ax] [ identifier[m] ]. identifier[set_xlabel] ( literal[string] )
identifier[ax] [ identifier[m] ]. identifier[set_ylabel] ( literal[string] )
identifier[ax] [ identifier[m] ]. identifier[set_title] ( identifier[fig_title] )
identifier[varexp] ={}
keyword[for] identifier[i] , identifier[thr] keyword[in] identifier[enumerate] ( identifier[varexp_thresh] ):
identifier[varexp] [ identifier[thr] ]= identifier[np] . identifier[searchsorted] (
identifier[components] [ literal[string] ], identifier[thr] )+ literal[int]
identifier[ax] [ identifier[m] ]. identifier[axhline] ( identifier[y] = literal[int] * identifier[thr] , identifier[color] = literal[string] , identifier[linewidth] = literal[int] )
identifier[ax] [ identifier[m] ]. identifier[axvline] ( identifier[x] = identifier[varexp] [ identifier[thr] ], identifier[color] = literal[string] . identifier[format] ( identifier[i] ),
identifier[linewidth] = literal[int] , identifier[linestyle] = literal[string] )
identifier[ax] [ identifier[m] ]. identifier[text] ( literal[int] , literal[int] * identifier[thr] , literal[string] . identifier[format] ( literal[int] * identifier[thr] ),
identifier[fontsize] = literal[string] , identifier[bbox] = identifier[bbox_txt] )
identifier[ax] [ identifier[m] ]. identifier[text] ( identifier[varexp] [ identifier[thr] ][ literal[int] ], literal[int] ,
literal[string] . identifier[format] (
identifier[varexp] [ identifier[thr] ][ literal[int] ], literal[int] * identifier[thr] ),
identifier[rotation] = literal[int] ,
identifier[horizontalalignment] = literal[string] ,
identifier[fontsize] = literal[string] ,
identifier[bbox] = identifier[bbox_txt] )
identifier[ax] [ identifier[m] ]. identifier[set_yticks] ([])
identifier[ax] [ identifier[m] ]. identifier[set_yticklabels] ([])
keyword[for] identifier[tick] keyword[in] identifier[ax] [ identifier[m] ]. identifier[xaxis] . identifier[get_major_ticks] ():
identifier[tick] . identifier[label] . identifier[set_fontsize] ( literal[string] )
identifier[tick] . identifier[label] . identifier[set_rotation] ( literal[string] )
keyword[for] identifier[side] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[ax] [ identifier[m] ]. identifier[spines] [ identifier[side] ]. identifier[set_color] ( literal[string] )
identifier[ax] [ identifier[m] ]. identifier[spines] [ identifier[side] ]. identifier[set_visible] ( keyword[False] )
keyword[if] identifier[output_file] keyword[is] keyword[not] keyword[None] :
identifier[figure] = identifier[plt] . identifier[gcf] ()
identifier[figure] . identifier[savefig] ( identifier[output_file] , identifier[bbox_inches] = literal[string] )
identifier[plt] . identifier[close] ( identifier[figure] )
identifier[figure] = keyword[None]
keyword[return] identifier[output_file]
keyword[return] identifier[ax] | def compcor_variance_plot(metadata_files, metadata_sources=None, output_file=None, varexp_thresh=(0.5, 0.7, 0.9), fig=None):
"""
Parameters
----------
metadata_files: list
List of paths to files containing component metadata. If more than one
decomposition has been performed (e.g., anatomical and temporal
CompCor decompositions), then all metadata files can be provided in
the list. However, each metadata file should have a corresponding
entry in `metadata_sources`.
metadata_sources: list or None
List of source names (e.g., ['aCompCor']) for decompositions. This
list should be of the same length as `metadata_files`.
output_file: str or None
Path where the output figure should be saved. If this is not defined,
then the plotting axes will be returned instead of the saved figure
path.
varexp_thresh: tuple
Set of variance thresholds to include in the plot (default 0.5, 0.7,
0.9).
fig: figure or None
Existing figure on which to plot.
Returns
-------
ax: axes
Plotting axes. Returned only if the `output_file` parameter is None.
output_file: str
The file where the figure is saved.
"""
metadata = {}
if metadata_sources is None:
if len(metadata_files) == 1:
metadata_sources = ['CompCor'] # depends on [control=['if'], data=[]]
else:
metadata_sources = ['Decomposition {:d}'.format(i) for i in range(len(metadata_files))] # depends on [control=['if'], data=['metadata_sources']]
for (file, source) in zip(metadata_files, metadata_sources):
metadata[source] = pd.read_table(str(file))
metadata[source]['source'] = source # depends on [control=['for'], data=[]]
metadata = pd.concat(list(metadata.values()))
bbox_txt = {'boxstyle': 'round', 'fc': 'white', 'ec': 'none', 'color': 'none', 'linewidth': 0, 'alpha': 0.8}
decompositions = []
data_sources = list(metadata.groupby(['source', 'mask']).groups.keys())
for (source, mask) in data_sources:
if not np.isnan(metadata.loc[(metadata['source'] == source) & (metadata['mask'] == mask)]['singular_value'].values[0]):
decompositions.append((source, mask)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if fig is not None:
ax = [fig.add_subplot(1, len(decompositions), i + 1) for i in range(len(decompositions))] # depends on [control=['if'], data=['fig']]
elif len(decompositions) > 1:
(fig, ax) = plt.subplots(1, len(decompositions), figsize=(5 * len(decompositions), 5)) # depends on [control=['if'], data=[]]
else:
ax = [plt.axes()]
for (m, (source, mask)) in enumerate(decompositions):
components = metadata[(metadata['mask'] == mask) & (metadata['source'] == source)]
if len([m for (s, m) in decompositions if s == source]) > 1:
title_mask = ' ({} mask)'.format(mask) # depends on [control=['if'], data=[]]
else:
title_mask = ''
fig_title = '{}{}'.format(source, title_mask)
ax[m].plot(np.arange(components.shape[0] + 1), [0] + list(100 * components['cumulative_variance_explained']), color='purple', linewidth=2.5)
ax[m].grid(False)
ax[m].set_xlabel('number of components in model')
ax[m].set_ylabel('cumulative variance explained (%)')
ax[m].set_title(fig_title)
varexp = {}
for (i, thr) in enumerate(varexp_thresh):
varexp[thr] = np.searchsorted(components['cumulative_variance_explained'], thr) + 1
ax[m].axhline(y=100 * thr, color='lightgrey', linewidth=0.25)
ax[m].axvline(x=varexp[thr], color='C{}'.format(i), linewidth=2, linestyle=':')
ax[m].text(0, 100 * thr, '{:.0f}'.format(100 * thr), fontsize='x-small', bbox=bbox_txt)
ax[m].text(varexp[thr][0], 25, '{} components explain\n{:.0f}% of variance'.format(varexp[thr][0], 100 * thr), rotation=90, horizontalalignment='center', fontsize='xx-small', bbox=bbox_txt) # depends on [control=['for'], data=[]]
ax[m].set_yticks([])
ax[m].set_yticklabels([])
for tick in ax[m].xaxis.get_major_ticks():
tick.label.set_fontsize('x-small')
tick.label.set_rotation('vertical') # depends on [control=['for'], data=['tick']]
for side in ['top', 'right', 'left']:
ax[m].spines[side].set_color('none')
ax[m].spines[side].set_visible(False) # depends on [control=['for'], data=['side']] # depends on [control=['for'], data=[]]
if output_file is not None:
figure = plt.gcf()
figure.savefig(output_file, bbox_inches='tight')
plt.close(figure)
figure = None
return output_file # depends on [control=['if'], data=['output_file']]
return ax |
def lru_cache(maxsize=128, typed=False, state=None, unhashable='error'):
    """Least-recently-used cache decorator.

    If *maxsize* is set to None, the LRU features are disabled and
    the cache can grow without bound.

    If *typed* is True, arguments of different types will be cached
    separately. For example, f(3.0) and f(3) will be treated as distinct
    calls with distinct results.

    If *state* is a list or dict, the items will be incorporated into
    argument hash.

    The result of calling the cached function with unhashable (mutable)
    arguments depends on the value of *unhashable*:

        If *unhashable* is 'error', a TypeError will be raised.

        If *unhashable* is 'warning', a UserWarning will be raised, and
        the wrapped function will be called with the supplied arguments.
        A miss will be recorded in the cache statistics.

        If *unhashable* is 'ignore', the wrapped function will be called
        with the supplied arguments. A miss will will be recorded in
        the cache statistics.

    View the cache statistics named tuple (hits, misses, maxsize, currsize)
    with f.cache_info(). Clear the cache and statistics with
    f.cache_clear(). Access the underlying function with f.__wrapped__.

    See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
    """
    def decorator(user_function):
        # Delegate the actual caching to the C implementation; wrap it in a
        # plain Python function so callers get a normal function object with
        # the usual introspection metadata.
        cached = clru_cache(maxsize, typed, state, unhashable)(user_function)

        def wrapper(*args, **kwargs):
            return cached(*args, **kwargs)

        # Copy name/doc/module metadata from the wrapped function, then
        # expose the cache-management API on the wrapper itself.
        update_wrapper(wrapper, user_function)
        wrapper.__wrapped__ = user_function
        wrapper.cache_info = cached.cache_info
        wrapper.cache_clear = cached.cache_clear
        return wrapper
    return decorator
constant[Least-recently-used cache decorator.
If *maxsize* is set to None, the LRU features are disabled and
the cache can grow without bound.
If *typed* is True, arguments of different types will be cached
separately. For example, f(3.0) and f(3) will be treated as distinct
calls with distinct results.
If *state* is a list or dict, the items will be incorporated into
argument hash.
The result of calling the cached function with unhashable (mutable)
arguments depends on the value of *unhashable*:
If *unhashable* is 'error', a TypeError will be raised.
If *unhashable* is 'warning', a UserWarning will be raised, and
the wrapped function will be called with the supplied arguments.
A miss will be recorded in the cache statistics.
If *unhashable* is 'ignore', the wrapped function will be called
with the supplied arguments. A miss will will be recorded in
the cache statistics.
View the cache statistics named tuple (hits, misses, maxsize, currsize)
with f.cache_info(). Clear the cache and statistics with
f.cache_clear(). Access the underlying function with f.__wrapped__.
See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
]
def function[func_wrapper, parameter[func]]:
variable[_cached_func] assign[=] call[call[name[clru_cache], parameter[name[maxsize], name[typed], name[state], name[unhashable]]], parameter[name[func]]]
def function[wrapper, parameter[]]:
return[call[name[_cached_func], parameter[<ast.Starred object at 0x7da1b06cc850>]]]
name[wrapper].__wrapped__ assign[=] name[func]
name[wrapper].cache_info assign[=] name[_cached_func].cache_info
name[wrapper].cache_clear assign[=] name[_cached_func].cache_clear
return[call[name[update_wrapper], parameter[name[wrapper], name[func]]]]
return[name[func_wrapper]] | keyword[def] identifier[lru_cache] ( identifier[maxsize] = literal[int] , identifier[typed] = keyword[False] , identifier[state] = keyword[None] , identifier[unhashable] = literal[string] ):
literal[string]
keyword[def] identifier[func_wrapper] ( identifier[func] ):
identifier[_cached_func] = identifier[clru_cache] ( identifier[maxsize] , identifier[typed] , identifier[state] , identifier[unhashable] )( identifier[func] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
keyword[return] identifier[_cached_func] (* identifier[args] ,** identifier[kwargs] )
identifier[wrapper] . identifier[__wrapped__] = identifier[func]
identifier[wrapper] . identifier[cache_info] = identifier[_cached_func] . identifier[cache_info]
identifier[wrapper] . identifier[cache_clear] = identifier[_cached_func] . identifier[cache_clear]
keyword[return] identifier[update_wrapper] ( identifier[wrapper] , identifier[func] )
keyword[return] identifier[func_wrapper] | def lru_cache(maxsize=128, typed=False, state=None, unhashable='error'):
"""Least-recently-used cache decorator.
If *maxsize* is set to None, the LRU features are disabled and
the cache can grow without bound.
If *typed* is True, arguments of different types will be cached
separately. For example, f(3.0) and f(3) will be treated as distinct
calls with distinct results.
If *state* is a list or dict, the items will be incorporated into
argument hash.
The result of calling the cached function with unhashable (mutable)
arguments depends on the value of *unhashable*:
If *unhashable* is 'error', a TypeError will be raised.
If *unhashable* is 'warning', a UserWarning will be raised, and
the wrapped function will be called with the supplied arguments.
A miss will be recorded in the cache statistics.
If *unhashable* is 'ignore', the wrapped function will be called
with the supplied arguments. A miss will will be recorded in
the cache statistics.
View the cache statistics named tuple (hits, misses, maxsize, currsize)
with f.cache_info(). Clear the cache and statistics with
f.cache_clear(). Access the underlying function with f.__wrapped__.
See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
"""
def func_wrapper(func):
_cached_func = clru_cache(maxsize, typed, state, unhashable)(func)
def wrapper(*args, **kwargs):
return _cached_func(*args, **kwargs)
wrapper.__wrapped__ = func
wrapper.cache_info = _cached_func.cache_info
wrapper.cache_clear = _cached_func.cache_clear
return update_wrapper(wrapper, func)
return func_wrapper |
def add_final_training_ops(self,
embeddings,
all_labels_count,
bottleneck_tensor_size,
hidden_layer_size=BOTTLENECK_TENSOR_SIZE / 4,
dropout_keep_prob=None):
"""Adds a new softmax and fully-connected layer for training.
The set up for the softmax and fully-connected layers is based on:
https://tensorflow.org/versions/master/tutorials/mnist/beginners/index.html
This function can be customized to add arbitrary layers for
application-specific requirements.
Args:
embeddings: The embedding (bottleneck) tensor.
all_labels_count: The number of all labels including the default label.
bottleneck_tensor_size: The number of embeddings.
hidden_layer_size: The size of the hidden_layer. Roughtly, 1/4 of the
bottleneck tensor size.
dropout_keep_prob: the percentage of activation values that are retained.
Returns:
softmax: The softmax or tensor. It stores the final scores.
logits: The logits tensor.
"""
with tf.name_scope('input'):
bottleneck_input = tf.placeholder_with_default(
embeddings,
shape=[None, bottleneck_tensor_size],
name='ReshapeSqueezed')
bottleneck_with_no_gradient = tf.stop_gradient(bottleneck_input)
with tf.name_scope('Wx_plus_b'):
hidden = layers.fully_connected(bottleneck_with_no_gradient,
hidden_layer_size)
# We need a dropout when the size of the dataset is rather small.
if dropout_keep_prob:
hidden = tf.nn.dropout(hidden, dropout_keep_prob)
logits = layers.fully_connected(
hidden, all_labels_count, activation_fn=None)
softmax = tf.nn.softmax(logits, name='softmax')
return softmax, logits | def function[add_final_training_ops, parameter[self, embeddings, all_labels_count, bottleneck_tensor_size, hidden_layer_size, dropout_keep_prob]]:
constant[Adds a new softmax and fully-connected layer for training.
The set up for the softmax and fully-connected layers is based on:
https://tensorflow.org/versions/master/tutorials/mnist/beginners/index.html
This function can be customized to add arbitrary layers for
application-specific requirements.
Args:
embeddings: The embedding (bottleneck) tensor.
all_labels_count: The number of all labels including the default label.
bottleneck_tensor_size: The number of embeddings.
hidden_layer_size: The size of the hidden_layer. Roughtly, 1/4 of the
bottleneck tensor size.
dropout_keep_prob: the percentage of activation values that are retained.
Returns:
softmax: The softmax or tensor. It stores the final scores.
logits: The logits tensor.
]
with call[name[tf].name_scope, parameter[constant[input]]] begin[:]
variable[bottleneck_input] assign[=] call[name[tf].placeholder_with_default, parameter[name[embeddings]]]
variable[bottleneck_with_no_gradient] assign[=] call[name[tf].stop_gradient, parameter[name[bottleneck_input]]]
with call[name[tf].name_scope, parameter[constant[Wx_plus_b]]] begin[:]
variable[hidden] assign[=] call[name[layers].fully_connected, parameter[name[bottleneck_with_no_gradient], name[hidden_layer_size]]]
if name[dropout_keep_prob] begin[:]
variable[hidden] assign[=] call[name[tf].nn.dropout, parameter[name[hidden], name[dropout_keep_prob]]]
variable[logits] assign[=] call[name[layers].fully_connected, parameter[name[hidden], name[all_labels_count]]]
variable[softmax] assign[=] call[name[tf].nn.softmax, parameter[name[logits]]]
return[tuple[[<ast.Name object at 0x7da1b1122f20>, <ast.Name object at 0x7da1b1121c90>]]] | keyword[def] identifier[add_final_training_ops] ( identifier[self] ,
identifier[embeddings] ,
identifier[all_labels_count] ,
identifier[bottleneck_tensor_size] ,
identifier[hidden_layer_size] = identifier[BOTTLENECK_TENSOR_SIZE] / literal[int] ,
identifier[dropout_keep_prob] = keyword[None] ):
literal[string]
keyword[with] identifier[tf] . identifier[name_scope] ( literal[string] ):
identifier[bottleneck_input] = identifier[tf] . identifier[placeholder_with_default] (
identifier[embeddings] ,
identifier[shape] =[ keyword[None] , identifier[bottleneck_tensor_size] ],
identifier[name] = literal[string] )
identifier[bottleneck_with_no_gradient] = identifier[tf] . identifier[stop_gradient] ( identifier[bottleneck_input] )
keyword[with] identifier[tf] . identifier[name_scope] ( literal[string] ):
identifier[hidden] = identifier[layers] . identifier[fully_connected] ( identifier[bottleneck_with_no_gradient] ,
identifier[hidden_layer_size] )
keyword[if] identifier[dropout_keep_prob] :
identifier[hidden] = identifier[tf] . identifier[nn] . identifier[dropout] ( identifier[hidden] , identifier[dropout_keep_prob] )
identifier[logits] = identifier[layers] . identifier[fully_connected] (
identifier[hidden] , identifier[all_labels_count] , identifier[activation_fn] = keyword[None] )
identifier[softmax] = identifier[tf] . identifier[nn] . identifier[softmax] ( identifier[logits] , identifier[name] = literal[string] )
keyword[return] identifier[softmax] , identifier[logits] | def add_final_training_ops(self, embeddings, all_labels_count, bottleneck_tensor_size, hidden_layer_size=BOTTLENECK_TENSOR_SIZE / 4, dropout_keep_prob=None):
"""Adds a new softmax and fully-connected layer for training.
The set up for the softmax and fully-connected layers is based on:
https://tensorflow.org/versions/master/tutorials/mnist/beginners/index.html
This function can be customized to add arbitrary layers for
application-specific requirements.
Args:
embeddings: The embedding (bottleneck) tensor.
all_labels_count: The number of all labels including the default label.
bottleneck_tensor_size: The number of embeddings.
hidden_layer_size: The size of the hidden_layer. Roughtly, 1/4 of the
bottleneck tensor size.
dropout_keep_prob: the percentage of activation values that are retained.
Returns:
softmax: The softmax or tensor. It stores the final scores.
logits: The logits tensor.
"""
with tf.name_scope('input'):
bottleneck_input = tf.placeholder_with_default(embeddings, shape=[None, bottleneck_tensor_size], name='ReshapeSqueezed')
bottleneck_with_no_gradient = tf.stop_gradient(bottleneck_input)
with tf.name_scope('Wx_plus_b'):
hidden = layers.fully_connected(bottleneck_with_no_gradient, hidden_layer_size)
# We need a dropout when the size of the dataset is rather small.
if dropout_keep_prob:
hidden = tf.nn.dropout(hidden, dropout_keep_prob) # depends on [control=['if'], data=[]]
logits = layers.fully_connected(hidden, all_labels_count, activation_fn=None) # depends on [control=['with'], data=[]] # depends on [control=['with'], data=[]]
softmax = tf.nn.softmax(logits, name='softmax')
return (softmax, logits) |
def perform_import(val, setting_name):
    """
    If the given setting is a string import notation,
    then perform the necessary import or imports.
    """
    if val is None:
        return None
    # A sequence of dotted paths: import every element.
    if isinstance(val, (list, tuple)):
        imported = []
        for entry in val:
            imported.append(import_from_string(entry, setting_name))
        return imported
    # A single dotted path.
    if isinstance(val, six.string_types):
        return import_from_string(val, setting_name)
    return val
constant[
If the given setting is a string import notation,
then perform the necessary import or imports.
]
if compare[name[val] is constant[None]] begin[:]
return[constant[None]]
return[name[val]] | keyword[def] identifier[perform_import] ( identifier[val] , identifier[setting_name] ):
literal[string]
keyword[if] identifier[val] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[elif] identifier[isinstance] ( identifier[val] , identifier[six] . identifier[string_types] ):
keyword[return] identifier[import_from_string] ( identifier[val] , identifier[setting_name] )
keyword[elif] identifier[isinstance] ( identifier[val] ,( identifier[list] , identifier[tuple] )):
keyword[return] [ identifier[import_from_string] ( identifier[item] , identifier[setting_name] ) keyword[for] identifier[item] keyword[in] identifier[val] ]
keyword[return] identifier[val] | def perform_import(val, setting_name):
"""
If the given setting is a string import notation,
then perform the necessary import or imports.
"""
if val is None:
return None # depends on [control=['if'], data=[]]
elif isinstance(val, six.string_types):
return import_from_string(val, setting_name) # depends on [control=['if'], data=[]]
elif isinstance(val, (list, tuple)):
return [import_from_string(item, setting_name) for item in val] # depends on [control=['if'], data=[]]
return val |
def on_open(self, websocket):
'''When a new websocket connection is established it creates a
new :class:`ChatClient` and adds it to the set of clients of the
:attr:`pubsub` handler.'''
self.pubsub.add_client(ChatClient(websocket, self.channel)) | def function[on_open, parameter[self, websocket]]:
constant[When a new websocket connection is established it creates a
new :class:`ChatClient` and adds it to the set of clients of the
:attr:`pubsub` handler.]
call[name[self].pubsub.add_client, parameter[call[name[ChatClient], parameter[name[websocket], name[self].channel]]]] | keyword[def] identifier[on_open] ( identifier[self] , identifier[websocket] ):
literal[string]
identifier[self] . identifier[pubsub] . identifier[add_client] ( identifier[ChatClient] ( identifier[websocket] , identifier[self] . identifier[channel] )) | def on_open(self, websocket):
"""When a new websocket connection is established it creates a
new :class:`ChatClient` and adds it to the set of clients of the
:attr:`pubsub` handler."""
self.pubsub.add_client(ChatClient(websocket, self.channel)) |
def metadata(access_token, text): # (Legacy)
'''
Name: metadata_only
Parameters: access_token, text (string)
Return: dictionary
'''
headers = {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + str(access_token)
}
payload = {'text': text}
request = requests.post(metadata_url, json=payload, headers=headers)
if request.status_code == 201:
metadata = request.json()
return metadata
return {'status': request.status_code, "message": request.text} | def function[metadata, parameter[access_token, text]]:
constant[
Name: metadata_only
Parameters: access_token, text (string)
Return: dictionary
]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c76a0>, <ast.Constant object at 0x7da20c6c5cc0>], [<ast.Constant object at 0x7da20c6c58d0>, <ast.BinOp object at 0x7da20c6c64a0>]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c72e0>], [<ast.Name object at 0x7da20c6c4940>]]
variable[request] assign[=] call[name[requests].post, parameter[name[metadata_url]]]
if compare[name[request].status_code equal[==] constant[201]] begin[:]
variable[metadata] assign[=] call[name[request].json, parameter[]]
return[name[metadata]]
return[dictionary[[<ast.Constant object at 0x7da20c6c6500>, <ast.Constant object at 0x7da20c6c6740>], [<ast.Attribute object at 0x7da20c6c7400>, <ast.Attribute object at 0x7da20c6c69b0>]]] | keyword[def] identifier[metadata] ( identifier[access_token] , identifier[text] ):
literal[string]
identifier[headers] ={
literal[string] : literal[string] ,
literal[string] : literal[string] + identifier[str] ( identifier[access_token] )
}
identifier[payload] ={ literal[string] : identifier[text] }
identifier[request] = identifier[requests] . identifier[post] ( identifier[metadata_url] , identifier[json] = identifier[payload] , identifier[headers] = identifier[headers] )
keyword[if] identifier[request] . identifier[status_code] == literal[int] :
identifier[metadata] = identifier[request] . identifier[json] ()
keyword[return] identifier[metadata]
keyword[return] { literal[string] : identifier[request] . identifier[status_code] , literal[string] : identifier[request] . identifier[text] } | def metadata(access_token, text): # (Legacy)
'\n\tName: metadata_only\n\tParameters: access_token, text (string)\n\tReturn: dictionary\n\t'
headers = {'Content-Type': 'application/json', 'Authorization': 'Bearer ' + str(access_token)}
payload = {'text': text}
request = requests.post(metadata_url, json=payload, headers=headers)
if request.status_code == 201:
metadata = request.json()
return metadata # depends on [control=['if'], data=[]]
return {'status': request.status_code, 'message': request.text} |
def format_general_name(name):
"""Format a single general name.
>>> import ipaddress
>>> format_general_name(x509.DNSName('example.com'))
'DNS:example.com'
>>> format_general_name(x509.IPAddress(ipaddress.IPv4Address('127.0.0.1')))
'IP:127.0.0.1'
"""
if isinstance(name, x509.DirectoryName):
value = format_name(name.value)
else:
value = name.value
return '%s:%s' % (SAN_NAME_MAPPINGS[type(name)], value) | def function[format_general_name, parameter[name]]:
constant[Format a single general name.
>>> import ipaddress
>>> format_general_name(x509.DNSName('example.com'))
'DNS:example.com'
>>> format_general_name(x509.IPAddress(ipaddress.IPv4Address('127.0.0.1')))
'IP:127.0.0.1'
]
if call[name[isinstance], parameter[name[name], name[x509].DirectoryName]] begin[:]
variable[value] assign[=] call[name[format_name], parameter[name[name].value]]
return[binary_operation[constant[%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b0d187c0>, <ast.Name object at 0x7da1b0d19930>]]]] | keyword[def] identifier[format_general_name] ( identifier[name] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[name] , identifier[x509] . identifier[DirectoryName] ):
identifier[value] = identifier[format_name] ( identifier[name] . identifier[value] )
keyword[else] :
identifier[value] = identifier[name] . identifier[value]
keyword[return] literal[string] %( identifier[SAN_NAME_MAPPINGS] [ identifier[type] ( identifier[name] )], identifier[value] ) | def format_general_name(name):
"""Format a single general name.
>>> import ipaddress
>>> format_general_name(x509.DNSName('example.com'))
'DNS:example.com'
>>> format_general_name(x509.IPAddress(ipaddress.IPv4Address('127.0.0.1')))
'IP:127.0.0.1'
"""
if isinstance(name, x509.DirectoryName):
value = format_name(name.value) # depends on [control=['if'], data=[]]
else:
value = name.value
return '%s:%s' % (SAN_NAME_MAPPINGS[type(name)], value) |
def load_tile(self, tile):
'''load a tile from cache or tile server'''
# see if its in the tile cache
key = tile.key()
if key in self._tile_cache:
img = self._tile_cache[key]
if np.array_equal(img, self._unavailable):
img = self.load_tile_lowres(tile)
if img is None:
img = self._unavailable
return img
path = self.tile_to_path(tile)
ret = cv2.imread(path)
if ret is not None:
# if it is an old tile, then try to refresh
if os.path.getmtime(path) + self.refresh_age < time.time():
try:
self._download_pending[key].refresh_time()
except Exception:
self._download_pending[key] = tile
self.start_download_thread()
# add it to the tile cache
self._tile_cache[key] = ret
while len(self._tile_cache) > self.cache_size:
self._tile_cache.popitem(0)
return ret
if not self.download:
img = self.load_tile_lowres(tile)
if img is None:
img = self._unavailable
return img
try:
self._download_pending[key].refresh_time()
except Exception:
self._download_pending[key] = tile
self.start_download_thread()
img = self.load_tile_lowres(tile)
if img is None:
img = self._loading
return img | def function[load_tile, parameter[self, tile]]:
constant[load a tile from cache or tile server]
variable[key] assign[=] call[name[tile].key, parameter[]]
if compare[name[key] in name[self]._tile_cache] begin[:]
variable[img] assign[=] call[name[self]._tile_cache][name[key]]
if call[name[np].array_equal, parameter[name[img], name[self]._unavailable]] begin[:]
variable[img] assign[=] call[name[self].load_tile_lowres, parameter[name[tile]]]
if compare[name[img] is constant[None]] begin[:]
variable[img] assign[=] name[self]._unavailable
return[name[img]]
variable[path] assign[=] call[name[self].tile_to_path, parameter[name[tile]]]
variable[ret] assign[=] call[name[cv2].imread, parameter[name[path]]]
if compare[name[ret] is_not constant[None]] begin[:]
if compare[binary_operation[call[name[os].path.getmtime, parameter[name[path]]] + name[self].refresh_age] less[<] call[name[time].time, parameter[]]] begin[:]
<ast.Try object at 0x7da20c76e1d0>
call[name[self].start_download_thread, parameter[]]
call[name[self]._tile_cache][name[key]] assign[=] name[ret]
while compare[call[name[len], parameter[name[self]._tile_cache]] greater[>] name[self].cache_size] begin[:]
call[name[self]._tile_cache.popitem, parameter[constant[0]]]
return[name[ret]]
if <ast.UnaryOp object at 0x7da1b26afa00> begin[:]
variable[img] assign[=] call[name[self].load_tile_lowres, parameter[name[tile]]]
if compare[name[img] is constant[None]] begin[:]
variable[img] assign[=] name[self]._unavailable
return[name[img]]
<ast.Try object at 0x7da1b26af4c0>
call[name[self].start_download_thread, parameter[]]
variable[img] assign[=] call[name[self].load_tile_lowres, parameter[name[tile]]]
if compare[name[img] is constant[None]] begin[:]
variable[img] assign[=] name[self]._loading
return[name[img]] | keyword[def] identifier[load_tile] ( identifier[self] , identifier[tile] ):
literal[string]
identifier[key] = identifier[tile] . identifier[key] ()
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[_tile_cache] :
identifier[img] = identifier[self] . identifier[_tile_cache] [ identifier[key] ]
keyword[if] identifier[np] . identifier[array_equal] ( identifier[img] , identifier[self] . identifier[_unavailable] ):
identifier[img] = identifier[self] . identifier[load_tile_lowres] ( identifier[tile] )
keyword[if] identifier[img] keyword[is] keyword[None] :
identifier[img] = identifier[self] . identifier[_unavailable]
keyword[return] identifier[img]
identifier[path] = identifier[self] . identifier[tile_to_path] ( identifier[tile] )
identifier[ret] = identifier[cv2] . identifier[imread] ( identifier[path] )
keyword[if] identifier[ret] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[os] . identifier[path] . identifier[getmtime] ( identifier[path] )+ identifier[self] . identifier[refresh_age] < identifier[time] . identifier[time] ():
keyword[try] :
identifier[self] . identifier[_download_pending] [ identifier[key] ]. identifier[refresh_time] ()
keyword[except] identifier[Exception] :
identifier[self] . identifier[_download_pending] [ identifier[key] ]= identifier[tile]
identifier[self] . identifier[start_download_thread] ()
identifier[self] . identifier[_tile_cache] [ identifier[key] ]= identifier[ret]
keyword[while] identifier[len] ( identifier[self] . identifier[_tile_cache] )> identifier[self] . identifier[cache_size] :
identifier[self] . identifier[_tile_cache] . identifier[popitem] ( literal[int] )
keyword[return] identifier[ret]
keyword[if] keyword[not] identifier[self] . identifier[download] :
identifier[img] = identifier[self] . identifier[load_tile_lowres] ( identifier[tile] )
keyword[if] identifier[img] keyword[is] keyword[None] :
identifier[img] = identifier[self] . identifier[_unavailable]
keyword[return] identifier[img]
keyword[try] :
identifier[self] . identifier[_download_pending] [ identifier[key] ]. identifier[refresh_time] ()
keyword[except] identifier[Exception] :
identifier[self] . identifier[_download_pending] [ identifier[key] ]= identifier[tile]
identifier[self] . identifier[start_download_thread] ()
identifier[img] = identifier[self] . identifier[load_tile_lowres] ( identifier[tile] )
keyword[if] identifier[img] keyword[is] keyword[None] :
identifier[img] = identifier[self] . identifier[_loading]
keyword[return] identifier[img] | def load_tile(self, tile):
"""load a tile from cache or tile server"""
# see if its in the tile cache
key = tile.key()
if key in self._tile_cache:
img = self._tile_cache[key]
if np.array_equal(img, self._unavailable):
img = self.load_tile_lowres(tile)
if img is None:
img = self._unavailable # depends on [control=['if'], data=['img']] # depends on [control=['if'], data=[]]
return img # depends on [control=['if'], data=['key']]
path = self.tile_to_path(tile)
ret = cv2.imread(path)
if ret is not None:
# if it is an old tile, then try to refresh
if os.path.getmtime(path) + self.refresh_age < time.time():
try:
self._download_pending[key].refresh_time() # depends on [control=['try'], data=[]]
except Exception:
self._download_pending[key] = tile # depends on [control=['except'], data=[]]
self.start_download_thread() # depends on [control=['if'], data=[]]
# add it to the tile cache
self._tile_cache[key] = ret
while len(self._tile_cache) > self.cache_size:
self._tile_cache.popitem(0) # depends on [control=['while'], data=[]]
return ret # depends on [control=['if'], data=['ret']]
if not self.download:
img = self.load_tile_lowres(tile)
if img is None:
img = self._unavailable # depends on [control=['if'], data=['img']]
return img # depends on [control=['if'], data=[]]
try:
self._download_pending[key].refresh_time() # depends on [control=['try'], data=[]]
except Exception:
self._download_pending[key] = tile # depends on [control=['except'], data=[]]
self.start_download_thread()
img = self.load_tile_lowres(tile)
if img is None:
img = self._loading # depends on [control=['if'], data=['img']]
return img |
def setDefault(self, key, value):
    """
    Sets the default for a given key to the value.

    :param key | <str>
           value | <str> || <XResourceManager>
    """
    if value is None:
        # A None value clears any stored default for the key (no-op if absent).
        self._defaults.pop(key, None)
        return
    self._defaults[key] = value
constant[
Sets the default for a given key to the value.
:param key | <str>
value | <str> || <XResourceManager>
]
if compare[name[value] is constant[None]] begin[:]
call[name[self]._defaults.pop, parameter[name[key], constant[None]]] | keyword[def] identifier[setDefault] ( identifier[self] , identifier[key] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[self] . identifier[_defaults] . identifier[pop] ( identifier[key] , keyword[None] )
keyword[else] :
identifier[self] . identifier[_defaults] [ identifier[key] ]= identifier[value] | def setDefault(self, key, value):
"""
Sets the default for a given key to the value.
:param key | <str>
value | <str> || <XResourceManager>
"""
if value is None:
self._defaults.pop(key, None) # depends on [control=['if'], data=[]]
else:
self._defaults[key] = value |
def createNoiseExperimentArgs():
  """Run the probability of false negatives with noise experiment.

  Returns a list of argument tuples for ./sdr_calculations2, one per
  (n, a, noise) combination, covering noise fractions 0.75, 0.80 and 0.85.
  """
  experimentArguments = []
  n = 6000
  for a in [128]:
    # Step over 75%, 80%, 85% using integer hundredths. The original
    # `noisePct += 0.05` float accumulation made 0.80 + 0.05 evaluate to
    # 0.8500000000000001 > 0.85, silently skipping the final 0.85 step.
    for pctHundredths in range(75, 90, 5):
      noisePct = pctHundredths / 100.0
      noise = int(round(noisePct * a, 0))
      # Some parameter combinations are just not worth running!
      experimentArguments.append(
          ("./sdr_calculations2",
           "results_noise_10m/temp_" + str(n) + "_" + str(a) + "_" + str(noise) + "_30.csv",
           "200000", str(n), str(a), str(noise))
      )
  return experimentArguments
constant[Run the probability of false negatives with noise experiment.]
variable[experimentArguments] assign[=] list[[]]
variable[n] assign[=] constant[6000]
for taget[name[a]] in starred[list[[<ast.Constant object at 0x7da1b0847f10>]]] begin[:]
variable[noisePct] assign[=] constant[0.75]
while compare[name[noisePct] less_or_equal[<=] constant[0.85]] begin[:]
variable[noise] assign[=] call[name[int], parameter[call[name[round], parameter[binary_operation[name[noisePct] * name[a]], constant[0]]]]]
call[name[experimentArguments].append, parameter[tuple[[<ast.Constant object at 0x7da18eb54970>, <ast.BinOp object at 0x7da18eb55e40>, <ast.Constant object at 0x7da1b0855b40>, <ast.Call object at 0x7da1b0855900>, <ast.Call object at 0x7da1b0856260>, <ast.Call object at 0x7da1b0855bd0>]]]]
<ast.AugAssign object at 0x7da1b0856740>
return[name[experimentArguments]] | keyword[def] identifier[createNoiseExperimentArgs] ():
literal[string]
identifier[experimentArguments] =[]
identifier[n] = literal[int]
keyword[for] identifier[a] keyword[in] [ literal[int] ]:
identifier[noisePct] = literal[int]
keyword[while] identifier[noisePct] <= literal[int] :
identifier[noise] = identifier[int] ( identifier[round] ( identifier[noisePct] * identifier[a] , literal[int] ))
identifier[experimentArguments] . identifier[append] (
( literal[string] ,
literal[string] + identifier[str] ( identifier[n] )+ literal[string] + identifier[str] ( identifier[a] )+ literal[string] + identifier[str] ( identifier[noise] )+ literal[string] ,
literal[string] , identifier[str] ( identifier[n] ), identifier[str] ( identifier[a] ), identifier[str] ( identifier[noise] ))
)
identifier[noisePct] += literal[int]
keyword[return] identifier[experimentArguments] | def createNoiseExperimentArgs():
"""Run the probability of false negatives with noise experiment."""
experimentArguments = []
n = 6000
for a in [128]:
noisePct = 0.75
while noisePct <= 0.85:
noise = int(round(noisePct * a, 0))
# Some parameter combinations are just not worth running!
experimentArguments.append(('./sdr_calculations2', 'results_noise_10m/temp_' + str(n) + '_' + str(a) + '_' + str(noise) + '_30.csv', '200000', str(n), str(a), str(noise)))
noisePct += 0.05 # depends on [control=['while'], data=['noisePct']] # depends on [control=['for'], data=['a']]
return experimentArguments |
def is_key(cls, result):
"""Return ``True`` if result is a key object."""
from boto.s3.key import Key
return isinstance(result, Key) | def function[is_key, parameter[cls, result]]:
constant[Return ``True`` if result is a key object.]
from relative_module[boto.s3.key] import module[Key]
return[call[name[isinstance], parameter[name[result], name[Key]]]] | keyword[def] identifier[is_key] ( identifier[cls] , identifier[result] ):
literal[string]
keyword[from] identifier[boto] . identifier[s3] . identifier[key] keyword[import] identifier[Key]
keyword[return] identifier[isinstance] ( identifier[result] , identifier[Key] ) | def is_key(cls, result):
"""Return ``True`` if result is a key object."""
from boto.s3.key import Key
return isinstance(result, Key) |
def do_eos(self, _: argparse.Namespace) -> None:
    """Handle cleanup when a script has finished executing"""
    # Pop the directory pushed when the just-finished script started,
    # unless the stack is already empty.
    script_dirs = self._script_dir
    if script_dirs:
        script_dirs.pop()
constant[Handle cleanup when a script has finished executing]
if name[self]._script_dir begin[:]
call[name[self]._script_dir.pop, parameter[]] | keyword[def] identifier[do_eos] ( identifier[self] , identifier[_] : identifier[argparse] . identifier[Namespace] )-> keyword[None] :
literal[string]
keyword[if] identifier[self] . identifier[_script_dir] :
identifier[self] . identifier[_script_dir] . identifier[pop] () | def do_eos(self, _: argparse.Namespace) -> None:
"""Handle cleanup when a script has finished executing"""
if self._script_dir:
self._script_dir.pop() # depends on [control=['if'], data=[]] |
def GetJson(self):
    """Returns JSON version of ClientData compatible with FIDO spec."""
    # The U2F Raw Messages specification encodes the challenge as URL-safe
    # Base64 *without* padding (RFC 4648). Python's encoder always pads, so
    # the trailing '=' characters are stripped off.
    challenge = base64.urlsafe_b64encode(self.raw_server_challenge)
    challenge = challenge.decode().rstrip('=')
    client_data = {
        'typ': self.typ,
        'challenge': challenge,
        'origin': self.origin,
    }
    return json.dumps(client_data, sort_keys=True)
constant[Returns JSON version of ClientData compatible with FIDO spec.]
variable[server_challenge_b64] assign[=] call[call[name[base64].urlsafe_b64encode, parameter[name[self].raw_server_challenge]].decode, parameter[]]
variable[server_challenge_b64] assign[=] call[name[server_challenge_b64].rstrip, parameter[constant[=]]]
return[call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da20c6c4a30>, <ast.Constant object at 0x7da20c6c5870>, <ast.Constant object at 0x7da20c6c59c0>], [<ast.Attribute object at 0x7da20c6c4520>, <ast.Name object at 0x7da20c6c44f0>, <ast.Attribute object at 0x7da20c6c4be0>]]]]] | keyword[def] identifier[GetJson] ( identifier[self] ):
literal[string]
identifier[server_challenge_b64] = identifier[base64] . identifier[urlsafe_b64encode] (
identifier[self] . identifier[raw_server_challenge] ). identifier[decode] ()
identifier[server_challenge_b64] = identifier[server_challenge_b64] . identifier[rstrip] ( literal[string] )
keyword[return] identifier[json] . identifier[dumps] ({ literal[string] : identifier[self] . identifier[typ] ,
literal[string] : identifier[server_challenge_b64] ,
literal[string] : identifier[self] . identifier[origin] }, identifier[sort_keys] = keyword[True] ) | def GetJson(self):
"""Returns JSON version of ClientData compatible with FIDO spec."""
# The U2F Raw Messages specification specifies that the challenge is encoded
# with URL safe Base64 without padding encoding specified in RFC 4648.
# Python does not natively support a paddingless encoding, so we simply
# remove the padding from the end of the string.
server_challenge_b64 = base64.urlsafe_b64encode(self.raw_server_challenge).decode()
server_challenge_b64 = server_challenge_b64.rstrip('=')
return json.dumps({'typ': self.typ, 'challenge': server_challenge_b64, 'origin': self.origin}, sort_keys=True) |
def debug(self, msg, *args, **kwargs):
    """Log 'msg % args' with severity 'DEBUG'.

    The message is first expanded via ``self.getExtendedMsg`` so that any
    extra context is prepended before delegating to the wrapped base
    logger (note: ``self`` is passed explicitly, unbound-method style).

    To pass exception information, use the keyword argument exc_info with
    a true value, e.g.

    logger.debug("Houston, we have a %s", "thorny problem", exc_info=1)
    """
    extended = self.getExtendedMsg(msg)
    self._baseLogger.debug(self, extended, *args, **kwargs)
constant[
Log 'msg % args' with severity 'DEBUG'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.debug("Houston, we have a %s", "thorny problem", exc_info=1)
]
call[name[self]._baseLogger.debug, parameter[name[self], call[name[self].getExtendedMsg, parameter[name[msg]]], <ast.Starred object at 0x7da1b23450c0>]] | keyword[def] identifier[debug] ( identifier[self] , identifier[msg] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[_baseLogger] . identifier[debug] ( identifier[self] , identifier[self] . identifier[getExtendedMsg] ( identifier[msg] ),* identifier[args] ,** identifier[kwargs] ) | def debug(self, msg, *args, **kwargs):
"""
Log 'msg % args' with severity 'DEBUG'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.debug("Houston, we have a %s", "thorny problem", exc_info=1)
"""
self._baseLogger.debug(self, self.getExtendedMsg(msg), *args, **kwargs) |
def connect(self):
    """Returns an open connection (socket) to the Splunk instance.

    This method is used for writing bulk events to an index or similar tasks
    where the overhead of opening a connection multiple times would be
    prohibitive.

    :returns: A socket.

    **Example**::

        import splunklib.binding as binding
        c = binding.connect(...)
        socket = c.connect()
        socket.write("POST %s HTTP/1.1\\r\\n" % "some/path/to/post/to")
        socket.write("Host: %s:%s\\r\\n" % (c.host, c.port))
        socket.write("Accept-Encoding: identity\\r\\n")
        socket.write("Authorization: %s\\r\\n" % c.token)
        socket.write("X-Splunk-Input-Mode: Streaming\\r\\n")
        socket.write("\\r\\n")
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    if self.scheme == "https":
        # ssl.wrap_socket() was deprecated in Python 3.7 and removed in
        # 3.12.  Build a TLS client context with verification disabled,
        # which matches the old wrap_socket() default behaviour (no
        # certificate or hostname checks).
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
        sock = context.wrap_socket(sock, server_hostname=self.host)
    sock.connect((socket.gethostbyname(self.host), self.port))
    return sock
constant[Returns an open connection (socket) to the Splunk instance.
This method is used for writing bulk events to an index or similar tasks
where the overhead of opening a connection multiple times would be
prohibitive.
:returns: A socket.
**Example**::
import splunklib.binding as binding
c = binding.connect(...)
socket = c.connect()
socket.write("POST %s HTTP/1.1\r\n" % "some/path/to/post/to")
socket.write("Host: %s:%s\r\n" % (c.host, c.port))
socket.write("Accept-Encoding: identity\r\n")
socket.write("Authorization: %s\r\n" % c.token)
socket.write("X-Splunk-Input-Mode: Streaming\r\n")
socket.write("\r\n")
]
variable[sock] assign[=] call[name[socket].socket, parameter[name[socket].AF_INET, name[socket].SOCK_STREAM]]
if compare[name[self].scheme equal[==] constant[https]] begin[:]
variable[sock] assign[=] call[name[ssl].wrap_socket, parameter[name[sock]]]
call[name[sock].connect, parameter[tuple[[<ast.Call object at 0x7da1b19f24d0>, <ast.Attribute object at 0x7da1b19f0940>]]]]
return[name[sock]] | keyword[def] identifier[connect] ( identifier[self] ):
literal[string]
identifier[sock] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_STREAM] )
keyword[if] identifier[self] . identifier[scheme] == literal[string] :
identifier[sock] = identifier[ssl] . identifier[wrap_socket] ( identifier[sock] )
identifier[sock] . identifier[connect] (( identifier[socket] . identifier[gethostbyname] ( identifier[self] . identifier[host] ), identifier[self] . identifier[port] ))
keyword[return] identifier[sock] | def connect(self):
"""Returns an open connection (socket) to the Splunk instance.
This method is used for writing bulk events to an index or similar tasks
where the overhead of opening a connection multiple times would be
prohibitive.
:returns: A socket.
**Example**::
import splunklib.binding as binding
c = binding.connect(...)
socket = c.connect()
socket.write("POST %s HTTP/1.1\\r\\n" % "some/path/to/post/to")
socket.write("Host: %s:%s\\r\\n" % (c.host, c.port))
socket.write("Accept-Encoding: identity\\r\\n")
socket.write("Authorization: %s\\r\\n" % c.token)
socket.write("X-Splunk-Input-Mode: Streaming\\r\\n")
socket.write("\\r\\n")
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if self.scheme == 'https':
sock = ssl.wrap_socket(sock) # depends on [control=['if'], data=[]]
sock.connect((socket.gethostbyname(self.host), self.port))
return sock |
def plus_dora(tile, dora_indicators):
    """
    Count how many dora the given tile is worth.

    :param tile: int 136 tiles format
    :param dora_indicators: array of 136 tiles format
    :return: int count of dora
    """
    tile_index = tile // 4
    count = 0
    # a 9-indicator wraps around to the 1 of the same suit
    # (indicator index 8/17/26 -> dora index 0/9/18, i.e. indicator - 8)
    suit_wrap = {8: -1, 17: 8, 26: 17}
    for indicator in dora_indicators:
        indicator_index = indicator // 4
        if tile_index < EAST:
            # sou, pin, man
            indicator_index = suit_wrap.get(indicator_index, indicator_index)
            if tile_index == indicator_index + 1:
                count += 1
        else:
            # honor tiles: a suited indicator can never point at one
            if indicator_index < EAST:
                continue
            honor_indicator = indicator_index - 9 * 3
            honor_tile = tile_index - 9 * 3
            # north indicator wraps around to east
            if honor_indicator == 3:
                honor_indicator = -1
            # chun indicator wraps around to haku
            if honor_indicator == 6:
                honor_indicator = 3
            if honor_tile == honor_indicator + 1:
                count += 1
    return count
constant[
:param tile: int 136 tiles format
:param dora_indicators: array of 136 tiles format
:return: int count of dora
]
variable[tile_index] assign[=] binary_operation[name[tile] <ast.FloorDiv object at 0x7da2590d6bc0> constant[4]]
variable[dora_count] assign[=] constant[0]
for taget[name[dora]] in starred[name[dora_indicators]] begin[:]
<ast.AugAssign object at 0x7da1b07e90c0>
if compare[name[tile_index] less[<] name[EAST]] begin[:]
if compare[name[dora] equal[==] constant[8]] begin[:]
variable[dora] assign[=] <ast.UnaryOp object at 0x7da1b07e9390>
if compare[name[tile_index] equal[==] binary_operation[name[dora] + constant[1]]] begin[:]
<ast.AugAssign object at 0x7da1b07e8910>
return[name[dora_count]] | keyword[def] identifier[plus_dora] ( identifier[tile] , identifier[dora_indicators] ):
literal[string]
identifier[tile_index] = identifier[tile] // literal[int]
identifier[dora_count] = literal[int]
keyword[for] identifier[dora] keyword[in] identifier[dora_indicators] :
identifier[dora] //= literal[int]
keyword[if] identifier[tile_index] < identifier[EAST] :
keyword[if] identifier[dora] == literal[int] :
identifier[dora] =- literal[int]
keyword[elif] identifier[dora] == literal[int] :
identifier[dora] = literal[int]
keyword[elif] identifier[dora] == literal[int] :
identifier[dora] = literal[int]
keyword[if] identifier[tile_index] == identifier[dora] + literal[int] :
identifier[dora_count] += literal[int]
keyword[else] :
keyword[if] identifier[dora] < identifier[EAST] :
keyword[continue]
identifier[dora] -= literal[int] * literal[int]
identifier[tile_index_temp] = identifier[tile_index] - literal[int] * literal[int]
keyword[if] identifier[dora] == literal[int] :
identifier[dora] =- literal[int]
keyword[if] identifier[dora] == literal[int] :
identifier[dora] = literal[int]
keyword[if] identifier[tile_index_temp] == identifier[dora] + literal[int] :
identifier[dora_count] += literal[int]
keyword[return] identifier[dora_count] | def plus_dora(tile, dora_indicators):
"""
:param tile: int 136 tiles format
:param dora_indicators: array of 136 tiles format
:return: int count of dora
"""
tile_index = tile // 4
dora_count = 0
for dora in dora_indicators:
dora //= 4
# sou, pin, man
if tile_index < EAST:
# with indicator 9, dora will be 1
if dora == 8:
dora = -1 # depends on [control=['if'], data=['dora']]
elif dora == 17:
dora = 8 # depends on [control=['if'], data=['dora']]
elif dora == 26:
dora = 17 # depends on [control=['if'], data=['dora']]
if tile_index == dora + 1:
dora_count += 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['tile_index']]
else:
if dora < EAST:
continue # depends on [control=['if'], data=[]]
dora -= 9 * 3
tile_index_temp = tile_index - 9 * 3
# dora indicator is north
if dora == 3:
dora = -1 # depends on [control=['if'], data=['dora']]
# dora indicator is hatsu
if dora == 6:
dora = 3 # depends on [control=['if'], data=['dora']]
if tile_index_temp == dora + 1:
dora_count += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dora']]
return dora_count |
def _parse_request(self, url=None, handle=None):
    '''Handles setup of the plugin state, including request
    arguments, handle, mode.

    This method never needs to be called directly. For testing, see
    plugin.test()
    '''
    # Fall back to the values passed on sys.argv when not supplied
    # explicitly (self.redirect supplies a full url, so both forms
    # must be accepted).
    if url is None:
        query = sys.argv[2] if len(sys.argv) == 3 else ''
        url = sys.argv[0] + query
    if handle is None:
        handle = sys.argv[1]
    return Request(url, handle)
constant[Handles setup of the plugin state, including request
arguments, handle, mode.
This method never needs to be called directly. For testing, see
plugin.test()
]
if compare[name[url] is constant[None]] begin[:]
variable[url] assign[=] call[name[sys].argv][constant[0]]
if compare[call[name[len], parameter[name[sys].argv]] equal[==] constant[3]] begin[:]
<ast.AugAssign object at 0x7da18f58ea10>
if compare[name[handle] is constant[None]] begin[:]
variable[handle] assign[=] call[name[sys].argv][constant[1]]
return[call[name[Request], parameter[name[url], name[handle]]]] | keyword[def] identifier[_parse_request] ( identifier[self] , identifier[url] = keyword[None] , identifier[handle] = keyword[None] ):
literal[string]
keyword[if] identifier[url] keyword[is] keyword[None] :
identifier[url] = identifier[sys] . identifier[argv] [ literal[int] ]
keyword[if] identifier[len] ( identifier[sys] . identifier[argv] )== literal[int] :
identifier[url] += identifier[sys] . identifier[argv] [ literal[int] ]
keyword[if] identifier[handle] keyword[is] keyword[None] :
identifier[handle] = identifier[sys] . identifier[argv] [ literal[int] ]
keyword[return] identifier[Request] ( identifier[url] , identifier[handle] ) | def _parse_request(self, url=None, handle=None):
"""Handles setup of the plugin state, including request
arguments, handle, mode.
This method never needs to be called directly. For testing, see
plugin.test()
"""
# To accomdate self.redirect, we need to be able to parse a full url as
# well
if url is None:
url = sys.argv[0]
if len(sys.argv) == 3:
url += sys.argv[2] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['url']]
if handle is None:
handle = sys.argv[1] # depends on [control=['if'], data=['handle']]
return Request(url, handle) |
def spkez(targ, et, ref, abcorr, obs):
    """
    Return the state (position and velocity) of a target body
    relative to an observing body, optionally corrected for light
    time (planetary aberration) and stellar aberration.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkez_c.html

    :param targ: Target body.
    :type targ: int
    :param et: Observer epoch.
    :type et: float
    :param ref: Reference frame of output state vector.
    :type ref: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param obs: Observing body.
    :type obs: int
    :return:
            State of target,
            One way light time between observer and target.
    :rtype: tuple
    """
    # Marshal the Python arguments into their ctypes equivalents.
    c_targ = ctypes.c_int(targ)
    c_et = ctypes.c_double(et)
    c_ref = stypes.stringToCharP(ref)
    c_abcorr = stypes.stringToCharP(abcorr)
    c_obs = ctypes.c_int(obs)
    # Output buffers: a 6-element state vector and the one-way light time.
    state = stypes.emptyDoubleVector(6)
    light_time = ctypes.c_double()
    libspice.spkez_c(c_targ, c_et, c_ref, c_abcorr, c_obs, state,
                     ctypes.byref(light_time))
    return stypes.cVectorToPython(state), light_time.value
constant[
Return the state (position and velocity) of a target body
relative to an observing body, optionally corrected for light
time (planetary aberration) and stellar aberration.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkez_c.html
:param targ: Target body.
:type targ: int
:param et: Observer epoch.
:type et: float
:param ref: Reference frame of output state vector.
:type ref: str
:param abcorr: Aberration correction flag.
:type abcorr: str
:param obs: Observing body.
:type obs: int
:return:
State of target,
One way light time between observer and target.
:rtype: tuple
]
variable[targ] assign[=] call[name[ctypes].c_int, parameter[name[targ]]]
variable[et] assign[=] call[name[ctypes].c_double, parameter[name[et]]]
variable[ref] assign[=] call[name[stypes].stringToCharP, parameter[name[ref]]]
variable[abcorr] assign[=] call[name[stypes].stringToCharP, parameter[name[abcorr]]]
variable[obs] assign[=] call[name[ctypes].c_int, parameter[name[obs]]]
variable[starg] assign[=] call[name[stypes].emptyDoubleVector, parameter[constant[6]]]
variable[lt] assign[=] call[name[ctypes].c_double, parameter[]]
call[name[libspice].spkez_c, parameter[name[targ], name[et], name[ref], name[abcorr], name[obs], name[starg], call[name[ctypes].byref, parameter[name[lt]]]]]
return[tuple[[<ast.Call object at 0x7da18f09c490>, <ast.Attribute object at 0x7da18f09e110>]]] | keyword[def] identifier[spkez] ( identifier[targ] , identifier[et] , identifier[ref] , identifier[abcorr] , identifier[obs] ):
literal[string]
identifier[targ] = identifier[ctypes] . identifier[c_int] ( identifier[targ] )
identifier[et] = identifier[ctypes] . identifier[c_double] ( identifier[et] )
identifier[ref] = identifier[stypes] . identifier[stringToCharP] ( identifier[ref] )
identifier[abcorr] = identifier[stypes] . identifier[stringToCharP] ( identifier[abcorr] )
identifier[obs] = identifier[ctypes] . identifier[c_int] ( identifier[obs] )
identifier[starg] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] )
identifier[lt] = identifier[ctypes] . identifier[c_double] ()
identifier[libspice] . identifier[spkez_c] ( identifier[targ] , identifier[et] , identifier[ref] , identifier[abcorr] , identifier[obs] , identifier[starg] , identifier[ctypes] . identifier[byref] ( identifier[lt] ))
keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[starg] ), identifier[lt] . identifier[value] | def spkez(targ, et, ref, abcorr, obs):
"""
Return the state (position and velocity) of a target body
relative to an observing body, optionally corrected for light
time (planetary aberration) and stellar aberration.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkez_c.html
:param targ: Target body.
:type targ: int
:param et: Observer epoch.
:type et: float
:param ref: Reference frame of output state vector.
:type ref: str
:param abcorr: Aberration correction flag.
:type abcorr: str
:param obs: Observing body.
:type obs: int
:return:
State of target,
One way light time between observer and target.
:rtype: tuple
"""
targ = ctypes.c_int(targ)
et = ctypes.c_double(et)
ref = stypes.stringToCharP(ref)
abcorr = stypes.stringToCharP(abcorr)
obs = ctypes.c_int(obs)
starg = stypes.emptyDoubleVector(6)
lt = ctypes.c_double()
libspice.spkez_c(targ, et, ref, abcorr, obs, starg, ctypes.byref(lt))
return (stypes.cVectorToPython(starg), lt.value) |
def zoning_defined_configuration_alias_member_entry_alias_entry_name(self, **kwargs):
    """Auto Generated Code

    Builds the netconf XML tree for a zoning alias member entry and
    hands it to the callback (defaults to ``self._callback``).
    """
    config = ET.Element("config")
    zoning = ET.SubElement(
        config, "zoning", xmlns="urn:brocade.com:mgmt:brocade-zone"
    )
    defined_cfg = ET.SubElement(zoning, "defined-configuration")
    alias_el = ET.SubElement(defined_cfg, "alias")
    # The alias name acts as the list key for this config node.
    ET.SubElement(alias_el, "alias-name").text = kwargs.pop('alias_name')
    member = ET.SubElement(alias_el, "member-entry")
    ET.SubElement(member, "alias-entry-name").text = kwargs.pop('alias_entry_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[zoning] assign[=] call[name[ET].SubElement, parameter[name[config], constant[zoning]]]
variable[defined_configuration] assign[=] call[name[ET].SubElement, parameter[name[zoning], constant[defined-configuration]]]
variable[alias] assign[=] call[name[ET].SubElement, parameter[name[defined_configuration], constant[alias]]]
variable[alias_name_key] assign[=] call[name[ET].SubElement, parameter[name[alias], constant[alias-name]]]
name[alias_name_key].text assign[=] call[name[kwargs].pop, parameter[constant[alias_name]]]
variable[member_entry] assign[=] call[name[ET].SubElement, parameter[name[alias], constant[member-entry]]]
variable[alias_entry_name] assign[=] call[name[ET].SubElement, parameter[name[member_entry], constant[alias-entry-name]]]
name[alias_entry_name].text assign[=] call[name[kwargs].pop, parameter[constant[alias_entry_name]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[zoning_defined_configuration_alias_member_entry_alias_entry_name] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[zoning] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[defined_configuration] = identifier[ET] . identifier[SubElement] ( identifier[zoning] , literal[string] )
identifier[alias] = identifier[ET] . identifier[SubElement] ( identifier[defined_configuration] , literal[string] )
identifier[alias_name_key] = identifier[ET] . identifier[SubElement] ( identifier[alias] , literal[string] )
identifier[alias_name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[member_entry] = identifier[ET] . identifier[SubElement] ( identifier[alias] , literal[string] )
identifier[alias_entry_name] = identifier[ET] . identifier[SubElement] ( identifier[member_entry] , literal[string] )
identifier[alias_entry_name] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def zoning_defined_configuration_alias_member_entry_alias_entry_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
zoning = ET.SubElement(config, 'zoning', xmlns='urn:brocade.com:mgmt:brocade-zone')
defined_configuration = ET.SubElement(zoning, 'defined-configuration')
alias = ET.SubElement(defined_configuration, 'alias')
alias_name_key = ET.SubElement(alias, 'alias-name')
alias_name_key.text = kwargs.pop('alias_name')
member_entry = ET.SubElement(alias, 'member-entry')
alias_entry_name = ET.SubElement(member_entry, 'alias-entry-name')
alias_entry_name.text = kwargs.pop('alias_entry_name')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def protocol(self, name):
    """Returns the protocol object in the database given a certain name.

    Raises an error (via ``Query.one()``) if that does not exist.
    """
    query = self.query(Protocol).filter(Protocol.name == name)
    return query.one()
constant[Returns the protocol object in the database given a certain name. Raises
an error if that does not exist.]
return[call[call[call[name[self].query, parameter[name[Protocol]]].filter, parameter[compare[name[Protocol].name equal[==] name[name]]]].one, parameter[]]] | keyword[def] identifier[protocol] ( identifier[self] , identifier[name] ):
literal[string]
keyword[return] identifier[self] . identifier[query] ( identifier[Protocol] ). identifier[filter] ( identifier[Protocol] . identifier[name] == identifier[name] ). identifier[one] () | def protocol(self, name):
"""Returns the protocol object in the database given a certain name. Raises
an error if that does not exist."""
return self.query(Protocol).filter(Protocol.name == name).one() |
def get_artists(self, limit=50, cacheable=True):
    """
    Returns a sequence of LibraryItem objects, each wrapping an Artist
    together with its playcount and tagcount.

    if limit==None it will return all (may take a while)
    """
    address = self.ws_prefix + ".getArtists"
    items = []
    for node in _collect_nodes(limit, self, address, cacheable):
        artist = Artist(_extract(node, "name"), self.network)
        plays = _number(_extract(node, "playcount"))
        tags = _number(_extract(node, "tagcount"))
        items.append(LibraryItem(artist, plays, tags))
    return items
constant[
Returns a sequence of Album objects
if limit==None it will return all (may take a while)
]
variable[seq] assign[=] list[[]]
for taget[name[node]] in starred[call[name[_collect_nodes], parameter[name[limit], name[self], binary_operation[name[self].ws_prefix + constant[.getArtists]], name[cacheable]]]] begin[:]
variable[name] assign[=] call[name[_extract], parameter[name[node], constant[name]]]
variable[playcount] assign[=] call[name[_number], parameter[call[name[_extract], parameter[name[node], constant[playcount]]]]]
variable[tagcount] assign[=] call[name[_number], parameter[call[name[_extract], parameter[name[node], constant[tagcount]]]]]
call[name[seq].append, parameter[call[name[LibraryItem], parameter[call[name[Artist], parameter[name[name], name[self].network]], name[playcount], name[tagcount]]]]]
return[name[seq]] | keyword[def] identifier[get_artists] ( identifier[self] , identifier[limit] = literal[int] , identifier[cacheable] = keyword[True] ):
literal[string]
identifier[seq] =[]
keyword[for] identifier[node] keyword[in] identifier[_collect_nodes] (
identifier[limit] , identifier[self] , identifier[self] . identifier[ws_prefix] + literal[string] , identifier[cacheable]
):
identifier[name] = identifier[_extract] ( identifier[node] , literal[string] )
identifier[playcount] = identifier[_number] ( identifier[_extract] ( identifier[node] , literal[string] ))
identifier[tagcount] = identifier[_number] ( identifier[_extract] ( identifier[node] , literal[string] ))
identifier[seq] . identifier[append] ( identifier[LibraryItem] ( identifier[Artist] ( identifier[name] , identifier[self] . identifier[network] ), identifier[playcount] , identifier[tagcount] ))
keyword[return] identifier[seq] | def get_artists(self, limit=50, cacheable=True):
"""
Returns a sequence of Album objects
if limit==None it will return all (may take a while)
"""
seq = []
for node in _collect_nodes(limit, self, self.ws_prefix + '.getArtists', cacheable):
name = _extract(node, 'name')
playcount = _number(_extract(node, 'playcount'))
tagcount = _number(_extract(node, 'tagcount'))
seq.append(LibraryItem(Artist(name, self.network), playcount, tagcount)) # depends on [control=['for'], data=['node']]
return seq |
def from_EmailMessage(cls, message):
    """Create a Mail object from an instance of
    email.message.EmailMessage.

    :type message: email.message.EmailMessage
    :rtype: Mail
    """
    mail = cls(
        from_email=Email(message.get('From')),
        subject=message.get('Subject'),
        to_emails=Email(message.get('To')),
    )
    try:
        body = message.get_content()
    except AttributeError:
        # Python 2's email API has no get_content(); fall back.
        body = message.get_payload()
    content = Content(message.get_content_type(), body.strip())
    mail.add_content(content)
    for name, value in message.items():
        mail.add_header(Header(name, value))
    return mail
return mail | def function[from_EmailMessage, parameter[cls, message]]:
constant[Create a Mail object from an instance of
email.message.EmailMessage.
:type message: email.message.EmailMessage
:rtype: Mail
]
variable[mail] assign[=] call[name[cls], parameter[]]
<ast.Try object at 0x7da18c4cc580>
call[name[mail].add_content, parameter[call[name[Content], parameter[call[name[message].get_content_type, parameter[]], call[name[body].strip, parameter[]]]]]]
for taget[tuple[[<ast.Name object at 0x7da2047e9150>, <ast.Name object at 0x7da2047eac50>]]] in starred[call[name[message].items, parameter[]]] begin[:]
call[name[mail].add_header, parameter[call[name[Header], parameter[name[k], name[v]]]]]
return[name[mail]] | keyword[def] identifier[from_EmailMessage] ( identifier[cls] , identifier[message] ):
literal[string]
identifier[mail] = identifier[cls] (
identifier[from_email] = identifier[Email] ( identifier[message] . identifier[get] ( literal[string] )),
identifier[subject] = identifier[message] . identifier[get] ( literal[string] ),
identifier[to_emails] = identifier[Email] ( identifier[message] . identifier[get] ( literal[string] )),
)
keyword[try] :
identifier[body] = identifier[message] . identifier[get_content] ()
keyword[except] identifier[AttributeError] :
identifier[body] = identifier[message] . identifier[get_payload] ()
identifier[mail] . identifier[add_content] ( identifier[Content] (
identifier[message] . identifier[get_content_type] (),
identifier[body] . identifier[strip] ()
))
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[message] . identifier[items] ():
identifier[mail] . identifier[add_header] ( identifier[Header] ( identifier[k] , identifier[v] ))
keyword[return] identifier[mail] | def from_EmailMessage(cls, message):
"""Create a Mail object from an instance of
email.message.EmailMessage.
:type message: email.message.EmailMessage
:rtype: Mail
"""
mail = cls(from_email=Email(message.get('From')), subject=message.get('Subject'), to_emails=Email(message.get('To')))
try:
body = message.get_content() # depends on [control=['try'], data=[]]
except AttributeError:
# Python2
body = message.get_payload() # depends on [control=['except'], data=[]]
mail.add_content(Content(message.get_content_type(), body.strip()))
for (k, v) in message.items():
mail.add_header(Header(k, v)) # depends on [control=['for'], data=[]]
return mail |
def set(self, value):
    '''Set this setting to *value*.

    The raw value is coerced to ``self.type``, then validated against the
    optional ``self.range`` (inclusive min/max tuple) and ``self.choice``
    (case-insensitive list of allowed spellings) constraints.

    Returns True if the value was accepted and stored in ``self.value``,
    False otherwise (``self.value`` is left untouched on rejection).
    '''
    # The literal string 'None' clears a setting whose default is None.
    if value == 'None' and self.default is None:
        value = None
    if value is not None:
        if self.type == bool:
            lowered = str(value).lower()
            if lowered in ['1', 'true', 'yes']:
                value = True
            elif lowered in ['0', 'false', 'no']:
                value = False
            else:
                return False
        else:
            try:
                value = self.type(value)
            except Exception:
                # was a bare except:, which also swallowed
                # KeyboardInterrupt/SystemExit; any conversion failure
                # (e.g. int('abc')) rejects the value
                return False
        if self.range is not None:
            (minv, maxv) = self.range
            if value < minv or value > maxv:
                return False
    # Skip the choice check for None: the original crashed on
    # value.lower() when clearing a setting that has a choice list.
    if self.choice is not None and value is not None:
        # Case-insensitive match; store the canonical spelling from the
        # choice list rather than the user's capitalisation.
        found = False
        for v in self.choice:
            if v.lower() == value.lower():
                found = True
                value = v
                break
        if not found:
            print("Must be one of %s" % str(self.choice))
            return False
    self.value = value
    return True
constant[set a setting]
if <ast.BoolOp object at 0x7da20c76fbe0> begin[:]
variable[value] assign[=] constant[None]
if compare[name[value] is_not constant[None]] begin[:]
if compare[name[self].type equal[==] name[bool]] begin[:]
if compare[call[call[name[str], parameter[name[value]]].lower, parameter[]] in list[[<ast.Constant object at 0x7da20c76cc70>, <ast.Constant object at 0x7da1b16d0370>, <ast.Constant object at 0x7da1b16d3880>]]] begin[:]
variable[value] assign[=] constant[True]
if compare[name[self].range is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da1b16d0790> assign[=] name[self].range
if <ast.BoolOp object at 0x7da1b16d05e0> begin[:]
return[constant[False]]
if compare[name[self].choice is_not constant[None]] begin[:]
variable[found] assign[=] constant[False]
for taget[name[v]] in starred[name[self].choice] begin[:]
if compare[call[name[v].lower, parameter[]] equal[==] call[name[value].lower, parameter[]]] begin[:]
variable[found] assign[=] constant[True]
variable[value] assign[=] name[v]
break
if <ast.UnaryOp object at 0x7da20c76d990> begin[:]
call[name[print], parameter[binary_operation[constant[Must be one of %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[self].choice]]]]]
return[constant[False]]
name[self].value assign[=] name[value]
return[constant[True]] | keyword[def] identifier[set] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[value] == literal[string] keyword[and] identifier[self] . identifier[default] keyword[is] keyword[None] :
identifier[value] = keyword[None]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[type] == identifier[bool] :
keyword[if] identifier[str] ( identifier[value] ). identifier[lower] () keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[value] = keyword[True]
keyword[elif] identifier[str] ( identifier[value] ). identifier[lower] () keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[value] = keyword[False]
keyword[else] :
keyword[return] keyword[False]
keyword[else] :
keyword[try] :
identifier[value] = identifier[self] . identifier[type] ( identifier[value] )
keyword[except] :
keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[range] keyword[is] keyword[not] keyword[None] :
( identifier[minv] , identifier[maxv] )= identifier[self] . identifier[range]
keyword[if] identifier[value] < identifier[minv] keyword[or] identifier[value] > identifier[maxv] :
keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[choice] keyword[is] keyword[not] keyword[None] :
identifier[found] = keyword[False]
keyword[for] identifier[v] keyword[in] identifier[self] . identifier[choice] :
keyword[if] identifier[v] . identifier[lower] ()== identifier[value] . identifier[lower] ():
identifier[found] = keyword[True]
identifier[value] = identifier[v]
keyword[break]
keyword[if] keyword[not] identifier[found] :
identifier[print] ( literal[string] % identifier[str] ( identifier[self] . identifier[choice] ))
keyword[return] keyword[False]
identifier[self] . identifier[value] = identifier[value]
keyword[return] keyword[True] | def set(self, value):
"""set a setting"""
if value == 'None' and self.default is None:
value = None # depends on [control=['if'], data=[]]
if value is not None:
if self.type == bool:
if str(value).lower() in ['1', 'true', 'yes']:
value = True # depends on [control=['if'], data=[]]
elif str(value).lower() in ['0', 'false', 'no']:
value = False # depends on [control=['if'], data=[]]
else:
return False # depends on [control=['if'], data=[]]
else:
try:
value = self.type(value) # depends on [control=['try'], data=[]]
except:
return False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['value']]
if self.range is not None:
(minv, maxv) = self.range
if value < minv or value > maxv:
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if self.choice is not None:
found = False
for v in self.choice:
if v.lower() == value.lower():
found = True
value = v
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['v']]
if not found:
print('Must be one of %s' % str(self.choice))
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
self.value = value
return True |
def metadata(sceneid, pmin=2, pmax=98, **kwargs):
"""
Retrieve image bounds and band statistics.
Attributes
----------
sceneid : str
Landsat sceneid. For scenes after May 2017,
sceneid have to be LANDSAT_PRODUCT_ID.
pmin : int, optional, (default: 2)
Histogram minimum cut.
pmax : int, optional, (default: 98)
Histogram maximum cut.
kwargs : optional
These are passed to 'rio_tiler.landsat8._landsat_stats'
e.g: histogram_bins=20, dst_crs='epsg:4326'
Returns
-------
out : dict
Dictionary with bounds and bands statistics.
"""
scene_params = _landsat_parse_scene_id(sceneid)
meta_data = _landsat_get_mtl(sceneid).get("L1_METADATA_FILE")
path_prefix = "{}/{}".format(LANDSAT_BUCKET, scene_params["key"])
info = {"sceneid": sceneid}
_stats_worker = partial(
_landsat_stats,
address_prefix=path_prefix,
metadata=meta_data,
overview_level=1,
percentiles=(pmin, pmax),
**kwargs
)
with futures.ThreadPoolExecutor(max_workers=MAX_THREADS) as executor:
responses = list(executor.map(_stats_worker, LANDSAT_BANDS))
info["bounds"] = [
r["bounds"] for b, r in zip(LANDSAT_BANDS, responses) if b == "8"
][0]
info["statistics"] = {
b: v
for b, d in zip(LANDSAT_BANDS, responses)
for k, v in d["statistics"].items()
}
return info | def function[metadata, parameter[sceneid, pmin, pmax]]:
constant[
Retrieve image bounds and band statistics.
Attributes
----------
sceneid : str
Landsat sceneid. For scenes after May 2017,
sceneid have to be LANDSAT_PRODUCT_ID.
pmin : int, optional, (default: 2)
Histogram minimum cut.
pmax : int, optional, (default: 98)
Histogram maximum cut.
kwargs : optional
These are passed to 'rio_tiler.landsat8._landsat_stats'
e.g: histogram_bins=20, dst_crs='epsg:4326'
Returns
-------
out : dict
Dictionary with bounds and bands statistics.
]
variable[scene_params] assign[=] call[name[_landsat_parse_scene_id], parameter[name[sceneid]]]
variable[meta_data] assign[=] call[call[name[_landsat_get_mtl], parameter[name[sceneid]]].get, parameter[constant[L1_METADATA_FILE]]]
variable[path_prefix] assign[=] call[constant[{}/{}].format, parameter[name[LANDSAT_BUCKET], call[name[scene_params]][constant[key]]]]
variable[info] assign[=] dictionary[[<ast.Constant object at 0x7da1b07a2e30>], [<ast.Name object at 0x7da1b07a29b0>]]
variable[_stats_worker] assign[=] call[name[partial], parameter[name[_landsat_stats]]]
with call[name[futures].ThreadPoolExecutor, parameter[]] begin[:]
variable[responses] assign[=] call[name[list], parameter[call[name[executor].map, parameter[name[_stats_worker], name[LANDSAT_BANDS]]]]]
call[name[info]][constant[bounds]] assign[=] call[<ast.ListComp object at 0x7da1b07a2830>][constant[0]]
call[name[info]][constant[statistics]] assign[=] <ast.DictComp object at 0x7da1b07a2dd0>
return[name[info]] | keyword[def] identifier[metadata] ( identifier[sceneid] , identifier[pmin] = literal[int] , identifier[pmax] = literal[int] ,** identifier[kwargs] ):
literal[string]
identifier[scene_params] = identifier[_landsat_parse_scene_id] ( identifier[sceneid] )
identifier[meta_data] = identifier[_landsat_get_mtl] ( identifier[sceneid] ). identifier[get] ( literal[string] )
identifier[path_prefix] = literal[string] . identifier[format] ( identifier[LANDSAT_BUCKET] , identifier[scene_params] [ literal[string] ])
identifier[info] ={ literal[string] : identifier[sceneid] }
identifier[_stats_worker] = identifier[partial] (
identifier[_landsat_stats] ,
identifier[address_prefix] = identifier[path_prefix] ,
identifier[metadata] = identifier[meta_data] ,
identifier[overview_level] = literal[int] ,
identifier[percentiles] =( identifier[pmin] , identifier[pmax] ),
** identifier[kwargs]
)
keyword[with] identifier[futures] . identifier[ThreadPoolExecutor] ( identifier[max_workers] = identifier[MAX_THREADS] ) keyword[as] identifier[executor] :
identifier[responses] = identifier[list] ( identifier[executor] . identifier[map] ( identifier[_stats_worker] , identifier[LANDSAT_BANDS] ))
identifier[info] [ literal[string] ]=[
identifier[r] [ literal[string] ] keyword[for] identifier[b] , identifier[r] keyword[in] identifier[zip] ( identifier[LANDSAT_BANDS] , identifier[responses] ) keyword[if] identifier[b] == literal[string]
][ literal[int] ]
identifier[info] [ literal[string] ]={
identifier[b] : identifier[v]
keyword[for] identifier[b] , identifier[d] keyword[in] identifier[zip] ( identifier[LANDSAT_BANDS] , identifier[responses] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[d] [ literal[string] ]. identifier[items] ()
}
keyword[return] identifier[info] | def metadata(sceneid, pmin=2, pmax=98, **kwargs):
"""
Retrieve image bounds and band statistics.
Attributes
----------
sceneid : str
Landsat sceneid. For scenes after May 2017,
sceneid have to be LANDSAT_PRODUCT_ID.
pmin : int, optional, (default: 2)
Histogram minimum cut.
pmax : int, optional, (default: 98)
Histogram maximum cut.
kwargs : optional
These are passed to 'rio_tiler.landsat8._landsat_stats'
e.g: histogram_bins=20, dst_crs='epsg:4326'
Returns
-------
out : dict
Dictionary with bounds and bands statistics.
"""
scene_params = _landsat_parse_scene_id(sceneid)
meta_data = _landsat_get_mtl(sceneid).get('L1_METADATA_FILE')
path_prefix = '{}/{}'.format(LANDSAT_BUCKET, scene_params['key'])
info = {'sceneid': sceneid}
_stats_worker = partial(_landsat_stats, address_prefix=path_prefix, metadata=meta_data, overview_level=1, percentiles=(pmin, pmax), **kwargs)
with futures.ThreadPoolExecutor(max_workers=MAX_THREADS) as executor:
responses = list(executor.map(_stats_worker, LANDSAT_BANDS)) # depends on [control=['with'], data=['executor']]
info['bounds'] = [r['bounds'] for (b, r) in zip(LANDSAT_BANDS, responses) if b == '8'][0]
info['statistics'] = {b: v for (b, d) in zip(LANDSAT_BANDS, responses) for (k, v) in d['statistics'].items()}
return info |
def get_checkerboard_matrix(kernel_width):
"""
example matrix for width = 2
-1 -1 1 1
-1 -1 1 1
1 1 -1 -1
1 1 -1 -1
:param kernel_width:
:return:
"""
return np.vstack((
np.hstack((
-1 * np.ones((kernel_width, kernel_width)), np.ones((kernel_width, kernel_width))
)),
np.hstack((
np.ones((kernel_width, kernel_width)), -1 * np.ones((kernel_width, kernel_width))
))
)) | def function[get_checkerboard_matrix, parameter[kernel_width]]:
constant[
example matrix for width = 2
-1 -1 1 1
-1 -1 1 1
1 1 -1 -1
1 1 -1 -1
:param kernel_width:
:return:
]
return[call[name[np].vstack, parameter[tuple[[<ast.Call object at 0x7da1b13bb940>, <ast.Call object at 0x7da1b13b9990>]]]]] | keyword[def] identifier[get_checkerboard_matrix] ( identifier[kernel_width] ):
literal[string]
keyword[return] identifier[np] . identifier[vstack] ((
identifier[np] . identifier[hstack] ((
- literal[int] * identifier[np] . identifier[ones] (( identifier[kernel_width] , identifier[kernel_width] )), identifier[np] . identifier[ones] (( identifier[kernel_width] , identifier[kernel_width] ))
)),
identifier[np] . identifier[hstack] ((
identifier[np] . identifier[ones] (( identifier[kernel_width] , identifier[kernel_width] )),- literal[int] * identifier[np] . identifier[ones] (( identifier[kernel_width] , identifier[kernel_width] ))
))
)) | def get_checkerboard_matrix(kernel_width):
"""
example matrix for width = 2
-1 -1 1 1
-1 -1 1 1
1 1 -1 -1
1 1 -1 -1
:param kernel_width:
:return:
"""
return np.vstack((np.hstack((-1 * np.ones((kernel_width, kernel_width)), np.ones((kernel_width, kernel_width)))), np.hstack((np.ones((kernel_width, kernel_width)), -1 * np.ones((kernel_width, kernel_width)))))) |
def _install_one(
repo_url, branch, destination, commit='', patches=None,
exclude_modules=None, include_modules=None, base=False, work_directory=''
):
""" Install a third party odoo add-on
:param string repo_url: url of the repo that contains the patch.
:param string branch: name of the branch to checkout.
:param string destination: the folder where the add-on should end up at.
:param string commit: Optional commit rev to checkout to. If mentioned, that take over the branch
:param string work_directory: the path to the directory of the yaml file.
:param list patches: Optional list of patches to apply.
"""
patches = patches or []
patches = [
core.FilePatch(file=patch['file'], work_directory=work_directory)
if 'file' in patch else core.Patch(**patch)
for patch in patches
]
addon_cls = core.Base if base else core.Addon
addon = addon_cls(
repo_url, branch, commit=commit, patches=patches,
exclude_modules=exclude_modules, include_modules=include_modules)
addon.install(destination) | def function[_install_one, parameter[repo_url, branch, destination, commit, patches, exclude_modules, include_modules, base, work_directory]]:
constant[ Install a third party odoo add-on
:param string repo_url: url of the repo that contains the patch.
:param string branch: name of the branch to checkout.
:param string destination: the folder where the add-on should end up at.
:param string commit: Optional commit rev to checkout to. If mentioned, that take over the branch
:param string work_directory: the path to the directory of the yaml file.
:param list patches: Optional list of patches to apply.
]
variable[patches] assign[=] <ast.BoolOp object at 0x7da18dc99840>
variable[patches] assign[=] <ast.ListComp object at 0x7da20c990d30>
variable[addon_cls] assign[=] <ast.IfExp object at 0x7da20c991d20>
variable[addon] assign[=] call[name[addon_cls], parameter[name[repo_url], name[branch]]]
call[name[addon].install, parameter[name[destination]]] | keyword[def] identifier[_install_one] (
identifier[repo_url] , identifier[branch] , identifier[destination] , identifier[commit] = literal[string] , identifier[patches] = keyword[None] ,
identifier[exclude_modules] = keyword[None] , identifier[include_modules] = keyword[None] , identifier[base] = keyword[False] , identifier[work_directory] = literal[string]
):
literal[string]
identifier[patches] = identifier[patches] keyword[or] []
identifier[patches] =[
identifier[core] . identifier[FilePatch] ( identifier[file] = identifier[patch] [ literal[string] ], identifier[work_directory] = identifier[work_directory] )
keyword[if] literal[string] keyword[in] identifier[patch] keyword[else] identifier[core] . identifier[Patch] (** identifier[patch] )
keyword[for] identifier[patch] keyword[in] identifier[patches]
]
identifier[addon_cls] = identifier[core] . identifier[Base] keyword[if] identifier[base] keyword[else] identifier[core] . identifier[Addon]
identifier[addon] = identifier[addon_cls] (
identifier[repo_url] , identifier[branch] , identifier[commit] = identifier[commit] , identifier[patches] = identifier[patches] ,
identifier[exclude_modules] = identifier[exclude_modules] , identifier[include_modules] = identifier[include_modules] )
identifier[addon] . identifier[install] ( identifier[destination] ) | def _install_one(repo_url, branch, destination, commit='', patches=None, exclude_modules=None, include_modules=None, base=False, work_directory=''):
""" Install a third party odoo add-on
:param string repo_url: url of the repo that contains the patch.
:param string branch: name of the branch to checkout.
:param string destination: the folder where the add-on should end up at.
:param string commit: Optional commit rev to checkout to. If mentioned, that take over the branch
:param string work_directory: the path to the directory of the yaml file.
:param list patches: Optional list of patches to apply.
"""
patches = patches or []
patches = [core.FilePatch(file=patch['file'], work_directory=work_directory) if 'file' in patch else core.Patch(**patch) for patch in patches]
addon_cls = core.Base if base else core.Addon
addon = addon_cls(repo_url, branch, commit=commit, patches=patches, exclude_modules=exclude_modules, include_modules=include_modules)
addon.install(destination) |
async def open(self):
"""Register with the publisher."""
self.store.register(self)
while not self.finished:
message = await self.messages.get()
await self.publish(message) | <ast.AsyncFunctionDef object at 0x7da1b244add0> | keyword[async] keyword[def] identifier[open] ( identifier[self] ):
literal[string]
identifier[self] . identifier[store] . identifier[register] ( identifier[self] )
keyword[while] keyword[not] identifier[self] . identifier[finished] :
identifier[message] = keyword[await] identifier[self] . identifier[messages] . identifier[get] ()
keyword[await] identifier[self] . identifier[publish] ( identifier[message] ) | async def open(self):
"""Register with the publisher."""
self.store.register(self)
while not self.finished:
message = await self.messages.get()
await self.publish(message) # depends on [control=['while'], data=[]] |
def flush_commit(self, commits, repos=tuple()):
"""
Blocks until all of the commits which have a set of commits as
provenance have finished. For commits to be considered they must have
all of the specified commits as provenance. This in effect waits for
all of the jobs that are triggered by a set of commits to complete.
It returns an error if any of the commits it's waiting on are
cancelled due to one of the jobs encountering an error during runtime.
Note that it's never necessary to call FlushCommit to run jobs,
they'll run no matter what, FlushCommit just allows you to wait for
them to complete and see their output once they do. This returns an
iterator of CommitInfo objects.
Params:
* commits: A commit or a list of commits to wait on.
* repos: Optional. Only the commits up to and including those repos.
will be considered, otherwise all repos are considered.
"""
req = proto.FlushCommitRequest(commit=[commit_from(c) for c in commits],
to_repo=[proto.Repo(name=r) for r in repos])
res = self.stub.FlushCommit(req, metadata=self.metadata)
return res | def function[flush_commit, parameter[self, commits, repos]]:
constant[
Blocks until all of the commits which have a set of commits as
provenance have finished. For commits to be considered they must have
all of the specified commits as provenance. This in effect waits for
all of the jobs that are triggered by a set of commits to complete.
It returns an error if any of the commits it's waiting on are
cancelled due to one of the jobs encountering an error during runtime.
Note that it's never necessary to call FlushCommit to run jobs,
they'll run no matter what, FlushCommit just allows you to wait for
them to complete and see their output once they do. This returns an
iterator of CommitInfo objects.
Params:
* commits: A commit or a list of commits to wait on.
* repos: Optional. Only the commits up to and including those repos.
will be considered, otherwise all repos are considered.
]
variable[req] assign[=] call[name[proto].FlushCommitRequest, parameter[]]
variable[res] assign[=] call[name[self].stub.FlushCommit, parameter[name[req]]]
return[name[res]] | keyword[def] identifier[flush_commit] ( identifier[self] , identifier[commits] , identifier[repos] = identifier[tuple] ()):
literal[string]
identifier[req] = identifier[proto] . identifier[FlushCommitRequest] ( identifier[commit] =[ identifier[commit_from] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[commits] ],
identifier[to_repo] =[ identifier[proto] . identifier[Repo] ( identifier[name] = identifier[r] ) keyword[for] identifier[r] keyword[in] identifier[repos] ])
identifier[res] = identifier[self] . identifier[stub] . identifier[FlushCommit] ( identifier[req] , identifier[metadata] = identifier[self] . identifier[metadata] )
keyword[return] identifier[res] | def flush_commit(self, commits, repos=tuple()):
"""
Blocks until all of the commits which have a set of commits as
provenance have finished. For commits to be considered they must have
all of the specified commits as provenance. This in effect waits for
all of the jobs that are triggered by a set of commits to complete.
It returns an error if any of the commits it's waiting on are
cancelled due to one of the jobs encountering an error during runtime.
Note that it's never necessary to call FlushCommit to run jobs,
they'll run no matter what, FlushCommit just allows you to wait for
them to complete and see their output once they do. This returns an
iterator of CommitInfo objects.
Params:
* commits: A commit or a list of commits to wait on.
* repos: Optional. Only the commits up to and including those repos.
will be considered, otherwise all repos are considered.
"""
req = proto.FlushCommitRequest(commit=[commit_from(c) for c in commits], to_repo=[proto.Repo(name=r) for r in repos])
res = self.stub.FlushCommit(req, metadata=self.metadata)
return res |
def connect(self, callback, ref=False, position='first',
before=None, after=None):
"""Connect this emitter to a new callback.
Parameters
----------
callback : function | tuple
*callback* may be either a callable object or a tuple
(object, attr_name) where object.attr_name will point to a
callable object. Note that only a weak reference to ``object``
will be kept.
ref : bool | str
Reference used to identify the callback in ``before``/``after``.
If True, the callback ref will automatically determined (see
Notes). If False, the callback cannot be referred to by a string.
If str, the given string will be used. Note that if ``ref``
is not unique in ``callback_refs``, an error will be thrown.
position : str
If ``'first'``, the first eligible position is used (that
meets the before and after criteria), ``'last'`` will use
the last position.
before : str | callback | list of str or callback | None
List of callbacks that the current callback should precede.
Can be None if no before-criteria should be used.
after : str | callback | list of str or callback | None
List of callbacks that the current callback should follow.
Can be None if no after-criteria should be used.
Notes
-----
If ``ref=True``, the callback reference will be determined from:
1. If ``callback`` is ``tuple``, the secend element in the tuple.
2. The ``__name__`` attribute.
3. The ``__class__.__name__`` attribute.
The current list of callback refs can be obtained using
``event.callback_refs``. Callbacks can be referred to by either
their string reference (if given), or by the actual callback that
was attached (e.g., ``(canvas, 'swap_buffers')``).
If the specified callback is already connected, then the request is
ignored.
If before is None and after is None (default), the new callback will
be added to the beginning of the callback list. Thus the
callback that is connected _last_ will be the _first_ to receive
events from the emitter.
"""
callbacks = self.callbacks
callback_refs = self.callback_refs
callback = self._normalize_cb(callback)
if callback in callbacks:
return
# deal with the ref
if isinstance(ref, bool):
if ref:
if isinstance(callback, tuple):
ref = callback[1]
elif hasattr(callback, '__name__'): # function
ref = callback.__name__
else: # Method, or other
ref = callback.__class__.__name__
else:
ref = None
elif not isinstance(ref, string_types):
raise TypeError('ref must be a bool or string')
if ref is not None and ref in self._callback_refs:
raise ValueError('ref "%s" is not unique' % ref)
# positions
if position not in ('first', 'last'):
raise ValueError('position must be "first" or "last", not %s'
% position)
# bounds
bounds = list() # upper & lower bnds (inclusive) of possible cb locs
for ri, criteria in enumerate((before, after)):
if criteria is None or criteria == []:
bounds.append(len(callback_refs) if ri == 0 else 0)
else:
if not isinstance(criteria, list):
criteria = [criteria]
for c in criteria:
count = sum([(c == cn or c == cc) for cn, cc
in zip(callback_refs, callbacks)])
if count != 1:
raise ValueError('criteria "%s" is in the current '
'callback list %s times:\n%s\n%s'
% (criteria, count,
callback_refs, callbacks))
matches = [ci for ci, (cn, cc) in enumerate(zip(callback_refs,
callbacks))
if (cc in criteria or cn in criteria)]
bounds.append(matches[0] if ri == 0 else (matches[-1] + 1))
if bounds[0] < bounds[1]: # i.e., "place before" < "place after"
raise RuntimeError('cannot place callback before "%s" '
'and after "%s" for callbacks: %s'
% (before, after, callback_refs))
idx = bounds[1] if position == 'first' else bounds[0] # 'last'
# actually add the callback
self._callbacks.insert(idx, callback)
self._callback_refs.insert(idx, ref)
return callback | def function[connect, parameter[self, callback, ref, position, before, after]]:
constant[Connect this emitter to a new callback.
Parameters
----------
callback : function | tuple
*callback* may be either a callable object or a tuple
(object, attr_name) where object.attr_name will point to a
callable object. Note that only a weak reference to ``object``
will be kept.
ref : bool | str
Reference used to identify the callback in ``before``/``after``.
If True, the callback ref will automatically determined (see
Notes). If False, the callback cannot be referred to by a string.
If str, the given string will be used. Note that if ``ref``
is not unique in ``callback_refs``, an error will be thrown.
position : str
If ``'first'``, the first eligible position is used (that
meets the before and after criteria), ``'last'`` will use
the last position.
before : str | callback | list of str or callback | None
List of callbacks that the current callback should precede.
Can be None if no before-criteria should be used.
after : str | callback | list of str or callback | None
List of callbacks that the current callback should follow.
Can be None if no after-criteria should be used.
Notes
-----
If ``ref=True``, the callback reference will be determined from:
1. If ``callback`` is ``tuple``, the secend element in the tuple.
2. The ``__name__`` attribute.
3. The ``__class__.__name__`` attribute.
The current list of callback refs can be obtained using
``event.callback_refs``. Callbacks can be referred to by either
their string reference (if given), or by the actual callback that
was attached (e.g., ``(canvas, 'swap_buffers')``).
If the specified callback is already connected, then the request is
ignored.
If before is None and after is None (default), the new callback will
be added to the beginning of the callback list. Thus the
callback that is connected _last_ will be the _first_ to receive
events from the emitter.
]
variable[callbacks] assign[=] name[self].callbacks
variable[callback_refs] assign[=] name[self].callback_refs
variable[callback] assign[=] call[name[self]._normalize_cb, parameter[name[callback]]]
if compare[name[callback] in name[callbacks]] begin[:]
return[None]
if call[name[isinstance], parameter[name[ref], name[bool]]] begin[:]
if name[ref] begin[:]
if call[name[isinstance], parameter[name[callback], name[tuple]]] begin[:]
variable[ref] assign[=] call[name[callback]][constant[1]]
if <ast.BoolOp object at 0x7da18dc9a380> begin[:]
<ast.Raise object at 0x7da18dc99ea0>
if compare[name[position] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da18dc9b550>, <ast.Constant object at 0x7da18dc9a050>]]] begin[:]
<ast.Raise object at 0x7da18dc9a080>
variable[bounds] assign[=] call[name[list], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18dc99e10>, <ast.Name object at 0x7da18dc984c0>]]] in starred[call[name[enumerate], parameter[tuple[[<ast.Name object at 0x7da18dc98670>, <ast.Name object at 0x7da18dc99c00>]]]]] begin[:]
if <ast.BoolOp object at 0x7da18dc99120> begin[:]
call[name[bounds].append, parameter[<ast.IfExp object at 0x7da18dc991b0>]]
if compare[call[name[bounds]][constant[0]] less[<] call[name[bounds]][constant[1]]] begin[:]
<ast.Raise object at 0x7da1b0fb3190>
variable[idx] assign[=] <ast.IfExp object at 0x7da1b0fb2dd0>
call[name[self]._callbacks.insert, parameter[name[idx], name[callback]]]
call[name[self]._callback_refs.insert, parameter[name[idx], name[ref]]]
return[name[callback]] | keyword[def] identifier[connect] ( identifier[self] , identifier[callback] , identifier[ref] = keyword[False] , identifier[position] = literal[string] ,
identifier[before] = keyword[None] , identifier[after] = keyword[None] ):
literal[string]
identifier[callbacks] = identifier[self] . identifier[callbacks]
identifier[callback_refs] = identifier[self] . identifier[callback_refs]
identifier[callback] = identifier[self] . identifier[_normalize_cb] ( identifier[callback] )
keyword[if] identifier[callback] keyword[in] identifier[callbacks] :
keyword[return]
keyword[if] identifier[isinstance] ( identifier[ref] , identifier[bool] ):
keyword[if] identifier[ref] :
keyword[if] identifier[isinstance] ( identifier[callback] , identifier[tuple] ):
identifier[ref] = identifier[callback] [ literal[int] ]
keyword[elif] identifier[hasattr] ( identifier[callback] , literal[string] ):
identifier[ref] = identifier[callback] . identifier[__name__]
keyword[else] :
identifier[ref] = identifier[callback] . identifier[__class__] . identifier[__name__]
keyword[else] :
identifier[ref] = keyword[None]
keyword[elif] keyword[not] identifier[isinstance] ( identifier[ref] , identifier[string_types] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[ref] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ref] keyword[in] identifier[self] . identifier[_callback_refs] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[ref] )
keyword[if] identifier[position] keyword[not] keyword[in] ( literal[string] , literal[string] ):
keyword[raise] identifier[ValueError] ( literal[string]
% identifier[position] )
identifier[bounds] = identifier[list] ()
keyword[for] identifier[ri] , identifier[criteria] keyword[in] identifier[enumerate] (( identifier[before] , identifier[after] )):
keyword[if] identifier[criteria] keyword[is] keyword[None] keyword[or] identifier[criteria] ==[]:
identifier[bounds] . identifier[append] ( identifier[len] ( identifier[callback_refs] ) keyword[if] identifier[ri] == literal[int] keyword[else] literal[int] )
keyword[else] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[criteria] , identifier[list] ):
identifier[criteria] =[ identifier[criteria] ]
keyword[for] identifier[c] keyword[in] identifier[criteria] :
identifier[count] = identifier[sum] ([( identifier[c] == identifier[cn] keyword[or] identifier[c] == identifier[cc] ) keyword[for] identifier[cn] , identifier[cc]
keyword[in] identifier[zip] ( identifier[callback_refs] , identifier[callbacks] )])
keyword[if] identifier[count] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string]
%( identifier[criteria] , identifier[count] ,
identifier[callback_refs] , identifier[callbacks] ))
identifier[matches] =[ identifier[ci] keyword[for] identifier[ci] ,( identifier[cn] , identifier[cc] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[callback_refs] ,
identifier[callbacks] ))
keyword[if] ( identifier[cc] keyword[in] identifier[criteria] keyword[or] identifier[cn] keyword[in] identifier[criteria] )]
identifier[bounds] . identifier[append] ( identifier[matches] [ literal[int] ] keyword[if] identifier[ri] == literal[int] keyword[else] ( identifier[matches] [- literal[int] ]+ literal[int] ))
keyword[if] identifier[bounds] [ literal[int] ]< identifier[bounds] [ literal[int] ]:
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string]
%( identifier[before] , identifier[after] , identifier[callback_refs] ))
identifier[idx] = identifier[bounds] [ literal[int] ] keyword[if] identifier[position] == literal[string] keyword[else] identifier[bounds] [ literal[int] ]
identifier[self] . identifier[_callbacks] . identifier[insert] ( identifier[idx] , identifier[callback] )
identifier[self] . identifier[_callback_refs] . identifier[insert] ( identifier[idx] , identifier[ref] )
keyword[return] identifier[callback] | def connect(self, callback, ref=False, position='first', before=None, after=None):
"""Connect this emitter to a new callback.
Parameters
----------
callback : function | tuple
*callback* may be either a callable object or a tuple
(object, attr_name) where object.attr_name will point to a
callable object. Note that only a weak reference to ``object``
will be kept.
ref : bool | str
Reference used to identify the callback in ``before``/``after``.
If True, the callback ref will automatically determined (see
Notes). If False, the callback cannot be referred to by a string.
If str, the given string will be used. Note that if ``ref``
is not unique in ``callback_refs``, an error will be thrown.
position : str
If ``'first'``, the first eligible position is used (that
meets the before and after criteria), ``'last'`` will use
the last position.
before : str | callback | list of str or callback | None
List of callbacks that the current callback should precede.
Can be None if no before-criteria should be used.
after : str | callback | list of str or callback | None
List of callbacks that the current callback should follow.
Can be None if no after-criteria should be used.
Notes
-----
If ``ref=True``, the callback reference will be determined from:
1. If ``callback`` is ``tuple``, the secend element in the tuple.
2. The ``__name__`` attribute.
3. The ``__class__.__name__`` attribute.
The current list of callback refs can be obtained using
``event.callback_refs``. Callbacks can be referred to by either
their string reference (if given), or by the actual callback that
was attached (e.g., ``(canvas, 'swap_buffers')``).
If the specified callback is already connected, then the request is
ignored.
If before is None and after is None (default), the new callback will
be added to the beginning of the callback list. Thus the
callback that is connected _last_ will be the _first_ to receive
events from the emitter.
"""
callbacks = self.callbacks
callback_refs = self.callback_refs
callback = self._normalize_cb(callback)
if callback in callbacks:
return # depends on [control=['if'], data=[]]
# deal with the ref
if isinstance(ref, bool):
if ref:
if isinstance(callback, tuple):
ref = callback[1] # depends on [control=['if'], data=[]]
elif hasattr(callback, '__name__'): # function
ref = callback.__name__ # depends on [control=['if'], data=[]]
else: # Method, or other
ref = callback.__class__.__name__ # depends on [control=['if'], data=[]]
else:
ref = None # depends on [control=['if'], data=[]]
elif not isinstance(ref, string_types):
raise TypeError('ref must be a bool or string') # depends on [control=['if'], data=[]]
if ref is not None and ref in self._callback_refs:
raise ValueError('ref "%s" is not unique' % ref) # depends on [control=['if'], data=[]]
# positions
if position not in ('first', 'last'):
raise ValueError('position must be "first" or "last", not %s' % position) # depends on [control=['if'], data=['position']]
# bounds
bounds = list() # upper & lower bnds (inclusive) of possible cb locs
for (ri, criteria) in enumerate((before, after)):
if criteria is None or criteria == []:
bounds.append(len(callback_refs) if ri == 0 else 0) # depends on [control=['if'], data=[]]
else:
if not isinstance(criteria, list):
criteria = [criteria] # depends on [control=['if'], data=[]]
for c in criteria:
count = sum([c == cn or c == cc for (cn, cc) in zip(callback_refs, callbacks)])
if count != 1:
raise ValueError('criteria "%s" is in the current callback list %s times:\n%s\n%s' % (criteria, count, callback_refs, callbacks)) # depends on [control=['if'], data=['count']] # depends on [control=['for'], data=['c']]
matches = [ci for (ci, (cn, cc)) in enumerate(zip(callback_refs, callbacks)) if cc in criteria or cn in criteria]
bounds.append(matches[0] if ri == 0 else matches[-1] + 1) # depends on [control=['for'], data=[]]
if bounds[0] < bounds[1]: # i.e., "place before" < "place after"
raise RuntimeError('cannot place callback before "%s" and after "%s" for callbacks: %s' % (before, after, callback_refs)) # depends on [control=['if'], data=[]]
idx = bounds[1] if position == 'first' else bounds[0] # 'last'
# actually add the callback
self._callbacks.insert(idx, callback)
self._callback_refs.insert(idx, ref)
return callback |
def url(value):
"""Validate a URL.
:param string value: The URL to validate
:returns: The URL if valid.
:raises: ValueError
"""
if not url_regex.search(value):
message = u"{0} is not a valid URL".format(value)
if url_regex.search('http://' + value):
message += u". Did you mean: http://{0}".format(value)
raise ValueError(message)
return value | def function[url, parameter[value]]:
constant[Validate a URL.
:param string value: The URL to validate
:returns: The URL if valid.
:raises: ValueError
]
if <ast.UnaryOp object at 0x7da20c76f100> begin[:]
variable[message] assign[=] call[constant[{0} is not a valid URL].format, parameter[name[value]]]
if call[name[url_regex].search, parameter[binary_operation[constant[http://] + name[value]]]] begin[:]
<ast.AugAssign object at 0x7da20c76c7c0>
<ast.Raise object at 0x7da20c76dae0>
return[name[value]] | keyword[def] identifier[url] ( identifier[value] ):
literal[string]
keyword[if] keyword[not] identifier[url_regex] . identifier[search] ( identifier[value] ):
identifier[message] = literal[string] . identifier[format] ( identifier[value] )
keyword[if] identifier[url_regex] . identifier[search] ( literal[string] + identifier[value] ):
identifier[message] += literal[string] . identifier[format] ( identifier[value] )
keyword[raise] identifier[ValueError] ( identifier[message] )
keyword[return] identifier[value] | def url(value):
"""Validate a URL.
:param string value: The URL to validate
:returns: The URL if valid.
:raises: ValueError
"""
if not url_regex.search(value):
message = u'{0} is not a valid URL'.format(value)
if url_regex.search('http://' + value):
message += u'. Did you mean: http://{0}'.format(value) # depends on [control=['if'], data=[]]
raise ValueError(message) # depends on [control=['if'], data=[]]
return value |
def fetch_public_key(repo):
"""Download RSA public key Travis will use for this repo.
Travis API docs: http://docs.travis-ci.com/api/#repository-keys
"""
keyurl = 'https://api.travis-ci.org/repos/{0}/key'.format(repo)
data = json.loads(urlopen(keyurl).read())
if 'key' not in data:
errmsg = "Could not find public key for repo: {}.\n".format(repo)
errmsg += "Have you already added your GitHub repo to Travis?"
raise ValueError(errmsg)
return data['key'] | def function[fetch_public_key, parameter[repo]]:
constant[Download RSA public key Travis will use for this repo.
Travis API docs: http://docs.travis-ci.com/api/#repository-keys
]
variable[keyurl] assign[=] call[constant[https://api.travis-ci.org/repos/{0}/key].format, parameter[name[repo]]]
variable[data] assign[=] call[name[json].loads, parameter[call[call[name[urlopen], parameter[name[keyurl]]].read, parameter[]]]]
if compare[constant[key] <ast.NotIn object at 0x7da2590d7190> name[data]] begin[:]
variable[errmsg] assign[=] call[constant[Could not find public key for repo: {}.
].format, parameter[name[repo]]]
<ast.AugAssign object at 0x7da1b07bde10>
<ast.Raise object at 0x7da1b07bdc30>
return[call[name[data]][constant[key]]] | keyword[def] identifier[fetch_public_key] ( identifier[repo] ):
literal[string]
identifier[keyurl] = literal[string] . identifier[format] ( identifier[repo] )
identifier[data] = identifier[json] . identifier[loads] ( identifier[urlopen] ( identifier[keyurl] ). identifier[read] ())
keyword[if] literal[string] keyword[not] keyword[in] identifier[data] :
identifier[errmsg] = literal[string] . identifier[format] ( identifier[repo] )
identifier[errmsg] += literal[string]
keyword[raise] identifier[ValueError] ( identifier[errmsg] )
keyword[return] identifier[data] [ literal[string] ] | def fetch_public_key(repo):
"""Download RSA public key Travis will use for this repo.
Travis API docs: http://docs.travis-ci.com/api/#repository-keys
"""
keyurl = 'https://api.travis-ci.org/repos/{0}/key'.format(repo)
data = json.loads(urlopen(keyurl).read())
if 'key' not in data:
errmsg = 'Could not find public key for repo: {}.\n'.format(repo)
errmsg += 'Have you already added your GitHub repo to Travis?'
raise ValueError(errmsg) # depends on [control=['if'], data=[]]
return data['key'] |
def get_semantic_data(self, path_as_list):
""" Retrieves an entry of the semantic data.
:param list path_as_list: The path in the vividict to retrieve the value from
:return:
"""
target_dict = self.semantic_data
for path_element in path_as_list:
if path_element in target_dict:
target_dict = target_dict[path_element]
else:
raise KeyError("The state with name {1} and id {2} holds no semantic data with path {0}."
"".format(path_as_list[:path_as_list.index(path_element) + 1], self.name, self.state_id))
return target_dict | def function[get_semantic_data, parameter[self, path_as_list]]:
constant[ Retrieves an entry of the semantic data.
:param list path_as_list: The path in the vividict to retrieve the value from
:return:
]
variable[target_dict] assign[=] name[self].semantic_data
for taget[name[path_element]] in starred[name[path_as_list]] begin[:]
if compare[name[path_element] in name[target_dict]] begin[:]
variable[target_dict] assign[=] call[name[target_dict]][name[path_element]]
return[name[target_dict]] | keyword[def] identifier[get_semantic_data] ( identifier[self] , identifier[path_as_list] ):
literal[string]
identifier[target_dict] = identifier[self] . identifier[semantic_data]
keyword[for] identifier[path_element] keyword[in] identifier[path_as_list] :
keyword[if] identifier[path_element] keyword[in] identifier[target_dict] :
identifier[target_dict] = identifier[target_dict] [ identifier[path_element] ]
keyword[else] :
keyword[raise] identifier[KeyError] ( literal[string]
literal[string] . identifier[format] ( identifier[path_as_list] [: identifier[path_as_list] . identifier[index] ( identifier[path_element] )+ literal[int] ], identifier[self] . identifier[name] , identifier[self] . identifier[state_id] ))
keyword[return] identifier[target_dict] | def get_semantic_data(self, path_as_list):
""" Retrieves an entry of the semantic data.
:param list path_as_list: The path in the vividict to retrieve the value from
:return:
"""
target_dict = self.semantic_data
for path_element in path_as_list:
if path_element in target_dict:
target_dict = target_dict[path_element] # depends on [control=['if'], data=['path_element', 'target_dict']]
else:
raise KeyError('The state with name {1} and id {2} holds no semantic data with path {0}.'.format(path_as_list[:path_as_list.index(path_element) + 1], self.name, self.state_id)) # depends on [control=['for'], data=['path_element']]
return target_dict |
def start(self):
"""Start the scheduler threads."""
# TODO(BMo) having this check is probably a good idea but I've \
# disabled it for now while the PBC is in flux.
# assert sip_pbc.release.__version__ == '1.2.3'
scheduler_threads = [
Thread(target=self._monitor_events, daemon=True),
Thread(target=self._processing_controller_status, daemon=True),
Thread(target=self._schedule_processing_blocks, daemon=True),
Thread(target=self._monitor_pbc_status, daemon=True)
]
for thread in scheduler_threads:
thread.start()
try:
for thread in scheduler_threads:
thread.join()
except KeyboardInterrupt:
LOG.info('Keyboard interrupt!')
sys.exit(0)
finally:
LOG.info('Finally!') | def function[start, parameter[self]]:
constant[Start the scheduler threads.]
variable[scheduler_threads] assign[=] list[[<ast.Call object at 0x7da1b0536f20>, <ast.Call object at 0x7da1b05342b0>, <ast.Call object at 0x7da20c9909d0>, <ast.Call object at 0x7da20c9914e0>]]
for taget[name[thread]] in starred[name[scheduler_threads]] begin[:]
call[name[thread].start, parameter[]]
<ast.Try object at 0x7da18bc73d90> | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
identifier[scheduler_threads] =[
identifier[Thread] ( identifier[target] = identifier[self] . identifier[_monitor_events] , identifier[daemon] = keyword[True] ),
identifier[Thread] ( identifier[target] = identifier[self] . identifier[_processing_controller_status] , identifier[daemon] = keyword[True] ),
identifier[Thread] ( identifier[target] = identifier[self] . identifier[_schedule_processing_blocks] , identifier[daemon] = keyword[True] ),
identifier[Thread] ( identifier[target] = identifier[self] . identifier[_monitor_pbc_status] , identifier[daemon] = keyword[True] )
]
keyword[for] identifier[thread] keyword[in] identifier[scheduler_threads] :
identifier[thread] . identifier[start] ()
keyword[try] :
keyword[for] identifier[thread] keyword[in] identifier[scheduler_threads] :
identifier[thread] . identifier[join] ()
keyword[except] identifier[KeyboardInterrupt] :
identifier[LOG] . identifier[info] ( literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[finally] :
identifier[LOG] . identifier[info] ( literal[string] ) | def start(self):
"""Start the scheduler threads."""
# TODO(BMo) having this check is probably a good idea but I've \
# disabled it for now while the PBC is in flux.
# assert sip_pbc.release.__version__ == '1.2.3'
scheduler_threads = [Thread(target=self._monitor_events, daemon=True), Thread(target=self._processing_controller_status, daemon=True), Thread(target=self._schedule_processing_blocks, daemon=True), Thread(target=self._monitor_pbc_status, daemon=True)]
for thread in scheduler_threads:
thread.start() # depends on [control=['for'], data=['thread']]
try:
for thread in scheduler_threads:
thread.join() # depends on [control=['for'], data=['thread']] # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
LOG.info('Keyboard interrupt!')
sys.exit(0) # depends on [control=['except'], data=[]]
finally:
LOG.info('Finally!') |
def local(self, fun, tgt, **kwargs):
'''
Wrap LocalClient for running :ref:`execution modules <all-salt.modules>`
'''
self.client_cache['local'].cmd_async(tgt, fun, **kwargs) | def function[local, parameter[self, fun, tgt]]:
constant[
Wrap LocalClient for running :ref:`execution modules <all-salt.modules>`
]
call[call[name[self].client_cache][constant[local]].cmd_async, parameter[name[tgt], name[fun]]] | keyword[def] identifier[local] ( identifier[self] , identifier[fun] , identifier[tgt] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[client_cache] [ literal[string] ]. identifier[cmd_async] ( identifier[tgt] , identifier[fun] ,** identifier[kwargs] ) | def local(self, fun, tgt, **kwargs):
"""
Wrap LocalClient for running :ref:`execution modules <all-salt.modules>`
"""
self.client_cache['local'].cmd_async(tgt, fun, **kwargs) |
def import_i2c_addr(bus, opt="sensors"):
""" import_i2c_addresses will return a list of the
currently connected I2C devices.
This can be used a means to automatically detect
the number of connected sensor modules.
Modules are between int(112) and int(119)
By default, the method will return a list
of sensor addresses.
"""
i2c_list = []
for device in range(128):
try:
bus.read_byte(device)
i2c_list.append((device))
except IOError:
pass
if opt == "sensors":
sensor_list = []
for module in range(112,120):
try:
indx = i2c_list.index(module)
sensor_list.append(module)
except ValueError:
pass
return sensor_list
else:
return i2c_list | def function[import_i2c_addr, parameter[bus, opt]]:
constant[ import_i2c_addresses will return a list of the
currently connected I2C devices.
This can be used a means to automatically detect
the number of connected sensor modules.
Modules are between int(112) and int(119)
By default, the method will return a list
of sensor addresses.
]
variable[i2c_list] assign[=] list[[]]
for taget[name[device]] in starred[call[name[range], parameter[constant[128]]]] begin[:]
<ast.Try object at 0x7da18f09d600>
if compare[name[opt] equal[==] constant[sensors]] begin[:]
variable[sensor_list] assign[=] list[[]]
for taget[name[module]] in starred[call[name[range], parameter[constant[112], constant[120]]]] begin[:]
<ast.Try object at 0x7da18f09fa90>
return[name[sensor_list]] | keyword[def] identifier[import_i2c_addr] ( identifier[bus] , identifier[opt] = literal[string] ):
literal[string]
identifier[i2c_list] =[]
keyword[for] identifier[device] keyword[in] identifier[range] ( literal[int] ):
keyword[try] :
identifier[bus] . identifier[read_byte] ( identifier[device] )
identifier[i2c_list] . identifier[append] (( identifier[device] ))
keyword[except] identifier[IOError] :
keyword[pass]
keyword[if] identifier[opt] == literal[string] :
identifier[sensor_list] =[]
keyword[for] identifier[module] keyword[in] identifier[range] ( literal[int] , literal[int] ):
keyword[try] :
identifier[indx] = identifier[i2c_list] . identifier[index] ( identifier[module] )
identifier[sensor_list] . identifier[append] ( identifier[module] )
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[return] identifier[sensor_list]
keyword[else] :
keyword[return] identifier[i2c_list] | def import_i2c_addr(bus, opt='sensors'):
""" import_i2c_addresses will return a list of the
currently connected I2C devices.
This can be used a means to automatically detect
the number of connected sensor modules.
Modules are between int(112) and int(119)
By default, the method will return a list
of sensor addresses.
"""
i2c_list = []
for device in range(128):
try:
bus.read_byte(device)
i2c_list.append(device) # depends on [control=['try'], data=[]]
except IOError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['device']]
if opt == 'sensors':
sensor_list = []
for module in range(112, 120):
try:
indx = i2c_list.index(module)
sensor_list.append(module) # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['module']]
return sensor_list # depends on [control=['if'], data=[]]
else:
return i2c_list |
def create_remote_subnet(self, account_id, identifier, cidr):
"""Creates a remote subnet on the given account.
:param string account_id: The account identifier.
:param string identifier: The network identifier of the remote subnet.
:param string cidr: The CIDR value of the remote subnet.
:return dict: Mapping of properties for the new remote subnet.
"""
return self.remote_subnet.createObject({
'accountId': account_id,
'cidr': cidr,
'networkIdentifier': identifier
}) | def function[create_remote_subnet, parameter[self, account_id, identifier, cidr]]:
constant[Creates a remote subnet on the given account.
:param string account_id: The account identifier.
:param string identifier: The network identifier of the remote subnet.
:param string cidr: The CIDR value of the remote subnet.
:return dict: Mapping of properties for the new remote subnet.
]
return[call[name[self].remote_subnet.createObject, parameter[dictionary[[<ast.Constant object at 0x7da18fe90a90>, <ast.Constant object at 0x7da18fe92620>, <ast.Constant object at 0x7da18fe92ad0>], [<ast.Name object at 0x7da18fe91390>, <ast.Name object at 0x7da20c7962c0>, <ast.Name object at 0x7da20c794eb0>]]]]] | keyword[def] identifier[create_remote_subnet] ( identifier[self] , identifier[account_id] , identifier[identifier] , identifier[cidr] ):
literal[string]
keyword[return] identifier[self] . identifier[remote_subnet] . identifier[createObject] ({
literal[string] : identifier[account_id] ,
literal[string] : identifier[cidr] ,
literal[string] : identifier[identifier]
}) | def create_remote_subnet(self, account_id, identifier, cidr):
"""Creates a remote subnet on the given account.
:param string account_id: The account identifier.
:param string identifier: The network identifier of the remote subnet.
:param string cidr: The CIDR value of the remote subnet.
:return dict: Mapping of properties for the new remote subnet.
"""
return self.remote_subnet.createObject({'accountId': account_id, 'cidr': cidr, 'networkIdentifier': identifier}) |
def run(self, target, args=()):
""" Run a function in a separate thread.
:param target: the function to run.
:param args: the parameters to pass to the function.
"""
run_event = threading.Event()
run_event.set()
thread = threading.Thread(target=target, args=args + (run_event, ))
self.thread_pool.append(thread)
self.run_events.append(run_event)
thread.start() | def function[run, parameter[self, target, args]]:
constant[ Run a function in a separate thread.
:param target: the function to run.
:param args: the parameters to pass to the function.
]
variable[run_event] assign[=] call[name[threading].Event, parameter[]]
call[name[run_event].set, parameter[]]
variable[thread] assign[=] call[name[threading].Thread, parameter[]]
call[name[self].thread_pool.append, parameter[name[thread]]]
call[name[self].run_events.append, parameter[name[run_event]]]
call[name[thread].start, parameter[]] | keyword[def] identifier[run] ( identifier[self] , identifier[target] , identifier[args] =()):
literal[string]
identifier[run_event] = identifier[threading] . identifier[Event] ()
identifier[run_event] . identifier[set] ()
identifier[thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[target] , identifier[args] = identifier[args] +( identifier[run_event] ,))
identifier[self] . identifier[thread_pool] . identifier[append] ( identifier[thread] )
identifier[self] . identifier[run_events] . identifier[append] ( identifier[run_event] )
identifier[thread] . identifier[start] () | def run(self, target, args=()):
""" Run a function in a separate thread.
:param target: the function to run.
:param args: the parameters to pass to the function.
"""
run_event = threading.Event()
run_event.set()
thread = threading.Thread(target=target, args=args + (run_event,))
self.thread_pool.append(thread)
self.run_events.append(run_event)
thread.start() |
def describe_target_health(target_group_arn, targets=None, client=None):
"""
Permission: elasticloadbalancing:DescribeTargetHealth
"""
kwargs = dict(TargetGroupArn=target_group_arn)
if targets:
kwargs.update(Targets=targets)
return client.describe_target_health(**kwargs)['TargetHealthDescriptions'] | def function[describe_target_health, parameter[target_group_arn, targets, client]]:
constant[
Permission: elasticloadbalancing:DescribeTargetHealth
]
variable[kwargs] assign[=] call[name[dict], parameter[]]
if name[targets] begin[:]
call[name[kwargs].update, parameter[]]
return[call[call[name[client].describe_target_health, parameter[]]][constant[TargetHealthDescriptions]]] | keyword[def] identifier[describe_target_health] ( identifier[target_group_arn] , identifier[targets] = keyword[None] , identifier[client] = keyword[None] ):
literal[string]
identifier[kwargs] = identifier[dict] ( identifier[TargetGroupArn] = identifier[target_group_arn] )
keyword[if] identifier[targets] :
identifier[kwargs] . identifier[update] ( identifier[Targets] = identifier[targets] )
keyword[return] identifier[client] . identifier[describe_target_health] (** identifier[kwargs] )[ literal[string] ] | def describe_target_health(target_group_arn, targets=None, client=None):
"""
Permission: elasticloadbalancing:DescribeTargetHealth
"""
kwargs = dict(TargetGroupArn=target_group_arn)
if targets:
kwargs.update(Targets=targets) # depends on [control=['if'], data=[]]
return client.describe_target_health(**kwargs)['TargetHealthDescriptions'] |
def count_ops(self):
"""Count the occurrences of operation names.
Returns a dictionary of counts keyed on the operation name.
"""
op_dict = {}
for node in self.topological_op_nodes():
name = node.name
if name not in op_dict:
op_dict[name] = 1
else:
op_dict[name] += 1
return op_dict | def function[count_ops, parameter[self]]:
constant[Count the occurrences of operation names.
Returns a dictionary of counts keyed on the operation name.
]
variable[op_dict] assign[=] dictionary[[], []]
for taget[name[node]] in starred[call[name[self].topological_op_nodes, parameter[]]] begin[:]
variable[name] assign[=] name[node].name
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[op_dict]] begin[:]
call[name[op_dict]][name[name]] assign[=] constant[1]
return[name[op_dict]] | keyword[def] identifier[count_ops] ( identifier[self] ):
literal[string]
identifier[op_dict] ={}
keyword[for] identifier[node] keyword[in] identifier[self] . identifier[topological_op_nodes] ():
identifier[name] = identifier[node] . identifier[name]
keyword[if] identifier[name] keyword[not] keyword[in] identifier[op_dict] :
identifier[op_dict] [ identifier[name] ]= literal[int]
keyword[else] :
identifier[op_dict] [ identifier[name] ]+= literal[int]
keyword[return] identifier[op_dict] | def count_ops(self):
"""Count the occurrences of operation names.
Returns a dictionary of counts keyed on the operation name.
"""
op_dict = {}
for node in self.topological_op_nodes():
name = node.name
if name not in op_dict:
op_dict[name] = 1 # depends on [control=['if'], data=['name', 'op_dict']]
else:
op_dict[name] += 1 # depends on [control=['for'], data=['node']]
return op_dict |
def style_layout(style_less,
theme='grade3',
cursorwidth=2,
cursorcolor='default',
cellwidth='980',
lineheight=170,
margins='auto',
vimext=False,
toolbar=False,
nbname=False,
kernellogo=False,
altprompt=False,
altmd=False,
altout=False,
hideprompt=False):
"""Set general layout and style properties of text and code cells"""
# write theme name to ~/.jupyter/custom/ (referenced by jtplot.py)
with fileOpen(theme_name_file, 'w') as f:
f.write(theme)
if (os.path.isdir(styles_dir_user) and
'{}.less'.format(theme) in os.listdir(styles_dir_user)):
theme_relpath = os.path.relpath(
os.path.join(styles_dir_user, theme), package_dir)
else:
theme_relpath = os.path.relpath(
os.path.join(styles_dir, theme), package_dir)
style_less += '@import "{}";\n'.format(theme_relpath)
textcell_bg = '@cc-input-bg'
promptText = '@input-prompt'
promptBG = '@cc-input-bg'
promptPadding = '.25em'
promptBorder = '2px solid @prompt-line'
tcPromptBorder = '2px solid @tc-prompt-std'
promptMinWidth = 11.5
outpromptMinWidth = promptMinWidth # remove + 3 since it will overlay output print() text
tcPromptWidth = promptMinWidth + 3
tcPromptFontsize = "@prompt-fontsize"
ccOutputBG = '@cc-output-bg-default'
if theme == 'grade3':
textcell_bg = '@notebook-bg'
if altprompt:
promptPadding = '.1em'
promptMinWidth = 8
outpromptMinWidth = promptMinWidth + 3
tcPromptWidth = promptMinWidth + 3
promptText = 'transparent'
tcPromptBorder = '2px solid transparent'
if altmd:
textcell_bg = '@notebook-bg'
tcPromptBorder = '2px dotted @tc-border-selected'
if altout:
ccOutputBG = '@notebook-bg'
if margins != 'auto':
margins = '{}px'.format(margins)
if '%' not in cellwidth:
cellwidth = str(cellwidth) + 'px'
style_less += '@container-margins: {};\n'.format(margins)
style_less += '@cell-width: {}; \n'.format(cellwidth)
style_less += '@cc-line-height: {}%; \n'.format(lineheight)
style_less += '@text-cell-bg: {}; \n'.format(textcell_bg)
style_less += '@cc-prompt-width: {}ex; \n'.format(promptMinWidth)
style_less += '@cc-prompt-bg: {}; \n'.format(promptBG)
style_less += '@cc-output-bg: {}; \n'.format(ccOutputBG)
style_less += '@prompt-text: {}; \n'.format(promptText)
style_less += '@prompt-padding: {}; \n'.format(promptPadding)
style_less += '@prompt-border: {}; \n'.format(promptBorder)
style_less += '@prompt-min-width: {}ex; \n'.format(promptMinWidth)
style_less += '@out-prompt-min-width: {}ex; \n'.format(outpromptMinWidth)
style_less += '@tc-prompt-width: {}ex; \n'.format(tcPromptWidth)
style_less += '@tc-prompt-border: {}; \n'.format(tcPromptBorder)
style_less += '@cursor-width: {}px; \n'.format(cursorwidth)
style_less += '@cursor-info: @cursor-width solid {}; \n'.format(
cursorcolor)
style_less += '@tc-prompt-fontsize: {}; \n'.format(tcPromptFontsize)
style_less += '\n\n'
# read-in notebook.less (general nb style)
with fileOpen(nb_style, 'r') as notebook:
style_less += notebook.read() + '\n'
# read-in cells.less (cell layout)
with fileOpen(cl_style, 'r') as cells:
style_less += cells.read() + '\n'
# read-in extras.less (misc layout)
with fileOpen(ex_style, 'r') as extras:
style_less += extras.read() + '\n'
# read-in codemirror.less (syntax-highlighting)
with fileOpen(cm_style, 'r') as codemirror:
style_less += codemirror.read() + '\n'
with fileOpen(comp_style, 'r') as codemirror:
style_less += codemirror.read() + '\n'
style_less += toggle_settings(
toolbar, nbname, hideprompt, kernellogo) + '\n'
if vimext:
set_vim_style(theme)
return style_less | def function[style_layout, parameter[style_less, theme, cursorwidth, cursorcolor, cellwidth, lineheight, margins, vimext, toolbar, nbname, kernellogo, altprompt, altmd, altout, hideprompt]]:
constant[Set general layout and style properties of text and code cells]
with call[name[fileOpen], parameter[name[theme_name_file], constant[w]]] begin[:]
call[name[f].write, parameter[name[theme]]]
if <ast.BoolOp object at 0x7da18dc9beb0> begin[:]
variable[theme_relpath] assign[=] call[name[os].path.relpath, parameter[call[name[os].path.join, parameter[name[styles_dir_user], name[theme]]], name[package_dir]]]
<ast.AugAssign object at 0x7da18dc9ace0>
variable[textcell_bg] assign[=] constant[@cc-input-bg]
variable[promptText] assign[=] constant[@input-prompt]
variable[promptBG] assign[=] constant[@cc-input-bg]
variable[promptPadding] assign[=] constant[.25em]
variable[promptBorder] assign[=] constant[2px solid @prompt-line]
variable[tcPromptBorder] assign[=] constant[2px solid @tc-prompt-std]
variable[promptMinWidth] assign[=] constant[11.5]
variable[outpromptMinWidth] assign[=] name[promptMinWidth]
variable[tcPromptWidth] assign[=] binary_operation[name[promptMinWidth] + constant[3]]
variable[tcPromptFontsize] assign[=] constant[@prompt-fontsize]
variable[ccOutputBG] assign[=] constant[@cc-output-bg-default]
if compare[name[theme] equal[==] constant[grade3]] begin[:]
variable[textcell_bg] assign[=] constant[@notebook-bg]
if name[altprompt] begin[:]
variable[promptPadding] assign[=] constant[.1em]
variable[promptMinWidth] assign[=] constant[8]
variable[outpromptMinWidth] assign[=] binary_operation[name[promptMinWidth] + constant[3]]
variable[tcPromptWidth] assign[=] binary_operation[name[promptMinWidth] + constant[3]]
variable[promptText] assign[=] constant[transparent]
variable[tcPromptBorder] assign[=] constant[2px solid transparent]
if name[altmd] begin[:]
variable[textcell_bg] assign[=] constant[@notebook-bg]
variable[tcPromptBorder] assign[=] constant[2px dotted @tc-border-selected]
if name[altout] begin[:]
variable[ccOutputBG] assign[=] constant[@notebook-bg]
if compare[name[margins] not_equal[!=] constant[auto]] begin[:]
variable[margins] assign[=] call[constant[{}px].format, parameter[name[margins]]]
if compare[constant[%] <ast.NotIn object at 0x7da2590d7190> name[cellwidth]] begin[:]
variable[cellwidth] assign[=] binary_operation[call[name[str], parameter[name[cellwidth]]] + constant[px]]
<ast.AugAssign object at 0x7da18dc9ba90>
<ast.AugAssign object at 0x7da18dc99a20>
<ast.AugAssign object at 0x7da18dc98250>
<ast.AugAssign object at 0x7da18dc9a7d0>
<ast.AugAssign object at 0x7da18dc98bb0>
<ast.AugAssign object at 0x7da18dc9b5e0>
<ast.AugAssign object at 0x7da18dc98d60>
<ast.AugAssign object at 0x7da18dc99b70>
<ast.AugAssign object at 0x7da18dc9b970>
<ast.AugAssign object at 0x7da18dc99090>
<ast.AugAssign object at 0x7da18dc9a380>
<ast.AugAssign object at 0x7da18dc98ca0>
<ast.AugAssign object at 0x7da207f00820>
<ast.AugAssign object at 0x7da207f03be0>
<ast.AugAssign object at 0x7da207f02710>
<ast.AugAssign object at 0x7da207f03df0>
<ast.AugAssign object at 0x7da207f00250>
<ast.AugAssign object at 0x7da207f000d0>
with call[name[fileOpen], parameter[name[nb_style], constant[r]]] begin[:]
<ast.AugAssign object at 0x7da207f00ca0>
with call[name[fileOpen], parameter[name[cl_style], constant[r]]] begin[:]
<ast.AugAssign object at 0x7da207f03b50>
with call[name[fileOpen], parameter[name[ex_style], constant[r]]] begin[:]
<ast.AugAssign object at 0x7da207f00760>
with call[name[fileOpen], parameter[name[cm_style], constant[r]]] begin[:]
<ast.AugAssign object at 0x7da207f01000>
with call[name[fileOpen], parameter[name[comp_style], constant[r]]] begin[:]
<ast.AugAssign object at 0x7da207f032b0>
<ast.AugAssign object at 0x7da207f02ad0>
if name[vimext] begin[:]
call[name[set_vim_style], parameter[name[theme]]]
return[name[style_less]] | keyword[def] identifier[style_layout] ( identifier[style_less] ,
identifier[theme] = literal[string] ,
identifier[cursorwidth] = literal[int] ,
identifier[cursorcolor] = literal[string] ,
identifier[cellwidth] = literal[string] ,
identifier[lineheight] = literal[int] ,
identifier[margins] = literal[string] ,
identifier[vimext] = keyword[False] ,
identifier[toolbar] = keyword[False] ,
identifier[nbname] = keyword[False] ,
identifier[kernellogo] = keyword[False] ,
identifier[altprompt] = keyword[False] ,
identifier[altmd] = keyword[False] ,
identifier[altout] = keyword[False] ,
identifier[hideprompt] = keyword[False] ):
literal[string]
keyword[with] identifier[fileOpen] ( identifier[theme_name_file] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[theme] )
keyword[if] ( identifier[os] . identifier[path] . identifier[isdir] ( identifier[styles_dir_user] ) keyword[and]
literal[string] . identifier[format] ( identifier[theme] ) keyword[in] identifier[os] . identifier[listdir] ( identifier[styles_dir_user] )):
identifier[theme_relpath] = identifier[os] . identifier[path] . identifier[relpath] (
identifier[os] . identifier[path] . identifier[join] ( identifier[styles_dir_user] , identifier[theme] ), identifier[package_dir] )
keyword[else] :
identifier[theme_relpath] = identifier[os] . identifier[path] . identifier[relpath] (
identifier[os] . identifier[path] . identifier[join] ( identifier[styles_dir] , identifier[theme] ), identifier[package_dir] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[theme_relpath] )
identifier[textcell_bg] = literal[string]
identifier[promptText] = literal[string]
identifier[promptBG] = literal[string]
identifier[promptPadding] = literal[string]
identifier[promptBorder] = literal[string]
identifier[tcPromptBorder] = literal[string]
identifier[promptMinWidth] = literal[int]
identifier[outpromptMinWidth] = identifier[promptMinWidth]
identifier[tcPromptWidth] = identifier[promptMinWidth] + literal[int]
identifier[tcPromptFontsize] = literal[string]
identifier[ccOutputBG] = literal[string]
keyword[if] identifier[theme] == literal[string] :
identifier[textcell_bg] = literal[string]
keyword[if] identifier[altprompt] :
identifier[promptPadding] = literal[string]
identifier[promptMinWidth] = literal[int]
identifier[outpromptMinWidth] = identifier[promptMinWidth] + literal[int]
identifier[tcPromptWidth] = identifier[promptMinWidth] + literal[int]
identifier[promptText] = literal[string]
identifier[tcPromptBorder] = literal[string]
keyword[if] identifier[altmd] :
identifier[textcell_bg] = literal[string]
identifier[tcPromptBorder] = literal[string]
keyword[if] identifier[altout] :
identifier[ccOutputBG] = literal[string]
keyword[if] identifier[margins] != literal[string] :
identifier[margins] = literal[string] . identifier[format] ( identifier[margins] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[cellwidth] :
identifier[cellwidth] = identifier[str] ( identifier[cellwidth] )+ literal[string]
identifier[style_less] += literal[string] . identifier[format] ( identifier[margins] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[cellwidth] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[lineheight] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[textcell_bg] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[promptMinWidth] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[promptBG] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[ccOutputBG] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[promptText] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[promptPadding] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[promptBorder] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[promptMinWidth] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[outpromptMinWidth] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[tcPromptWidth] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[tcPromptBorder] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[cursorwidth] )
identifier[style_less] += literal[string] . identifier[format] (
identifier[cursorcolor] )
identifier[style_less] += literal[string] . identifier[format] ( identifier[tcPromptFontsize] )
identifier[style_less] += literal[string]
keyword[with] identifier[fileOpen] ( identifier[nb_style] , literal[string] ) keyword[as] identifier[notebook] :
identifier[style_less] += identifier[notebook] . identifier[read] ()+ literal[string]
keyword[with] identifier[fileOpen] ( identifier[cl_style] , literal[string] ) keyword[as] identifier[cells] :
identifier[style_less] += identifier[cells] . identifier[read] ()+ literal[string]
keyword[with] identifier[fileOpen] ( identifier[ex_style] , literal[string] ) keyword[as] identifier[extras] :
identifier[style_less] += identifier[extras] . identifier[read] ()+ literal[string]
keyword[with] identifier[fileOpen] ( identifier[cm_style] , literal[string] ) keyword[as] identifier[codemirror] :
identifier[style_less] += identifier[codemirror] . identifier[read] ()+ literal[string]
keyword[with] identifier[fileOpen] ( identifier[comp_style] , literal[string] ) keyword[as] identifier[codemirror] :
identifier[style_less] += identifier[codemirror] . identifier[read] ()+ literal[string]
identifier[style_less] += identifier[toggle_settings] (
identifier[toolbar] , identifier[nbname] , identifier[hideprompt] , identifier[kernellogo] )+ literal[string]
keyword[if] identifier[vimext] :
identifier[set_vim_style] ( identifier[theme] )
keyword[return] identifier[style_less] | def style_layout(style_less, theme='grade3', cursorwidth=2, cursorcolor='default', cellwidth='980', lineheight=170, margins='auto', vimext=False, toolbar=False, nbname=False, kernellogo=False, altprompt=False, altmd=False, altout=False, hideprompt=False):
"""Set general layout and style properties of text and code cells"""
# write theme name to ~/.jupyter/custom/ (referenced by jtplot.py)
with fileOpen(theme_name_file, 'w') as f:
f.write(theme) # depends on [control=['with'], data=['f']]
if os.path.isdir(styles_dir_user) and '{}.less'.format(theme) in os.listdir(styles_dir_user):
theme_relpath = os.path.relpath(os.path.join(styles_dir_user, theme), package_dir) # depends on [control=['if'], data=[]]
else:
theme_relpath = os.path.relpath(os.path.join(styles_dir, theme), package_dir)
style_less += '@import "{}";\n'.format(theme_relpath)
textcell_bg = '@cc-input-bg'
promptText = '@input-prompt'
promptBG = '@cc-input-bg'
promptPadding = '.25em'
promptBorder = '2px solid @prompt-line'
tcPromptBorder = '2px solid @tc-prompt-std'
promptMinWidth = 11.5
outpromptMinWidth = promptMinWidth # remove + 3 since it will overlay output print() text
tcPromptWidth = promptMinWidth + 3
tcPromptFontsize = '@prompt-fontsize'
ccOutputBG = '@cc-output-bg-default'
if theme == 'grade3':
textcell_bg = '@notebook-bg' # depends on [control=['if'], data=[]]
if altprompt:
promptPadding = '.1em'
promptMinWidth = 8
outpromptMinWidth = promptMinWidth + 3
tcPromptWidth = promptMinWidth + 3
promptText = 'transparent'
tcPromptBorder = '2px solid transparent' # depends on [control=['if'], data=[]]
if altmd:
textcell_bg = '@notebook-bg'
tcPromptBorder = '2px dotted @tc-border-selected' # depends on [control=['if'], data=[]]
if altout:
ccOutputBG = '@notebook-bg' # depends on [control=['if'], data=[]]
if margins != 'auto':
margins = '{}px'.format(margins) # depends on [control=['if'], data=['margins']]
if '%' not in cellwidth:
cellwidth = str(cellwidth) + 'px' # depends on [control=['if'], data=['cellwidth']]
style_less += '@container-margins: {};\n'.format(margins)
style_less += '@cell-width: {}; \n'.format(cellwidth)
style_less += '@cc-line-height: {}%; \n'.format(lineheight)
style_less += '@text-cell-bg: {}; \n'.format(textcell_bg)
style_less += '@cc-prompt-width: {}ex; \n'.format(promptMinWidth)
style_less += '@cc-prompt-bg: {}; \n'.format(promptBG)
style_less += '@cc-output-bg: {}; \n'.format(ccOutputBG)
style_less += '@prompt-text: {}; \n'.format(promptText)
style_less += '@prompt-padding: {}; \n'.format(promptPadding)
style_less += '@prompt-border: {}; \n'.format(promptBorder)
style_less += '@prompt-min-width: {}ex; \n'.format(promptMinWidth)
style_less += '@out-prompt-min-width: {}ex; \n'.format(outpromptMinWidth)
style_less += '@tc-prompt-width: {}ex; \n'.format(tcPromptWidth)
style_less += '@tc-prompt-border: {}; \n'.format(tcPromptBorder)
style_less += '@cursor-width: {}px; \n'.format(cursorwidth)
style_less += '@cursor-info: @cursor-width solid {}; \n'.format(cursorcolor)
style_less += '@tc-prompt-fontsize: {}; \n'.format(tcPromptFontsize)
style_less += '\n\n'
# read-in notebook.less (general nb style)
with fileOpen(nb_style, 'r') as notebook:
style_less += notebook.read() + '\n' # depends on [control=['with'], data=['notebook']]
# read-in cells.less (cell layout)
with fileOpen(cl_style, 'r') as cells:
style_less += cells.read() + '\n' # depends on [control=['with'], data=['cells']]
# read-in extras.less (misc layout)
with fileOpen(ex_style, 'r') as extras:
style_less += extras.read() + '\n' # depends on [control=['with'], data=['extras']]
# read-in codemirror.less (syntax-highlighting)
with fileOpen(cm_style, 'r') as codemirror:
style_less += codemirror.read() + '\n' # depends on [control=['with'], data=['codemirror']]
with fileOpen(comp_style, 'r') as codemirror:
style_less += codemirror.read() + '\n' # depends on [control=['with'], data=['codemirror']]
style_less += toggle_settings(toolbar, nbname, hideprompt, kernellogo) + '\n'
if vimext:
set_vim_style(theme) # depends on [control=['if'], data=[]]
return style_less |
def resolve(self, pointer):
"""Resolve from documents.
:param pointer: foo
:type pointer: DocumentPointer
"""
dp = DocumentPointer(pointer)
obj, fetcher = self.prototype(dp)
for token in dp.pointer:
obj = token.extract(obj, bypass_ref=True)
reference = ref(obj)
if reference:
obj = fetcher.resolve(reference)
return obj | def function[resolve, parameter[self, pointer]]:
constant[Resolve from documents.
:param pointer: foo
:type pointer: DocumentPointer
]
variable[dp] assign[=] call[name[DocumentPointer], parameter[name[pointer]]]
<ast.Tuple object at 0x7da2054a4490> assign[=] call[name[self].prototype, parameter[name[dp]]]
for taget[name[token]] in starred[name[dp].pointer] begin[:]
variable[obj] assign[=] call[name[token].extract, parameter[name[obj]]]
variable[reference] assign[=] call[name[ref], parameter[name[obj]]]
if name[reference] begin[:]
variable[obj] assign[=] call[name[fetcher].resolve, parameter[name[reference]]]
return[name[obj]] | keyword[def] identifier[resolve] ( identifier[self] , identifier[pointer] ):
literal[string]
identifier[dp] = identifier[DocumentPointer] ( identifier[pointer] )
identifier[obj] , identifier[fetcher] = identifier[self] . identifier[prototype] ( identifier[dp] )
keyword[for] identifier[token] keyword[in] identifier[dp] . identifier[pointer] :
identifier[obj] = identifier[token] . identifier[extract] ( identifier[obj] , identifier[bypass_ref] = keyword[True] )
identifier[reference] = identifier[ref] ( identifier[obj] )
keyword[if] identifier[reference] :
identifier[obj] = identifier[fetcher] . identifier[resolve] ( identifier[reference] )
keyword[return] identifier[obj] | def resolve(self, pointer):
"""Resolve from documents.
:param pointer: foo
:type pointer: DocumentPointer
"""
dp = DocumentPointer(pointer)
(obj, fetcher) = self.prototype(dp)
for token in dp.pointer:
obj = token.extract(obj, bypass_ref=True)
reference = ref(obj)
if reference:
obj = fetcher.resolve(reference) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['token']]
return obj |
def hash_distance(left_hash, right_hash):
"""Compute the hamming distance between two hashes"""
if len(left_hash) != len(right_hash):
raise ValueError('Hamming distance requires two strings of equal length')
return sum(map(lambda x: 0 if x[0] == x[1] else 1, zip(left_hash, right_hash))) | def function[hash_distance, parameter[left_hash, right_hash]]:
constant[Compute the hamming distance between two hashes]
if compare[call[name[len], parameter[name[left_hash]]] not_equal[!=] call[name[len], parameter[name[right_hash]]]] begin[:]
<ast.Raise object at 0x7da1b03fab00>
return[call[name[sum], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b03fa560>, call[name[zip], parameter[name[left_hash], name[right_hash]]]]]]]] | keyword[def] identifier[hash_distance] ( identifier[left_hash] , identifier[right_hash] ):
literal[string]
keyword[if] identifier[len] ( identifier[left_hash] )!= identifier[len] ( identifier[right_hash] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[sum] ( identifier[map] ( keyword[lambda] identifier[x] : literal[int] keyword[if] identifier[x] [ literal[int] ]== identifier[x] [ literal[int] ] keyword[else] literal[int] , identifier[zip] ( identifier[left_hash] , identifier[right_hash] ))) | def hash_distance(left_hash, right_hash):
"""Compute the hamming distance between two hashes"""
if len(left_hash) != len(right_hash):
raise ValueError('Hamming distance requires two strings of equal length') # depends on [control=['if'], data=[]]
return sum(map(lambda x: 0 if x[0] == x[1] else 1, zip(left_hash, right_hash))) |
def matching_number_line_and_regex(source_lines, generated_regexes, max_line_count=15):
"""
The first line and its number (starting with 0) in the source code that
indicated that the source code is generated.
:param source_lines: lines of text to scan
:param generated_regexes: regular expressions a line must match to indicate
the source code is generated.
:param max_line_count: maximum number of lines to scan
:return: a tuple of the form ``(number, line, regex)`` or ``None`` if the
source lines do not match any ``generated_regexes``.
"""
initial_numbers_and_lines = enumerate(itertools.islice(source_lines, max_line_count))
matching_number_line_and_regexps = (
(number, line, matching_regex)
for number, line in initial_numbers_and_lines
for matching_regex in generated_regexes
if matching_regex.match(line)
)
possible_first_matching_number_line_and_regexp = list(
itertools.islice(matching_number_line_and_regexps, 1))
result = (possible_first_matching_number_line_and_regexp + [None])[0]
return result | def function[matching_number_line_and_regex, parameter[source_lines, generated_regexes, max_line_count]]:
constant[
The first line and its number (starting with 0) in the source code that
indicated that the source code is generated.
:param source_lines: lines of text to scan
:param generated_regexes: regular expressions a line must match to indicate
the source code is generated.
:param max_line_count: maximum number of lines to scan
:return: a tuple of the form ``(number, line, regex)`` or ``None`` if the
source lines do not match any ``generated_regexes``.
]
variable[initial_numbers_and_lines] assign[=] call[name[enumerate], parameter[call[name[itertools].islice, parameter[name[source_lines], name[max_line_count]]]]]
variable[matching_number_line_and_regexps] assign[=] <ast.GeneratorExp object at 0x7da1b26ae110>
variable[possible_first_matching_number_line_and_regexp] assign[=] call[name[list], parameter[call[name[itertools].islice, parameter[name[matching_number_line_and_regexps], constant[1]]]]]
variable[result] assign[=] call[binary_operation[name[possible_first_matching_number_line_and_regexp] + list[[<ast.Constant object at 0x7da2046230d0>]]]][constant[0]]
return[name[result]] | keyword[def] identifier[matching_number_line_and_regex] ( identifier[source_lines] , identifier[generated_regexes] , identifier[max_line_count] = literal[int] ):
literal[string]
identifier[initial_numbers_and_lines] = identifier[enumerate] ( identifier[itertools] . identifier[islice] ( identifier[source_lines] , identifier[max_line_count] ))
identifier[matching_number_line_and_regexps] =(
( identifier[number] , identifier[line] , identifier[matching_regex] )
keyword[for] identifier[number] , identifier[line] keyword[in] identifier[initial_numbers_and_lines]
keyword[for] identifier[matching_regex] keyword[in] identifier[generated_regexes]
keyword[if] identifier[matching_regex] . identifier[match] ( identifier[line] )
)
identifier[possible_first_matching_number_line_and_regexp] = identifier[list] (
identifier[itertools] . identifier[islice] ( identifier[matching_number_line_and_regexps] , literal[int] ))
identifier[result] =( identifier[possible_first_matching_number_line_and_regexp] +[ keyword[None] ])[ literal[int] ]
keyword[return] identifier[result] | def matching_number_line_and_regex(source_lines, generated_regexes, max_line_count=15):
"""
The first line and its number (starting with 0) in the source code that
indicated that the source code is generated.
:param source_lines: lines of text to scan
:param generated_regexes: regular expressions a line must match to indicate
the source code is generated.
:param max_line_count: maximum number of lines to scan
:return: a tuple of the form ``(number, line, regex)`` or ``None`` if the
source lines do not match any ``generated_regexes``.
"""
initial_numbers_and_lines = enumerate(itertools.islice(source_lines, max_line_count))
matching_number_line_and_regexps = ((number, line, matching_regex) for (number, line) in initial_numbers_and_lines for matching_regex in generated_regexes if matching_regex.match(line))
possible_first_matching_number_line_and_regexp = list(itertools.islice(matching_number_line_and_regexps, 1))
result = (possible_first_matching_number_line_and_regexp + [None])[0]
return result |
def shutdown(cls):
"""Close all connections on in all pools"""
for pid in list(cls._pools.keys()):
cls._pools[pid].shutdown()
LOGGER.info('Shutdown complete, all pooled connections closed') | def function[shutdown, parameter[cls]]:
constant[Close all connections on in all pools]
for taget[name[pid]] in starred[call[name[list], parameter[call[name[cls]._pools.keys, parameter[]]]]] begin[:]
call[call[name[cls]._pools][name[pid]].shutdown, parameter[]]
call[name[LOGGER].info, parameter[constant[Shutdown complete, all pooled connections closed]]] | keyword[def] identifier[shutdown] ( identifier[cls] ):
literal[string]
keyword[for] identifier[pid] keyword[in] identifier[list] ( identifier[cls] . identifier[_pools] . identifier[keys] ()):
identifier[cls] . identifier[_pools] [ identifier[pid] ]. identifier[shutdown] ()
identifier[LOGGER] . identifier[info] ( literal[string] ) | def shutdown(cls):
"""Close all connections on in all pools"""
for pid in list(cls._pools.keys()):
cls._pools[pid].shutdown() # depends on [control=['for'], data=['pid']]
LOGGER.info('Shutdown complete, all pooled connections closed') |
def get_data_files_dir():
"""
Find directory with data_files (sys.prefix or local PmagPy/data_files)
and return the path.
"""
if 'data_files' in os.listdir(sys.prefix):
return os.path.join(sys.prefix, 'data_files')
else:
return os.path.join(get_pmag_dir(), 'data_files') | def function[get_data_files_dir, parameter[]]:
constant[
Find directory with data_files (sys.prefix or local PmagPy/data_files)
and return the path.
]
if compare[constant[data_files] in call[name[os].listdir, parameter[name[sys].prefix]]] begin[:]
return[call[name[os].path.join, parameter[name[sys].prefix, constant[data_files]]]] | keyword[def] identifier[get_data_files_dir] ():
literal[string]
keyword[if] literal[string] keyword[in] identifier[os] . identifier[listdir] ( identifier[sys] . identifier[prefix] ):
keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[sys] . identifier[prefix] , literal[string] )
keyword[else] :
keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[get_pmag_dir] (), literal[string] ) | def get_data_files_dir():
"""
Find directory with data_files (sys.prefix or local PmagPy/data_files)
and return the path.
"""
if 'data_files' in os.listdir(sys.prefix):
return os.path.join(sys.prefix, 'data_files') # depends on [control=['if'], data=[]]
else:
return os.path.join(get_pmag_dir(), 'data_files') |
def write(self, output_filepath):
"""
serialize the ExmaraldaFile instance and write it to a file.
Parameters
----------
output_filepath : str
relative or absolute path to the Exmaralda file to be created
"""
with open(output_filepath, 'w') as out_file:
out_file.write(self.__str__()) | def function[write, parameter[self, output_filepath]]:
constant[
serialize the ExmaraldaFile instance and write it to a file.
Parameters
----------
output_filepath : str
relative or absolute path to the Exmaralda file to be created
]
with call[name[open], parameter[name[output_filepath], constant[w]]] begin[:]
call[name[out_file].write, parameter[call[name[self].__str__, parameter[]]]] | keyword[def] identifier[write] ( identifier[self] , identifier[output_filepath] ):
literal[string]
keyword[with] identifier[open] ( identifier[output_filepath] , literal[string] ) keyword[as] identifier[out_file] :
identifier[out_file] . identifier[write] ( identifier[self] . identifier[__str__] ()) | def write(self, output_filepath):
"""
serialize the ExmaraldaFile instance and write it to a file.
Parameters
----------
output_filepath : str
relative or absolute path to the Exmaralda file to be created
"""
with open(output_filepath, 'w') as out_file:
out_file.write(self.__str__()) # depends on [control=['with'], data=['out_file']] |
def double_typos(self):
"""letter combinations two typos away from word"""
return {e2 for e1 in self.typos()
for e2 in Word(e1).typos()} | def function[double_typos, parameter[self]]:
constant[letter combinations two typos away from word]
return[<ast.SetComp object at 0x7da20c992260>] | keyword[def] identifier[double_typos] ( identifier[self] ):
literal[string]
keyword[return] { identifier[e2] keyword[for] identifier[e1] keyword[in] identifier[self] . identifier[typos] ()
keyword[for] identifier[e2] keyword[in] identifier[Word] ( identifier[e1] ). identifier[typos] ()} | def double_typos(self):
"""letter combinations two typos away from word"""
return {e2 for e1 in self.typos() for e2 in Word(e1).typos()} |
def average_convergence_of_1_radius_in_units(self, unit_length='arcsec', kpc_per_arcsec=None):
"""The radius a critical curve forms for this mass profile, e.g. where the mean convergence is equal to 1.0.
In case of ellipitical mass profiles, the 'average' critical curve is used, whereby the convergence is \
rescaled into a circle using the axis ratio.
This radius corresponds to the Einstein radius of the mass profile, and is a property of a number of \
mass profiles below.
"""
def func(radius):
radius = dim.Length(radius, unit_length=unit_length)
return self.mass_within_circle_in_units(radius=radius) - np.pi * radius ** 2.0
radius = self.ellipticity_rescale * root_scalar(func, bracket=[1e-4, 1000.0]).root
radius = dim.Length(radius, unit_length)
return radius.convert(unit_length=unit_length, kpc_per_arcsec=kpc_per_arcsec) | def function[average_convergence_of_1_radius_in_units, parameter[self, unit_length, kpc_per_arcsec]]:
constant[The radius a critical curve forms for this mass profile, e.g. where the mean convergence is equal to 1.0.
In case of ellipitical mass profiles, the 'average' critical curve is used, whereby the convergence is rescaled into a circle using the axis ratio.
This radius corresponds to the Einstein radius of the mass profile, and is a property of a number of mass profiles below.
]
def function[func, parameter[radius]]:
variable[radius] assign[=] call[name[dim].Length, parameter[name[radius]]]
return[binary_operation[call[name[self].mass_within_circle_in_units, parameter[]] - binary_operation[name[np].pi * binary_operation[name[radius] ** constant[2.0]]]]]
variable[radius] assign[=] binary_operation[name[self].ellipticity_rescale * call[name[root_scalar], parameter[name[func]]].root]
variable[radius] assign[=] call[name[dim].Length, parameter[name[radius], name[unit_length]]]
return[call[name[radius].convert, parameter[]]] | keyword[def] identifier[average_convergence_of_1_radius_in_units] ( identifier[self] , identifier[unit_length] = literal[string] , identifier[kpc_per_arcsec] = keyword[None] ):
literal[string]
keyword[def] identifier[func] ( identifier[radius] ):
identifier[radius] = identifier[dim] . identifier[Length] ( identifier[radius] , identifier[unit_length] = identifier[unit_length] )
keyword[return] identifier[self] . identifier[mass_within_circle_in_units] ( identifier[radius] = identifier[radius] )- identifier[np] . identifier[pi] * identifier[radius] ** literal[int]
identifier[radius] = identifier[self] . identifier[ellipticity_rescale] * identifier[root_scalar] ( identifier[func] , identifier[bracket] =[ literal[int] , literal[int] ]). identifier[root]
identifier[radius] = identifier[dim] . identifier[Length] ( identifier[radius] , identifier[unit_length] )
keyword[return] identifier[radius] . identifier[convert] ( identifier[unit_length] = identifier[unit_length] , identifier[kpc_per_arcsec] = identifier[kpc_per_arcsec] ) | def average_convergence_of_1_radius_in_units(self, unit_length='arcsec', kpc_per_arcsec=None):
"""The radius a critical curve forms for this mass profile, e.g. where the mean convergence is equal to 1.0.
In case of ellipitical mass profiles, the 'average' critical curve is used, whereby the convergence is rescaled into a circle using the axis ratio.
This radius corresponds to the Einstein radius of the mass profile, and is a property of a number of mass profiles below.
"""
def func(radius):
radius = dim.Length(radius, unit_length=unit_length)
return self.mass_within_circle_in_units(radius=radius) - np.pi * radius ** 2.0
radius = self.ellipticity_rescale * root_scalar(func, bracket=[0.0001, 1000.0]).root
radius = dim.Length(radius, unit_length)
return radius.convert(unit_length=unit_length, kpc_per_arcsec=kpc_per_arcsec) |
def call(self, *values, **named_values):
"""
Call an expression to evaluate it at the given point.
Future improvements: I would like if func and signature could be buffered after the
first call so they don't have to be recalculated for every call. However, nothing
can be stored on self as sympy uses __slots__ for efficiency. This means there is no
instance dict to put stuff in! And I'm pretty sure it's ill advised to hack into the
__slots__ of Expr.
However, for the moment I don't really notice a performance penalty in running tests.
p.s. In the current setup signature is not even needed since no introspection is possible
on the Expr before calling it anyway, which makes calculating the signature absolutely useless.
However, I hope that someday some monkey patching expert in shining armour comes by and finds
a way to store it in __signature__ upon __init__ of any ``symfit`` expr such that calling
inspect_sig.signature on a symbolic expression will tell you which arguments to provide.
:param self: Any subclass of sympy.Expr
:param values: Values for the Parameters and Variables of the Expr.
:param named_values: Values for the vars and params by name. ``named_values`` is
allowed to contain too many values, as this sometimes happens when using
\*\*fit_result.params on a submodel. The irrelevant params are simply ignored.
:return: The function evaluated at ``values``. The type depends entirely on the input.
Typically an array or a float but nothing is enforced.
"""
independent_vars, params = seperate_symbols(self)
# Convert to a pythonic function
func = sympy_to_py(self, independent_vars + params)
# Handle args and kwargs according to the allowed names.
parameters = [ # Note that these are inspect_sig.Parameter's, not symfit parameters!
inspect_sig.Parameter(arg.name, inspect_sig.Parameter.POSITIONAL_OR_KEYWORD)
for arg in independent_vars + params
]
arg_names = [arg.name for arg in independent_vars + params]
relevant_named_values = {
name: value for name, value in named_values.items() if name in arg_names
}
signature = inspect_sig.Signature(parameters=parameters)
bound_arguments = signature.bind(*values, **relevant_named_values)
return func(**bound_arguments.arguments) | def function[call, parameter[self]]:
constant[
Call an expression to evaluate it at the given point.
Future improvements: I would like if func and signature could be buffered after the
first call so they don't have to be recalculated for every call. However, nothing
can be stored on self as sympy uses __slots__ for efficiency. This means there is no
instance dict to put stuff in! And I'm pretty sure it's ill advised to hack into the
__slots__ of Expr.
However, for the moment I don't really notice a performance penalty in running tests.
p.s. In the current setup signature is not even needed since no introspection is possible
on the Expr before calling it anyway, which makes calculating the signature absolutely useless.
However, I hope that someday some monkey patching expert in shining armour comes by and finds
a way to store it in __signature__ upon __init__ of any ``symfit`` expr such that calling
inspect_sig.signature on a symbolic expression will tell you which arguments to provide.
:param self: Any subclass of sympy.Expr
:param values: Values for the Parameters and Variables of the Expr.
:param named_values: Values for the vars and params by name. ``named_values`` is
allowed to contain too many values, as this sometimes happens when using
\*\*fit_result.params on a submodel. The irrelevant params are simply ignored.
:return: The function evaluated at ``values``. The type depends entirely on the input.
Typically an array or a float but nothing is enforced.
]
<ast.Tuple object at 0x7da20e74b6a0> assign[=] call[name[seperate_symbols], parameter[name[self]]]
variable[func] assign[=] call[name[sympy_to_py], parameter[name[self], binary_operation[name[independent_vars] + name[params]]]]
variable[parameters] assign[=] <ast.ListComp object at 0x7da20e7484f0>
variable[arg_names] assign[=] <ast.ListComp object at 0x7da207f029e0>
variable[relevant_named_values] assign[=] <ast.DictComp object at 0x7da207f003d0>
variable[signature] assign[=] call[name[inspect_sig].Signature, parameter[]]
variable[bound_arguments] assign[=] call[name[signature].bind, parameter[<ast.Starred object at 0x7da207f00a60>]]
return[call[name[func], parameter[]]] | keyword[def] identifier[call] ( identifier[self] ,* identifier[values] ,** identifier[named_values] ):
literal[string]
identifier[independent_vars] , identifier[params] = identifier[seperate_symbols] ( identifier[self] )
identifier[func] = identifier[sympy_to_py] ( identifier[self] , identifier[independent_vars] + identifier[params] )
identifier[parameters] =[
identifier[inspect_sig] . identifier[Parameter] ( identifier[arg] . identifier[name] , identifier[inspect_sig] . identifier[Parameter] . identifier[POSITIONAL_OR_KEYWORD] )
keyword[for] identifier[arg] keyword[in] identifier[independent_vars] + identifier[params]
]
identifier[arg_names] =[ identifier[arg] . identifier[name] keyword[for] identifier[arg] keyword[in] identifier[independent_vars] + identifier[params] ]
identifier[relevant_named_values] ={
identifier[name] : identifier[value] keyword[for] identifier[name] , identifier[value] keyword[in] identifier[named_values] . identifier[items] () keyword[if] identifier[name] keyword[in] identifier[arg_names]
}
identifier[signature] = identifier[inspect_sig] . identifier[Signature] ( identifier[parameters] = identifier[parameters] )
identifier[bound_arguments] = identifier[signature] . identifier[bind] (* identifier[values] ,** identifier[relevant_named_values] )
keyword[return] identifier[func] (** identifier[bound_arguments] . identifier[arguments] ) | def call(self, *values, **named_values):
"""
Call an expression to evaluate it at the given point.
Future improvements: I would like if func and signature could be buffered after the
first call so they don't have to be recalculated for every call. However, nothing
can be stored on self as sympy uses __slots__ for efficiency. This means there is no
instance dict to put stuff in! And I'm pretty sure it's ill advised to hack into the
__slots__ of Expr.
However, for the moment I don't really notice a performance penalty in running tests.
p.s. In the current setup signature is not even needed since no introspection is possible
on the Expr before calling it anyway, which makes calculating the signature absolutely useless.
However, I hope that someday some monkey patching expert in shining armour comes by and finds
a way to store it in __signature__ upon __init__ of any ``symfit`` expr such that calling
inspect_sig.signature on a symbolic expression will tell you which arguments to provide.
:param self: Any subclass of sympy.Expr
:param values: Values for the Parameters and Variables of the Expr.
:param named_values: Values for the vars and params by name. ``named_values`` is
allowed to contain too many values, as this sometimes happens when using
\\*\\*fit_result.params on a submodel. The irrelevant params are simply ignored.
:return: The function evaluated at ``values``. The type depends entirely on the input.
Typically an array or a float but nothing is enforced.
"""
(independent_vars, params) = seperate_symbols(self)
# Convert to a pythonic function
func = sympy_to_py(self, independent_vars + params)
# Handle args and kwargs according to the allowed names.
# Note that these are inspect_sig.Parameter's, not symfit parameters!
parameters = [inspect_sig.Parameter(arg.name, inspect_sig.Parameter.POSITIONAL_OR_KEYWORD) for arg in independent_vars + params]
arg_names = [arg.name for arg in independent_vars + params]
relevant_named_values = {name: value for (name, value) in named_values.items() if name in arg_names}
signature = inspect_sig.Signature(parameters=parameters)
bound_arguments = signature.bind(*values, **relevant_named_values)
return func(**bound_arguments.arguments) |
def service(self):
""" Returns a Splunk service object for this command invocation or None.
The service object is created from the Splunkd URI and authentication
token passed to the command invocation in the search results info file.
This data is not passed to a command invocation by default. You must
request it by specifying this pair of configuration settings in
commands.conf:
.. code-block:: python
enableheader=true
requires_srinfo=true
The :code:`enableheader` setting is :code:`true` by default. Hence, you
need not set it. The :code:`requires_srinfo` setting is false by
default. Hence, you must set it.
:return: :class:`splunklib.client.Service`, if :code:`enableheader` and
:code:`requires_srinfo` are both :code:`true`. Otherwise, if either
:code:`enableheader` or :code:`requires_srinfo` are :code:`false`,
a value of :code:`None` is returned.
"""
if self._service is not None:
return self._service
info = self.search_results_info
if info is None:
return None
splunkd = urlsplit(info.splunkd_uri, info.splunkd_protocol, allow_fragments=False)
self._service = Service(
scheme=splunkd.scheme, host=splunkd.hostname, port=splunkd.port, token=info.auth_token, app=info.ppc_app)
return self._service | def function[service, parameter[self]]:
constant[ Returns a Splunk service object for this command invocation or None.
The service object is created from the Splunkd URI and authentication
token passed to the command invocation in the search results info file.
This data is not passed to a command invocation by default. You must
request it by specifying this pair of configuration settings in
commands.conf:
.. code-block:: python
enableheader=true
requires_srinfo=true
The :code:`enableheader` setting is :code:`true` by default. Hence, you
need not set it. The :code:`requires_srinfo` setting is false by
default. Hence, you must set it.
:return: :class:`splunklib.client.Service`, if :code:`enableheader` and
:code:`requires_srinfo` are both :code:`true`. Otherwise, if either
:code:`enableheader` or :code:`requires_srinfo` are :code:`false`,
a value of :code:`None` is returned.
]
if compare[name[self]._service is_not constant[None]] begin[:]
return[name[self]._service]
variable[info] assign[=] name[self].search_results_info
if compare[name[info] is constant[None]] begin[:]
return[constant[None]]
variable[splunkd] assign[=] call[name[urlsplit], parameter[name[info].splunkd_uri, name[info].splunkd_protocol]]
name[self]._service assign[=] call[name[Service], parameter[]]
return[name[self]._service] | keyword[def] identifier[service] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_service] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_service]
identifier[info] = identifier[self] . identifier[search_results_info]
keyword[if] identifier[info] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[splunkd] = identifier[urlsplit] ( identifier[info] . identifier[splunkd_uri] , identifier[info] . identifier[splunkd_protocol] , identifier[allow_fragments] = keyword[False] )
identifier[self] . identifier[_service] = identifier[Service] (
identifier[scheme] = identifier[splunkd] . identifier[scheme] , identifier[host] = identifier[splunkd] . identifier[hostname] , identifier[port] = identifier[splunkd] . identifier[port] , identifier[token] = identifier[info] . identifier[auth_token] , identifier[app] = identifier[info] . identifier[ppc_app] )
keyword[return] identifier[self] . identifier[_service] | def service(self):
""" Returns a Splunk service object for this command invocation or None.
The service object is created from the Splunkd URI and authentication
token passed to the command invocation in the search results info file.
This data is not passed to a command invocation by default. You must
request it by specifying this pair of configuration settings in
commands.conf:
.. code-block:: python
enableheader=true
requires_srinfo=true
The :code:`enableheader` setting is :code:`true` by default. Hence, you
need not set it. The :code:`requires_srinfo` setting is false by
default. Hence, you must set it.
:return: :class:`splunklib.client.Service`, if :code:`enableheader` and
:code:`requires_srinfo` are both :code:`true`. Otherwise, if either
:code:`enableheader` or :code:`requires_srinfo` are :code:`false`,
a value of :code:`None` is returned.
"""
if self._service is not None:
return self._service # depends on [control=['if'], data=[]]
info = self.search_results_info
if info is None:
return None # depends on [control=['if'], data=[]]
splunkd = urlsplit(info.splunkd_uri, info.splunkd_protocol, allow_fragments=False)
self._service = Service(scheme=splunkd.scheme, host=splunkd.hostname, port=splunkd.port, token=info.auth_token, app=info.ppc_app)
return self._service |
def error(message, code='ERROR'):
"""Display Error.
Method prints the error message, message being given
as an input.
Arguments:
message {string} -- The message to be displayed.
"""
now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
output = now + ' [' + torn.plugins.colors.FAIL + \
code + torn.plugins.colors.ENDC + '] \t' + \
message
print(output) | def function[error, parameter[message, code]]:
constant[Display Error.
Method prints the error message, message being given
as an input.
Arguments:
message {string} -- The message to be displayed.
]
variable[now] assign[=] call[call[name[datetime].now, parameter[]].strftime, parameter[constant[%Y-%m-%d %H:%M:%S]]]
variable[output] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[now] + constant[ []] + name[torn].plugins.colors.FAIL] + name[code]] + name[torn].plugins.colors.ENDC] + constant[] ]] + name[message]]
call[name[print], parameter[name[output]]] | keyword[def] identifier[error] ( identifier[message] , identifier[code] = literal[string] ):
literal[string]
identifier[now] = identifier[datetime] . identifier[now] (). identifier[strftime] ( literal[string] )
identifier[output] = identifier[now] + literal[string] + identifier[torn] . identifier[plugins] . identifier[colors] . identifier[FAIL] + identifier[code] + identifier[torn] . identifier[plugins] . identifier[colors] . identifier[ENDC] + literal[string] + identifier[message]
identifier[print] ( identifier[output] ) | def error(message, code='ERROR'):
"""Display Error.
Method prints the error message, message being given
as an input.
Arguments:
message {string} -- The message to be displayed.
"""
now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
output = now + ' [' + torn.plugins.colors.FAIL + code + torn.plugins.colors.ENDC + '] \t' + message
print(output) |
def make_slice_key(cls, start_string, size_string):
"""
Converts the given start and size query parts to a slice key.
:return: slice key
:rtype: slice
"""
try:
start = int(start_string)
except ValueError:
raise ValueError('Query parameter "start" must be a number.')
if start < 0:
raise ValueError('Query parameter "start" must be zero or '
'a positive number.')
try:
size = int(size_string)
except ValueError:
raise ValueError('Query parameter "size" must be a number.')
if size < 1:
raise ValueError('Query parameter "size" must be a positive '
'number.')
return slice(start, start + size) | def function[make_slice_key, parameter[cls, start_string, size_string]]:
constant[
Converts the given start and size query parts to a slice key.
:return: slice key
:rtype: slice
]
<ast.Try object at 0x7da2041daad0>
if compare[name[start] less[<] constant[0]] begin[:]
<ast.Raise object at 0x7da2041d8fd0>
<ast.Try object at 0x7da2041da740>
if compare[name[size] less[<] constant[1]] begin[:]
<ast.Raise object at 0x7da207f014b0>
return[call[name[slice], parameter[name[start], binary_operation[name[start] + name[size]]]]] | keyword[def] identifier[make_slice_key] ( identifier[cls] , identifier[start_string] , identifier[size_string] ):
literal[string]
keyword[try] :
identifier[start] = identifier[int] ( identifier[start_string] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[start] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[try] :
identifier[size] = identifier[int] ( identifier[size_string] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[size] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[return] identifier[slice] ( identifier[start] , identifier[start] + identifier[size] ) | def make_slice_key(cls, start_string, size_string):
"""
Converts the given start and size query parts to a slice key.
:return: slice key
:rtype: slice
"""
try:
start = int(start_string) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('Query parameter "start" must be a number.') # depends on [control=['except'], data=[]]
if start < 0:
raise ValueError('Query parameter "start" must be zero or a positive number.') # depends on [control=['if'], data=[]]
try:
size = int(size_string) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('Query parameter "size" must be a number.') # depends on [control=['except'], data=[]]
if size < 1:
raise ValueError('Query parameter "size" must be a positive number.') # depends on [control=['if'], data=[]]
return slice(start, start + size) |
def mtf_slice(x, begin, size, slice_dim_name, name=None):
"""Slice operation.
Call externally as mtf.slice()
Args:
x: a list of Tensors
begin: integer, where to begin slicing from along the axis
size: integer, size to slice from axis.
slice_dim_name: string, dimension name of slicing axis.
name: an optional string
Returns:
a Tensor with shape extended by output_shape for the last axis.
"""
return SliceOperation(
x, begin, size, slice_dim_name, name=name).outputs[0] | def function[mtf_slice, parameter[x, begin, size, slice_dim_name, name]]:
constant[Slice operation.
Call externally as mtf.slice()
Args:
x: a list of Tensors
begin: integer, where to begin slicing from along the axis
size: integer, size to slice from axis.
slice_dim_name: string, dimension name of slicing axis.
name: an optional string
Returns:
a Tensor with shape extended by output_shape for the last axis.
]
return[call[call[name[SliceOperation], parameter[name[x], name[begin], name[size], name[slice_dim_name]]].outputs][constant[0]]] | keyword[def] identifier[mtf_slice] ( identifier[x] , identifier[begin] , identifier[size] , identifier[slice_dim_name] , identifier[name] = keyword[None] ):
literal[string]
keyword[return] identifier[SliceOperation] (
identifier[x] , identifier[begin] , identifier[size] , identifier[slice_dim_name] , identifier[name] = identifier[name] ). identifier[outputs] [ literal[int] ] | def mtf_slice(x, begin, size, slice_dim_name, name=None):
"""Slice operation.
Call externally as mtf.slice()
Args:
x: a list of Tensors
begin: integer, where to begin slicing from along the axis
size: integer, size to slice from axis.
slice_dim_name: string, dimension name of slicing axis.
name: an optional string
Returns:
a Tensor with shape extended by output_shape for the last axis.
"""
return SliceOperation(x, begin, size, slice_dim_name, name=name).outputs[0] |
def editors(self):
"""Legacy access to editor role.
DEPRECATED: use ``policy["roles/editors"]`` instead."""
result = set()
for role in self._EDITOR_ROLES:
for member in self._bindings.get(role, ()):
result.add(member)
return frozenset(result) | def function[editors, parameter[self]]:
constant[Legacy access to editor role.
DEPRECATED: use ``policy["roles/editors"]`` instead.]
variable[result] assign[=] call[name[set], parameter[]]
for taget[name[role]] in starred[name[self]._EDITOR_ROLES] begin[:]
for taget[name[member]] in starred[call[name[self]._bindings.get, parameter[name[role], tuple[[]]]]] begin[:]
call[name[result].add, parameter[name[member]]]
return[call[name[frozenset], parameter[name[result]]]] | keyword[def] identifier[editors] ( identifier[self] ):
literal[string]
identifier[result] = identifier[set] ()
keyword[for] identifier[role] keyword[in] identifier[self] . identifier[_EDITOR_ROLES] :
keyword[for] identifier[member] keyword[in] identifier[self] . identifier[_bindings] . identifier[get] ( identifier[role] ,()):
identifier[result] . identifier[add] ( identifier[member] )
keyword[return] identifier[frozenset] ( identifier[result] ) | def editors(self):
"""Legacy access to editor role.
DEPRECATED: use ``policy["roles/editors"]`` instead."""
result = set()
for role in self._EDITOR_ROLES:
for member in self._bindings.get(role, ()):
result.add(member) # depends on [control=['for'], data=['member']] # depends on [control=['for'], data=['role']]
return frozenset(result) |
def OnRightClickListctrl(self, event):
"""
Edits the logger and the Zeq_GUI parent object so that the selected interpretation is now marked as bad
@param: event -> wx.ListCtrlEvent that triggered this function
"""
i = event.GetIndex()
fit,spec = self.fit_list[i][0],self.fit_list[i][1]
if fit in self.parent.bad_fits:
if not self.parent.mark_fit_good(fit,spec=spec): return
if i == self.current_fit_index:
self.logger.SetItemBackgroundColour(i,"LIGHT BLUE")
else:
self.logger.SetItemBackgroundColour(i,"WHITE")
else:
if not self.parent.mark_fit_bad(fit): return
if i == self.current_fit_index:
self.logger.SetItemBackgroundColour(i,"red")
else:
self.logger.SetItemBackgroundColour(i,"red")
self.parent.calculate_high_levels_data()
self.parent.plot_high_levels_data()
self.logger_focus(i) | def function[OnRightClickListctrl, parameter[self, event]]:
constant[
Edits the logger and the Zeq_GUI parent object so that the selected interpretation is now marked as bad
@param: event -> wx.ListCtrlEvent that triggered this function
]
variable[i] assign[=] call[name[event].GetIndex, parameter[]]
<ast.Tuple object at 0x7da18bcc86a0> assign[=] tuple[[<ast.Subscript object at 0x7da18bccb0a0>, <ast.Subscript object at 0x7da18bcc84f0>]]
if compare[name[fit] in name[self].parent.bad_fits] begin[:]
if <ast.UnaryOp object at 0x7da18bcc90f0> begin[:]
return[None]
if compare[name[i] equal[==] name[self].current_fit_index] begin[:]
call[name[self].logger.SetItemBackgroundColour, parameter[name[i], constant[LIGHT BLUE]]]
call[name[self].parent.calculate_high_levels_data, parameter[]]
call[name[self].parent.plot_high_levels_data, parameter[]]
call[name[self].logger_focus, parameter[name[i]]] | keyword[def] identifier[OnRightClickListctrl] ( identifier[self] , identifier[event] ):
literal[string]
identifier[i] = identifier[event] . identifier[GetIndex] ()
identifier[fit] , identifier[spec] = identifier[self] . identifier[fit_list] [ identifier[i] ][ literal[int] ], identifier[self] . identifier[fit_list] [ identifier[i] ][ literal[int] ]
keyword[if] identifier[fit] keyword[in] identifier[self] . identifier[parent] . identifier[bad_fits] :
keyword[if] keyword[not] identifier[self] . identifier[parent] . identifier[mark_fit_good] ( identifier[fit] , identifier[spec] = identifier[spec] ): keyword[return]
keyword[if] identifier[i] == identifier[self] . identifier[current_fit_index] :
identifier[self] . identifier[logger] . identifier[SetItemBackgroundColour] ( identifier[i] , literal[string] )
keyword[else] :
identifier[self] . identifier[logger] . identifier[SetItemBackgroundColour] ( identifier[i] , literal[string] )
keyword[else] :
keyword[if] keyword[not] identifier[self] . identifier[parent] . identifier[mark_fit_bad] ( identifier[fit] ): keyword[return]
keyword[if] identifier[i] == identifier[self] . identifier[current_fit_index] :
identifier[self] . identifier[logger] . identifier[SetItemBackgroundColour] ( identifier[i] , literal[string] )
keyword[else] :
identifier[self] . identifier[logger] . identifier[SetItemBackgroundColour] ( identifier[i] , literal[string] )
identifier[self] . identifier[parent] . identifier[calculate_high_levels_data] ()
identifier[self] . identifier[parent] . identifier[plot_high_levels_data] ()
identifier[self] . identifier[logger_focus] ( identifier[i] ) | def OnRightClickListctrl(self, event):
"""
Edits the logger and the Zeq_GUI parent object so that the selected interpretation is now marked as bad
@param: event -> wx.ListCtrlEvent that triggered this function
"""
i = event.GetIndex()
(fit, spec) = (self.fit_list[i][0], self.fit_list[i][1])
if fit in self.parent.bad_fits:
if not self.parent.mark_fit_good(fit, spec=spec):
return # depends on [control=['if'], data=[]]
if i == self.current_fit_index:
self.logger.SetItemBackgroundColour(i, 'LIGHT BLUE') # depends on [control=['if'], data=['i']]
else:
self.logger.SetItemBackgroundColour(i, 'WHITE') # depends on [control=['if'], data=['fit']]
else:
if not self.parent.mark_fit_bad(fit):
return # depends on [control=['if'], data=[]]
if i == self.current_fit_index:
self.logger.SetItemBackgroundColour(i, 'red') # depends on [control=['if'], data=['i']]
else:
self.logger.SetItemBackgroundColour(i, 'red')
self.parent.calculate_high_levels_data()
self.parent.plot_high_levels_data()
self.logger_focus(i) |
def record_manifest(self):
"""
Called after a deployment to record any data necessary to detect changes
for a future deployment.
"""
manifest = super(TarballSatchel, self).record_manifest()
manifest['timestamp'] = self.timestamp
return manifest | def function[record_manifest, parameter[self]]:
constant[
Called after a deployment to record any data necessary to detect changes
for a future deployment.
]
variable[manifest] assign[=] call[call[name[super], parameter[name[TarballSatchel], name[self]]].record_manifest, parameter[]]
call[name[manifest]][constant[timestamp]] assign[=] name[self].timestamp
return[name[manifest]] | keyword[def] identifier[record_manifest] ( identifier[self] ):
literal[string]
identifier[manifest] = identifier[super] ( identifier[TarballSatchel] , identifier[self] ). identifier[record_manifest] ()
identifier[manifest] [ literal[string] ]= identifier[self] . identifier[timestamp]
keyword[return] identifier[manifest] | def record_manifest(self):
"""
Called after a deployment to record any data necessary to detect changes
for a future deployment.
"""
manifest = super(TarballSatchel, self).record_manifest()
manifest['timestamp'] = self.timestamp
return manifest |
def density_matrix(self):
"""Returns the density matrix at this step in the simulation.
The density matrix that is stored in this result is returned in the
computational basis with these basis states defined by the qubit_map.
In particular the value in the qubit_map is the index of the qubit,
and these are translated into binary vectors where the last qubit is
the 1s bit of the index, the second-to-last is the 2s bit of the index,
and so forth (i.e. big endian ordering). The density matrix is a
`2 ** num_qubits` square matrix, with rows and columns ordered by
the computational basis as just described.
Example:
qubit_map: {QubitA: 0, QubitB: 1, QubitC: 2}
Then the returned density matrix will have (row and column) indices
mapped to qubit basis states like the following table
| QubitA | QubitB | QubitC
:-: | :----: | :----: | :----:
0 | 0 | 0 | 0
1 | 0 | 0 | 1
2 | 0 | 1 | 0
3 | 0 | 1 | 1
4 | 1 | 0 | 0
5 | 1 | 0 | 1
6 | 1 | 1 | 0
7 | 1 | 1 | 1
"""
size = 2 ** len(self._qubit_map)
return np.reshape(self._density_matrix, (size, size)) | def function[density_matrix, parameter[self]]:
constant[Returns the density matrix at this step in the simulation.
The density matrix that is stored in this result is returned in the
computational basis with these basis states defined by the qubit_map.
In particular the value in the qubit_map is the index of the qubit,
and these are translated into binary vectors where the last qubit is
the 1s bit of the index, the second-to-last is the 2s bit of the index,
and so forth (i.e. big endian ordering). The density matrix is a
`2 ** num_qubits` square matrix, with rows and columns ordered by
the computational basis as just described.
Example:
qubit_map: {QubitA: 0, QubitB: 1, QubitC: 2}
Then the returned density matrix will have (row and column) indices
mapped to qubit basis states like the following table
| QubitA | QubitB | QubitC
:-: | :----: | :----: | :----:
0 | 0 | 0 | 0
1 | 0 | 0 | 1
2 | 0 | 1 | 0
3 | 0 | 1 | 1
4 | 1 | 0 | 0
5 | 1 | 0 | 1
6 | 1 | 1 | 0
7 | 1 | 1 | 1
]
variable[size] assign[=] binary_operation[constant[2] ** call[name[len], parameter[name[self]._qubit_map]]]
return[call[name[np].reshape, parameter[name[self]._density_matrix, tuple[[<ast.Name object at 0x7da1b1c3fbe0>, <ast.Name object at 0x7da1b1c3cbe0>]]]]] | keyword[def] identifier[density_matrix] ( identifier[self] ):
literal[string]
identifier[size] = literal[int] ** identifier[len] ( identifier[self] . identifier[_qubit_map] )
keyword[return] identifier[np] . identifier[reshape] ( identifier[self] . identifier[_density_matrix] ,( identifier[size] , identifier[size] )) | def density_matrix(self):
"""Returns the density matrix at this step in the simulation.
The density matrix that is stored in this result is returned in the
computational basis with these basis states defined by the qubit_map.
In particular the value in the qubit_map is the index of the qubit,
and these are translated into binary vectors where the last qubit is
the 1s bit of the index, the second-to-last is the 2s bit of the index,
and so forth (i.e. big endian ordering). The density matrix is a
`2 ** num_qubits` square matrix, with rows and columns ordered by
the computational basis as just described.
Example:
qubit_map: {QubitA: 0, QubitB: 1, QubitC: 2}
Then the returned density matrix will have (row and column) indices
mapped to qubit basis states like the following table
| QubitA | QubitB | QubitC
:-: | :----: | :----: | :----:
0 | 0 | 0 | 0
1 | 0 | 0 | 1
2 | 0 | 1 | 0
3 | 0 | 1 | 1
4 | 1 | 0 | 0
5 | 1 | 0 | 1
6 | 1 | 1 | 0
7 | 1 | 1 | 1
"""
size = 2 ** len(self._qubit_map)
return np.reshape(self._density_matrix, (size, size)) |
def override (overrider_id, overridee_id):
"""Make generator 'overrider-id' be preferred to
'overridee-id'. If, when searching for generators
that could produce a target of certain type,
both those generators are amoung viable generators,
the overridden generator is immediately discarded.
The overridden generators are discarded immediately
after computing the list of viable generators, before
running any of them."""
assert isinstance(overrider_id, basestring)
assert isinstance(overridee_id, basestring)
__overrides.setdefault(overrider_id, []).append(overridee_id) | def function[override, parameter[overrider_id, overridee_id]]:
constant[Make generator 'overrider-id' be preferred to
'overridee-id'. If, when searching for generators
that could produce a target of certain type,
both those generators are amoung viable generators,
the overridden generator is immediately discarded.
The overridden generators are discarded immediately
after computing the list of viable generators, before
running any of them.]
assert[call[name[isinstance], parameter[name[overrider_id], name[basestring]]]]
assert[call[name[isinstance], parameter[name[overridee_id], name[basestring]]]]
call[call[name[__overrides].setdefault, parameter[name[overrider_id], list[[]]]].append, parameter[name[overridee_id]]] | keyword[def] identifier[override] ( identifier[overrider_id] , identifier[overridee_id] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[overrider_id] , identifier[basestring] )
keyword[assert] identifier[isinstance] ( identifier[overridee_id] , identifier[basestring] )
identifier[__overrides] . identifier[setdefault] ( identifier[overrider_id] ,[]). identifier[append] ( identifier[overridee_id] ) | def override(overrider_id, overridee_id):
"""Make generator 'overrider-id' be preferred to
'overridee-id'. If, when searching for generators
that could produce a target of certain type,
both those generators are amoung viable generators,
the overridden generator is immediately discarded.
The overridden generators are discarded immediately
after computing the list of viable generators, before
running any of them."""
assert isinstance(overrider_id, basestring)
assert isinstance(overridee_id, basestring)
__overrides.setdefault(overrider_id, []).append(overridee_id) |
def check_garden_requirements(self):
'''Ensure required garden packages are available to be included.
'''
garden_requirements = self.config.getlist('app',
'garden_requirements', '')
# have we installed the garden packages?
if exists(self.gardenlibs_dir) and \
self.state.get('cache.gardenlibs', '') == garden_requirements:
self.debug('Garden requirements already installed, pass')
return
# we're going to reinstall all the garden libs.
self.rmdir(self.gardenlibs_dir)
# but if we don't have requirements, or if the user removed everything,
# don't do anything.
if not garden_requirements:
self.state['cache.gardenlibs'] = garden_requirements
return
self._ensure_virtualenv()
self.cmd('pip install Kivy-Garden==0.1.1', env=self.env_venv)
# recreate gardenlibs
self.mkdir(self.gardenlibs_dir)
for requirement in garden_requirements:
self._install_garden_package(requirement)
# save gardenlibs state
self.state['cache.gardenlibs'] = garden_requirements | def function[check_garden_requirements, parameter[self]]:
constant[Ensure required garden packages are available to be included.
]
variable[garden_requirements] assign[=] call[name[self].config.getlist, parameter[constant[app], constant[garden_requirements], constant[]]]
if <ast.BoolOp object at 0x7da1b167dd50> begin[:]
call[name[self].debug, parameter[constant[Garden requirements already installed, pass]]]
return[None]
call[name[self].rmdir, parameter[name[self].gardenlibs_dir]]
if <ast.UnaryOp object at 0x7da1b167c5e0> begin[:]
call[name[self].state][constant[cache.gardenlibs]] assign[=] name[garden_requirements]
return[None]
call[name[self]._ensure_virtualenv, parameter[]]
call[name[self].cmd, parameter[constant[pip install Kivy-Garden==0.1.1]]]
call[name[self].mkdir, parameter[name[self].gardenlibs_dir]]
for taget[name[requirement]] in starred[name[garden_requirements]] begin[:]
call[name[self]._install_garden_package, parameter[name[requirement]]]
call[name[self].state][constant[cache.gardenlibs]] assign[=] name[garden_requirements] | keyword[def] identifier[check_garden_requirements] ( identifier[self] ):
literal[string]
identifier[garden_requirements] = identifier[self] . identifier[config] . identifier[getlist] ( literal[string] ,
literal[string] , literal[string] )
keyword[if] identifier[exists] ( identifier[self] . identifier[gardenlibs_dir] ) keyword[and] identifier[self] . identifier[state] . identifier[get] ( literal[string] , literal[string] )== identifier[garden_requirements] :
identifier[self] . identifier[debug] ( literal[string] )
keyword[return]
identifier[self] . identifier[rmdir] ( identifier[self] . identifier[gardenlibs_dir] )
keyword[if] keyword[not] identifier[garden_requirements] :
identifier[self] . identifier[state] [ literal[string] ]= identifier[garden_requirements]
keyword[return]
identifier[self] . identifier[_ensure_virtualenv] ()
identifier[self] . identifier[cmd] ( literal[string] , identifier[env] = identifier[self] . identifier[env_venv] )
identifier[self] . identifier[mkdir] ( identifier[self] . identifier[gardenlibs_dir] )
keyword[for] identifier[requirement] keyword[in] identifier[garden_requirements] :
identifier[self] . identifier[_install_garden_package] ( identifier[requirement] )
identifier[self] . identifier[state] [ literal[string] ]= identifier[garden_requirements] | def check_garden_requirements(self):
"""Ensure required garden packages are available to be included.
"""
garden_requirements = self.config.getlist('app', 'garden_requirements', '')
# have we installed the garden packages?
if exists(self.gardenlibs_dir) and self.state.get('cache.gardenlibs', '') == garden_requirements:
self.debug('Garden requirements already installed, pass')
return # depends on [control=['if'], data=[]]
# we're going to reinstall all the garden libs.
self.rmdir(self.gardenlibs_dir)
# but if we don't have requirements, or if the user removed everything,
# don't do anything.
if not garden_requirements:
self.state['cache.gardenlibs'] = garden_requirements
return # depends on [control=['if'], data=[]]
self._ensure_virtualenv()
self.cmd('pip install Kivy-Garden==0.1.1', env=self.env_venv)
# recreate gardenlibs
self.mkdir(self.gardenlibs_dir)
for requirement in garden_requirements:
self._install_garden_package(requirement) # depends on [control=['for'], data=['requirement']]
# save gardenlibs state
self.state['cache.gardenlibs'] = garden_requirements |
def _call(self, x):
"""Return wavelet transform of ``x``."""
if self.impl == 'pywt':
coeffs = pywt.wavedecn(
x, wavelet=self.pywt_wavelet, level=self.nlevels,
mode=self.pywt_pad_mode, axes=self.axes)
return pywt.ravel_coeffs(coeffs, axes=self.axes)[0]
else:
raise RuntimeError("bad `impl` '{}'".format(self.impl)) | def function[_call, parameter[self, x]]:
constant[Return wavelet transform of ``x``.]
if compare[name[self].impl equal[==] constant[pywt]] begin[:]
variable[coeffs] assign[=] call[name[pywt].wavedecn, parameter[name[x]]]
return[call[call[name[pywt].ravel_coeffs, parameter[name[coeffs]]]][constant[0]]] | keyword[def] identifier[_call] ( identifier[self] , identifier[x] ):
literal[string]
keyword[if] identifier[self] . identifier[impl] == literal[string] :
identifier[coeffs] = identifier[pywt] . identifier[wavedecn] (
identifier[x] , identifier[wavelet] = identifier[self] . identifier[pywt_wavelet] , identifier[level] = identifier[self] . identifier[nlevels] ,
identifier[mode] = identifier[self] . identifier[pywt_pad_mode] , identifier[axes] = identifier[self] . identifier[axes] )
keyword[return] identifier[pywt] . identifier[ravel_coeffs] ( identifier[coeffs] , identifier[axes] = identifier[self] . identifier[axes] )[ literal[int] ]
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[self] . identifier[impl] )) | def _call(self, x):
"""Return wavelet transform of ``x``."""
if self.impl == 'pywt':
coeffs = pywt.wavedecn(x, wavelet=self.pywt_wavelet, level=self.nlevels, mode=self.pywt_pad_mode, axes=self.axes)
return pywt.ravel_coeffs(coeffs, axes=self.axes)[0] # depends on [control=['if'], data=[]]
else:
raise RuntimeError("bad `impl` '{}'".format(self.impl)) |
def from_web(network_id: int, host: Optional[str] = None) -> BELGraph:
    """Retrieve a public network from BEL Commons.

    In the future, this function may be extended to support authentication.

    :param int network_id: The BEL Commons network identifier
    :param Optional[str] host: The location of the BEL Commons server. Alternatively, looks up in PyBEL config with
     ``PYBEL_REMOTE_HOST`` or the environment as ``PYBEL_REMOTE_HOST`` Defaults to
     :data:`pybel.constants.DEFAULT_SERVICE_URL`
    :rtype: pybel.BELGraph
    :raises requests.HTTPError: If the server responds with a 4xx/5xx status code.
    """
    if host is None:
        host = _get_host()

    url = host + GET_ENDPOINT.format(network_id)
    res = requests.get(url)
    # Fail fast with a clear HTTP error instead of attempting to parse an
    # error page as JSON (which would raise a confusing decode error).
    res.raise_for_status()

    graph_json = res.json()
    graph = from_json(graph_json)
    return graph
constant[Retrieve a public network from BEL Commons.
In the future, this function may be extended to support authentication.
:param int network_id: The BEL Commons network identifier
:param Optional[str] host: The location of the BEL Commons server. Alternatively, looks up in PyBEL config with
``PYBEL_REMOTE_HOST`` or the environment as ``PYBEL_REMOTE_HOST`` Defaults to
:data:`pybel.constants.DEFAULT_SERVICE_URL`
:rtype: pybel.BELGraph
]
if compare[name[host] is constant[None]] begin[:]
variable[host] assign[=] call[name[_get_host], parameter[]]
variable[url] assign[=] binary_operation[name[host] + call[name[GET_ENDPOINT].format, parameter[name[network_id]]]]
variable[res] assign[=] call[name[requests].get, parameter[name[url]]]
variable[graph_json] assign[=] call[name[res].json, parameter[]]
variable[graph] assign[=] call[name[from_json], parameter[name[graph_json]]]
return[name[graph]] | keyword[def] identifier[from_web] ( identifier[network_id] : identifier[int] , identifier[host] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> identifier[BELGraph] :
literal[string]
keyword[if] identifier[host] keyword[is] keyword[None] :
identifier[host] = identifier[_get_host] ()
identifier[url] = identifier[host] + identifier[GET_ENDPOINT] . identifier[format] ( identifier[network_id] )
identifier[res] = identifier[requests] . identifier[get] ( identifier[url] )
identifier[graph_json] = identifier[res] . identifier[json] ()
identifier[graph] = identifier[from_json] ( identifier[graph_json] )
keyword[return] identifier[graph] | def from_web(network_id: int, host: Optional[str]=None) -> BELGraph:
"""Retrieve a public network from BEL Commons.
In the future, this function may be extended to support authentication.
:param int network_id: The BEL Commons network identifier
:param Optional[str] host: The location of the BEL Commons server. Alternatively, looks up in PyBEL config with
``PYBEL_REMOTE_HOST`` or the environment as ``PYBEL_REMOTE_HOST`` Defaults to
:data:`pybel.constants.DEFAULT_SERVICE_URL`
:rtype: pybel.BELGraph
"""
if host is None:
host = _get_host() # depends on [control=['if'], data=['host']]
url = host + GET_ENDPOINT.format(network_id)
res = requests.get(url)
graph_json = res.json()
graph = from_json(graph_json)
return graph |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.