code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def get(self, transport, robj, r=None, pr=None, timeout=None,
basic_quorum=None, notfound_ok=None, head_only=False):
"""
get(robj, r=None, pr=None, timeout=None)
Fetches the contents of a Riak object.
.. note:: This request is automatically retried :attr:`retries`
times if it fails due to network error.
:param robj: the object to fetch
:type robj: RiakObject
:param r: the read quorum
:type r: integer, string, None
:param pr: the primary read quorum
:type pr: integer, string, None
:param timeout: a timeout value in milliseconds
:type timeout: int
:param basic_quorum: whether to use the "basic quorum" policy
for not-founds
:type basic_quorum: bool
:param notfound_ok: whether to treat not-found responses as successful
:type notfound_ok: bool
:param head_only: whether to fetch without value, so only metadata
(only available on PB transport)
:type head_only: bool
"""
_validate_timeout(timeout)
if not isinstance(robj.key, six.string_types):
raise TypeError(
'key must be a string, instead got {0}'.format(repr(robj.key)))
return transport.get(robj, r=r, pr=pr, timeout=timeout,
basic_quorum=basic_quorum,
notfound_ok=notfound_ok,
head_only=head_only) | def function[get, parameter[self, transport, robj, r, pr, timeout, basic_quorum, notfound_ok, head_only]]:
constant[
get(robj, r=None, pr=None, timeout=None)
Fetches the contents of a Riak object.
.. note:: This request is automatically retried :attr:`retries`
times if it fails due to network error.
:param robj: the object to fetch
:type robj: RiakObject
:param r: the read quorum
:type r: integer, string, None
:param pr: the primary read quorum
:type pr: integer, string, None
:param timeout: a timeout value in milliseconds
:type timeout: int
:param basic_quorum: whether to use the "basic quorum" policy
for not-founds
:type basic_quorum: bool
:param notfound_ok: whether to treat not-found responses as successful
:type notfound_ok: bool
:param head_only: whether to fetch without value, so only metadata
(only available on PB transport)
:type head_only: bool
]
call[name[_validate_timeout], parameter[name[timeout]]]
if <ast.UnaryOp object at 0x7da2047eae60> begin[:]
<ast.Raise object at 0x7da2047ea1a0>
return[call[name[transport].get, parameter[name[robj]]]] | keyword[def] identifier[get] ( identifier[self] , identifier[transport] , identifier[robj] , identifier[r] = keyword[None] , identifier[pr] = keyword[None] , identifier[timeout] = keyword[None] ,
identifier[basic_quorum] = keyword[None] , identifier[notfound_ok] = keyword[None] , identifier[head_only] = keyword[False] ):
literal[string]
identifier[_validate_timeout] ( identifier[timeout] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[robj] . identifier[key] , identifier[six] . identifier[string_types] ):
keyword[raise] identifier[TypeError] (
literal[string] . identifier[format] ( identifier[repr] ( identifier[robj] . identifier[key] )))
keyword[return] identifier[transport] . identifier[get] ( identifier[robj] , identifier[r] = identifier[r] , identifier[pr] = identifier[pr] , identifier[timeout] = identifier[timeout] ,
identifier[basic_quorum] = identifier[basic_quorum] ,
identifier[notfound_ok] = identifier[notfound_ok] ,
identifier[head_only] = identifier[head_only] ) | def get(self, transport, robj, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None, head_only=False):
"""
get(robj, r=None, pr=None, timeout=None)
Fetches the contents of a Riak object.
.. note:: This request is automatically retried :attr:`retries`
times if it fails due to network error.
:param robj: the object to fetch
:type robj: RiakObject
:param r: the read quorum
:type r: integer, string, None
:param pr: the primary read quorum
:type pr: integer, string, None
:param timeout: a timeout value in milliseconds
:type timeout: int
:param basic_quorum: whether to use the "basic quorum" policy
for not-founds
:type basic_quorum: bool
:param notfound_ok: whether to treat not-found responses as successful
:type notfound_ok: bool
:param head_only: whether to fetch without value, so only metadata
(only available on PB transport)
:type head_only: bool
"""
_validate_timeout(timeout)
if not isinstance(robj.key, six.string_types):
raise TypeError('key must be a string, instead got {0}'.format(repr(robj.key))) # depends on [control=['if'], data=[]]
return transport.get(robj, r=r, pr=pr, timeout=timeout, basic_quorum=basic_quorum, notfound_ok=notfound_ok, head_only=head_only) |
def convert_flatten(builder, layer, input_names, output_names, keras_layer):
"""
Convert a flatten layer from keras to coreml.
----------
Parameters
keras_layer: layer
A keras layer object.
builder: NeuralNetworkBuilder
A neural network builder object.
"""
input_name, output_name = (input_names[0], output_names[0])
# blob_order == 0 if the input blob needs not be rearranged
# blob_order == 1 if the input blob needs to be rearranged
blob_order = 0
# using keras_layer.input.shape have a "?" (Dimension[None] at the front),
# making a 3D tensor with unknown batch size 4D
try:
in_shape = keras_layer.input_shape
if len(in_shape) == 4:
blob_order = 1
if len(in_shape) == 3 and in_shape[0] is None:
# handling Keras rank-3 tensor (Batch, Sequence, Channels)
permute_output_name = output_name + '__permute__'
builder.add_permute(name=layer+'__permute__', dim=(2,1,0,3),
input_name=input_name, output_name=permute_output_name)
builder.add_flatten(name=layer, mode=1,
input_name=permute_output_name, output_name=output_name)
else:
builder.add_flatten(name=layer, mode=blob_order, input_name=input_name,
output_name=output_name)
except:
builder.add_flatten(name=layer, mode=1, input_name=input_name, output_name=output_name) | def function[convert_flatten, parameter[builder, layer, input_names, output_names, keras_layer]]:
constant[
Convert a flatten layer from keras to coreml.
----------
Parameters
keras_layer: layer
A keras layer object.
builder: NeuralNetworkBuilder
A neural network builder object.
]
<ast.Tuple object at 0x7da204960bb0> assign[=] tuple[[<ast.Subscript object at 0x7da204963ca0>, <ast.Subscript object at 0x7da2049620e0>]]
variable[blob_order] assign[=] constant[0]
<ast.Try object at 0x7da1b1ef1990> | keyword[def] identifier[convert_flatten] ( identifier[builder] , identifier[layer] , identifier[input_names] , identifier[output_names] , identifier[keras_layer] ):
literal[string]
identifier[input_name] , identifier[output_name] =( identifier[input_names] [ literal[int] ], identifier[output_names] [ literal[int] ])
identifier[blob_order] = literal[int]
keyword[try] :
identifier[in_shape] = identifier[keras_layer] . identifier[input_shape]
keyword[if] identifier[len] ( identifier[in_shape] )== literal[int] :
identifier[blob_order] = literal[int]
keyword[if] identifier[len] ( identifier[in_shape] )== literal[int] keyword[and] identifier[in_shape] [ literal[int] ] keyword[is] keyword[None] :
identifier[permute_output_name] = identifier[output_name] + literal[string]
identifier[builder] . identifier[add_permute] ( identifier[name] = identifier[layer] + literal[string] , identifier[dim] =( literal[int] , literal[int] , literal[int] , literal[int] ),
identifier[input_name] = identifier[input_name] , identifier[output_name] = identifier[permute_output_name] )
identifier[builder] . identifier[add_flatten] ( identifier[name] = identifier[layer] , identifier[mode] = literal[int] ,
identifier[input_name] = identifier[permute_output_name] , identifier[output_name] = identifier[output_name] )
keyword[else] :
identifier[builder] . identifier[add_flatten] ( identifier[name] = identifier[layer] , identifier[mode] = identifier[blob_order] , identifier[input_name] = identifier[input_name] ,
identifier[output_name] = identifier[output_name] )
keyword[except] :
identifier[builder] . identifier[add_flatten] ( identifier[name] = identifier[layer] , identifier[mode] = literal[int] , identifier[input_name] = identifier[input_name] , identifier[output_name] = identifier[output_name] ) | def convert_flatten(builder, layer, input_names, output_names, keras_layer):
"""
Convert a flatten layer from keras to coreml.
----------
Parameters
keras_layer: layer
A keras layer object.
builder: NeuralNetworkBuilder
A neural network builder object.
"""
(input_name, output_name) = (input_names[0], output_names[0])
# blob_order == 0 if the input blob needs not be rearranged
# blob_order == 1 if the input blob needs to be rearranged
blob_order = 0
# using keras_layer.input.shape have a "?" (Dimension[None] at the front),
# making a 3D tensor with unknown batch size 4D
try:
in_shape = keras_layer.input_shape
if len(in_shape) == 4:
blob_order = 1 # depends on [control=['if'], data=[]]
if len(in_shape) == 3 and in_shape[0] is None:
# handling Keras rank-3 tensor (Batch, Sequence, Channels)
permute_output_name = output_name + '__permute__'
builder.add_permute(name=layer + '__permute__', dim=(2, 1, 0, 3), input_name=input_name, output_name=permute_output_name)
builder.add_flatten(name=layer, mode=1, input_name=permute_output_name, output_name=output_name) # depends on [control=['if'], data=[]]
else:
builder.add_flatten(name=layer, mode=blob_order, input_name=input_name, output_name=output_name) # depends on [control=['try'], data=[]]
except:
builder.add_flatten(name=layer, mode=1, input_name=input_name, output_name=output_name) # depends on [control=['except'], data=[]] |
def at(self, timestamp):
"""
Force the create date of an object to be at a certain time; This
method can be invoked only on a freshly created Versionable object.
It must not have been cloned yet. Raises a SuspiciousOperation
exception, otherwise.
:param timestamp: a datetime.datetime instance
"""
# Ensure, it's not a historic item
if not self.is_current:
raise SuspiciousOperation(
"Cannot relocate this Versionable instance in time, since it "
"is a historical item")
# Ensure it's not a versioned item (that would lead to some ugly
# situations...
if not self.version_birth_date == self.version_start_date:
raise SuspiciousOperation(
"Cannot relocate this Versionable instance in time, since it "
"is a versioned instance")
# Ensure the argument is really a timestamp
if not isinstance(timestamp, datetime.datetime):
raise ValueError("This is not a datetime.datetime timestamp")
self.version_birth_date = self.version_start_date = timestamp
return self | def function[at, parameter[self, timestamp]]:
constant[
Force the create date of an object to be at a certain time; This
method can be invoked only on a freshly created Versionable object.
It must not have been cloned yet. Raises a SuspiciousOperation
exception, otherwise.
:param timestamp: a datetime.datetime instance
]
if <ast.UnaryOp object at 0x7da1b11392d0> begin[:]
<ast.Raise object at 0x7da1b1139c90>
if <ast.UnaryOp object at 0x7da1b1139360> begin[:]
<ast.Raise object at 0x7da1b113a9b0>
if <ast.UnaryOp object at 0x7da1b1071a50> begin[:]
<ast.Raise object at 0x7da1b1073f10>
name[self].version_birth_date assign[=] name[timestamp]
return[name[self]] | keyword[def] identifier[at] ( identifier[self] , identifier[timestamp] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[is_current] :
keyword[raise] identifier[SuspiciousOperation] (
literal[string]
literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[version_birth_date] == identifier[self] . identifier[version_start_date] :
keyword[raise] identifier[SuspiciousOperation] (
literal[string]
literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[timestamp] , identifier[datetime] . identifier[datetime] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[version_birth_date] = identifier[self] . identifier[version_start_date] = identifier[timestamp]
keyword[return] identifier[self] | def at(self, timestamp):
"""
Force the create date of an object to be at a certain time; This
method can be invoked only on a freshly created Versionable object.
It must not have been cloned yet. Raises a SuspiciousOperation
exception, otherwise.
:param timestamp: a datetime.datetime instance
"""
# Ensure, it's not a historic item
if not self.is_current:
raise SuspiciousOperation('Cannot relocate this Versionable instance in time, since it is a historical item') # depends on [control=['if'], data=[]]
# Ensure it's not a versioned item (that would lead to some ugly
# situations...
if not self.version_birth_date == self.version_start_date:
raise SuspiciousOperation('Cannot relocate this Versionable instance in time, since it is a versioned instance') # depends on [control=['if'], data=[]]
# Ensure the argument is really a timestamp
if not isinstance(timestamp, datetime.datetime):
raise ValueError('This is not a datetime.datetime timestamp') # depends on [control=['if'], data=[]]
self.version_birth_date = self.version_start_date = timestamp
return self |
def axisinfo(unit, axis):
"""
Return the :class:`~matplotlib.units.AxisInfo` for *unit*.
*unit* is a tzinfo instance or None.
The *axis* argument is required but not used.
"""
tz = unit
majloc = PandasAutoDateLocator(tz=tz)
majfmt = PandasAutoDateFormatter(majloc, tz=tz)
datemin = pydt.date(2000, 1, 1)
datemax = pydt.date(2010, 1, 1)
return units.AxisInfo(majloc=majloc, majfmt=majfmt, label='',
default_limits=(datemin, datemax)) | def function[axisinfo, parameter[unit, axis]]:
constant[
Return the :class:`~matplotlib.units.AxisInfo` for *unit*.
*unit* is a tzinfo instance or None.
The *axis* argument is required but not used.
]
variable[tz] assign[=] name[unit]
variable[majloc] assign[=] call[name[PandasAutoDateLocator], parameter[]]
variable[majfmt] assign[=] call[name[PandasAutoDateFormatter], parameter[name[majloc]]]
variable[datemin] assign[=] call[name[pydt].date, parameter[constant[2000], constant[1], constant[1]]]
variable[datemax] assign[=] call[name[pydt].date, parameter[constant[2010], constant[1], constant[1]]]
return[call[name[units].AxisInfo, parameter[]]] | keyword[def] identifier[axisinfo] ( identifier[unit] , identifier[axis] ):
literal[string]
identifier[tz] = identifier[unit]
identifier[majloc] = identifier[PandasAutoDateLocator] ( identifier[tz] = identifier[tz] )
identifier[majfmt] = identifier[PandasAutoDateFormatter] ( identifier[majloc] , identifier[tz] = identifier[tz] )
identifier[datemin] = identifier[pydt] . identifier[date] ( literal[int] , literal[int] , literal[int] )
identifier[datemax] = identifier[pydt] . identifier[date] ( literal[int] , literal[int] , literal[int] )
keyword[return] identifier[units] . identifier[AxisInfo] ( identifier[majloc] = identifier[majloc] , identifier[majfmt] = identifier[majfmt] , identifier[label] = literal[string] ,
identifier[default_limits] =( identifier[datemin] , identifier[datemax] )) | def axisinfo(unit, axis):
"""
Return the :class:`~matplotlib.units.AxisInfo` for *unit*.
*unit* is a tzinfo instance or None.
The *axis* argument is required but not used.
"""
tz = unit
majloc = PandasAutoDateLocator(tz=tz)
majfmt = PandasAutoDateFormatter(majloc, tz=tz)
datemin = pydt.date(2000, 1, 1)
datemax = pydt.date(2010, 1, 1)
return units.AxisInfo(majloc=majloc, majfmt=majfmt, label='', default_limits=(datemin, datemax)) |
def bump(self):
""" Fix indicator in case of unnanounced departments. """
# read client
values = self.client.mget(self.keys.indicator, self.keys.dispenser)
indicator, dispenser = map(int, values)
# determine active users
numbers = range(indicator, dispenser + 1)
keys = [self.keys.key(n) for n in numbers]
pairs = zip(keys, self.client.mget(*keys))
try:
# determine number of first active user
number = next(self.keys.number(key)
for key, value in pairs if value is not None)
except:
# set number to next result of incr on dispenser
number = dispenser + 1
# set indicator to it if necessary
if number != indicator:
self.client.set(self.keys.indicator, number)
# announce and return it anyway
self.announce(number)
return number | def function[bump, parameter[self]]:
constant[ Fix indicator in case of unnanounced departments. ]
variable[values] assign[=] call[name[self].client.mget, parameter[name[self].keys.indicator, name[self].keys.dispenser]]
<ast.Tuple object at 0x7da1b133d9c0> assign[=] call[name[map], parameter[name[int], name[values]]]
variable[numbers] assign[=] call[name[range], parameter[name[indicator], binary_operation[name[dispenser] + constant[1]]]]
variable[keys] assign[=] <ast.ListComp object at 0x7da1b133cca0>
variable[pairs] assign[=] call[name[zip], parameter[name[keys], call[name[self].client.mget, parameter[<ast.Starred object at 0x7da1b133fb50>]]]]
<ast.Try object at 0x7da1b133c5b0>
if compare[name[number] not_equal[!=] name[indicator]] begin[:]
call[name[self].client.set, parameter[name[self].keys.indicator, name[number]]]
call[name[self].announce, parameter[name[number]]]
return[name[number]] | keyword[def] identifier[bump] ( identifier[self] ):
literal[string]
identifier[values] = identifier[self] . identifier[client] . identifier[mget] ( identifier[self] . identifier[keys] . identifier[indicator] , identifier[self] . identifier[keys] . identifier[dispenser] )
identifier[indicator] , identifier[dispenser] = identifier[map] ( identifier[int] , identifier[values] )
identifier[numbers] = identifier[range] ( identifier[indicator] , identifier[dispenser] + literal[int] )
identifier[keys] =[ identifier[self] . identifier[keys] . identifier[key] ( identifier[n] ) keyword[for] identifier[n] keyword[in] identifier[numbers] ]
identifier[pairs] = identifier[zip] ( identifier[keys] , identifier[self] . identifier[client] . identifier[mget] (* identifier[keys] ))
keyword[try] :
identifier[number] = identifier[next] ( identifier[self] . identifier[keys] . identifier[number] ( identifier[key] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[pairs] keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] )
keyword[except] :
identifier[number] = identifier[dispenser] + literal[int]
keyword[if] identifier[number] != identifier[indicator] :
identifier[self] . identifier[client] . identifier[set] ( identifier[self] . identifier[keys] . identifier[indicator] , identifier[number] )
identifier[self] . identifier[announce] ( identifier[number] )
keyword[return] identifier[number] | def bump(self):
""" Fix indicator in case of unnanounced departments. """
# read client
values = self.client.mget(self.keys.indicator, self.keys.dispenser)
(indicator, dispenser) = map(int, values)
# determine active users
numbers = range(indicator, dispenser + 1)
keys = [self.keys.key(n) for n in numbers]
pairs = zip(keys, self.client.mget(*keys))
try:
# determine number of first active user
number = next((self.keys.number(key) for (key, value) in pairs if value is not None)) # depends on [control=['try'], data=[]]
except:
# set number to next result of incr on dispenser
number = dispenser + 1 # depends on [control=['except'], data=[]]
# set indicator to it if necessary
if number != indicator:
self.client.set(self.keys.indicator, number) # depends on [control=['if'], data=['number']]
# announce and return it anyway
self.announce(number)
return number |
def has_task(self, task_instance):
"""
Checks if a task is either queued or running in this executor
:param task_instance: TaskInstance
:return: True if the task is known to this executor
"""
if task_instance.key in self.queued_tasks or task_instance.key in self.running:
return True | def function[has_task, parameter[self, task_instance]]:
constant[
Checks if a task is either queued or running in this executor
:param task_instance: TaskInstance
:return: True if the task is known to this executor
]
if <ast.BoolOp object at 0x7da18bccbd90> begin[:]
return[constant[True]] | keyword[def] identifier[has_task] ( identifier[self] , identifier[task_instance] ):
literal[string]
keyword[if] identifier[task_instance] . identifier[key] keyword[in] identifier[self] . identifier[queued_tasks] keyword[or] identifier[task_instance] . identifier[key] keyword[in] identifier[self] . identifier[running] :
keyword[return] keyword[True] | def has_task(self, task_instance):
"""
Checks if a task is either queued or running in this executor
:param task_instance: TaskInstance
:return: True if the task is known to this executor
"""
if task_instance.key in self.queued_tasks or task_instance.key in self.running:
return True # depends on [control=['if'], data=[]] |
def _gotitem(self, key, ndim, subset=None):
"""
Sub-classes to define. Return a sliced object.
Parameters
----------
key : str / list of selections
ndim : 1,2
requested ndim of result
subset : object, default None
subset to act on
"""
# create a new object to prevent aliasing
if subset is None:
subset = self.obj
self = self._shallow_copy(subset)
self._reset_cache()
if subset.ndim == 2:
if is_scalar(key) and key in subset or is_list_like(key):
self._selection = key
return self | def function[_gotitem, parameter[self, key, ndim, subset]]:
constant[
Sub-classes to define. Return a sliced object.
Parameters
----------
key : str / list of selections
ndim : 1,2
requested ndim of result
subset : object, default None
subset to act on
]
if compare[name[subset] is constant[None]] begin[:]
variable[subset] assign[=] name[self].obj
variable[self] assign[=] call[name[self]._shallow_copy, parameter[name[subset]]]
call[name[self]._reset_cache, parameter[]]
if compare[name[subset].ndim equal[==] constant[2]] begin[:]
if <ast.BoolOp object at 0x7da1b26ae5c0> begin[:]
name[self]._selection assign[=] name[key]
return[name[self]] | keyword[def] identifier[_gotitem] ( identifier[self] , identifier[key] , identifier[ndim] , identifier[subset] = keyword[None] ):
literal[string]
keyword[if] identifier[subset] keyword[is] keyword[None] :
identifier[subset] = identifier[self] . identifier[obj]
identifier[self] = identifier[self] . identifier[_shallow_copy] ( identifier[subset] )
identifier[self] . identifier[_reset_cache] ()
keyword[if] identifier[subset] . identifier[ndim] == literal[int] :
keyword[if] identifier[is_scalar] ( identifier[key] ) keyword[and] identifier[key] keyword[in] identifier[subset] keyword[or] identifier[is_list_like] ( identifier[key] ):
identifier[self] . identifier[_selection] = identifier[key]
keyword[return] identifier[self] | def _gotitem(self, key, ndim, subset=None):
"""
Sub-classes to define. Return a sliced object.
Parameters
----------
key : str / list of selections
ndim : 1,2
requested ndim of result
subset : object, default None
subset to act on
"""
# create a new object to prevent aliasing
if subset is None:
subset = self.obj # depends on [control=['if'], data=['subset']]
self = self._shallow_copy(subset)
self._reset_cache()
if subset.ndim == 2:
if is_scalar(key) and key in subset or is_list_like(key):
self._selection = key # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return self |
def h(self):
r"""
Returns the step size to be used in numerical differentiation with
respect to the model parameters.
The step size is given as a vector with length ``n_modelparams`` so
that each model parameter can be weighted independently.
"""
if np.size(self._h) > 1:
assert np.size(self._h) == self.n_modelparams
return self._h
else:
return self._h * np.ones(self.n_modelparams) | def function[h, parameter[self]]:
constant[
Returns the step size to be used in numerical differentiation with
respect to the model parameters.
The step size is given as a vector with length ``n_modelparams`` so
that each model parameter can be weighted independently.
]
if compare[call[name[np].size, parameter[name[self]._h]] greater[>] constant[1]] begin[:]
assert[compare[call[name[np].size, parameter[name[self]._h]] equal[==] name[self].n_modelparams]]
return[name[self]._h] | keyword[def] identifier[h] ( identifier[self] ):
literal[string]
keyword[if] identifier[np] . identifier[size] ( identifier[self] . identifier[_h] )> literal[int] :
keyword[assert] identifier[np] . identifier[size] ( identifier[self] . identifier[_h] )== identifier[self] . identifier[n_modelparams]
keyword[return] identifier[self] . identifier[_h]
keyword[else] :
keyword[return] identifier[self] . identifier[_h] * identifier[np] . identifier[ones] ( identifier[self] . identifier[n_modelparams] ) | def h(self):
"""
Returns the step size to be used in numerical differentiation with
respect to the model parameters.
The step size is given as a vector with length ``n_modelparams`` so
that each model parameter can be weighted independently.
"""
if np.size(self._h) > 1:
assert np.size(self._h) == self.n_modelparams
return self._h # depends on [control=['if'], data=[]]
else:
return self._h * np.ones(self.n_modelparams) |
def send_mass_card(self, group_or_users, card_id,
is_to_all=False, preview=False,
send_ignore_reprint=0, client_msg_id=None):
"""
群发卡券消息
详情请参考
https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21
:param group_or_users: 值为整型数字时为按分组群发,值为列表/元组时为按 OpenID 列表群发
当 is_to_all 为 True 时,传入 None 即对所有用户发送。
:param card_id: 卡券 ID
:param is_to_all: 用于设定是否向全部用户发送,值为true或false,选择true该消息群发给所有用户
选择false可根据group_id发送给指定群组的用户
:type is_to_all: bool
:param preview: 是否发送预览,此时 group_or_users 参数应为一个openid字符串
:type preview: bool
:param send_ignore_reprint: 指定待群发的文章被判定为转载时,是否继续群发。
当 send_ignore_reprint 参数设置为1时,文章被判定为转载时,且原创文允许转载时,将继续进行群发操作。
当 send_ignore_reprint 参数设置为0时,文章被判定为转载时,将停止群发操作。
send_ignore_reprint 默认为0。
:type send_ignore_reprint: int
:param client_msg_id: 开发者侧群发 msgid,长度限制 64 字节
:type client_msg_id: str
:return: 返回的 JSON 数据包
"""
return self._send_mass_message(
group_or_users,
'wxcard',
{
'wxcard': {
'card_id': card_id
}
},
is_to_all,
preview,
send_ignore_reprint,
client_msg_id,
) | def function[send_mass_card, parameter[self, group_or_users, card_id, is_to_all, preview, send_ignore_reprint, client_msg_id]]:
constant[
群发卡券消息
详情请参考
https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21
:param group_or_users: 值为整型数字时为按分组群发,值为列表/元组时为按 OpenID 列表群发
当 is_to_all 为 True 时,传入 None 即对所有用户发送。
:param card_id: 卡券 ID
:param is_to_all: 用于设定是否向全部用户发送,值为true或false,选择true该消息群发给所有用户
选择false可根据group_id发送给指定群组的用户
:type is_to_all: bool
:param preview: 是否发送预览,此时 group_or_users 参数应为一个openid字符串
:type preview: bool
:param send_ignore_reprint: 指定待群发的文章被判定为转载时,是否继续群发。
当 send_ignore_reprint 参数设置为1时,文章被判定为转载时,且原创文允许转载时,将继续进行群发操作。
当 send_ignore_reprint 参数设置为0时,文章被判定为转载时,将停止群发操作。
send_ignore_reprint 默认为0。
:type send_ignore_reprint: int
:param client_msg_id: 开发者侧群发 msgid,长度限制 64 字节
:type client_msg_id: str
:return: 返回的 JSON 数据包
]
return[call[name[self]._send_mass_message, parameter[name[group_or_users], constant[wxcard], dictionary[[<ast.Constant object at 0x7da20c7c9180>], [<ast.Dict object at 0x7da20c7c9c90>]], name[is_to_all], name[preview], name[send_ignore_reprint], name[client_msg_id]]]] | keyword[def] identifier[send_mass_card] ( identifier[self] , identifier[group_or_users] , identifier[card_id] ,
identifier[is_to_all] = keyword[False] , identifier[preview] = keyword[False] ,
identifier[send_ignore_reprint] = literal[int] , identifier[client_msg_id] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_send_mass_message] (
identifier[group_or_users] ,
literal[string] ,
{
literal[string] :{
literal[string] : identifier[card_id]
}
},
identifier[is_to_all] ,
identifier[preview] ,
identifier[send_ignore_reprint] ,
identifier[client_msg_id] ,
) | def send_mass_card(self, group_or_users, card_id, is_to_all=False, preview=False, send_ignore_reprint=0, client_msg_id=None):
"""
群发卡券消息
详情请参考
https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21
:param group_or_users: 值为整型数字时为按分组群发,值为列表/元组时为按 OpenID 列表群发
当 is_to_all 为 True 时,传入 None 即对所有用户发送。
:param card_id: 卡券 ID
:param is_to_all: 用于设定是否向全部用户发送,值为true或false,选择true该消息群发给所有用户
选择false可根据group_id发送给指定群组的用户
:type is_to_all: bool
:param preview: 是否发送预览,此时 group_or_users 参数应为一个openid字符串
:type preview: bool
:param send_ignore_reprint: 指定待群发的文章被判定为转载时,是否继续群发。
当 send_ignore_reprint 参数设置为1时,文章被判定为转载时,且原创文允许转载时,将继续进行群发操作。
当 send_ignore_reprint 参数设置为0时,文章被判定为转载时,将停止群发操作。
send_ignore_reprint 默认为0。
:type send_ignore_reprint: int
:param client_msg_id: 开发者侧群发 msgid,长度限制 64 字节
:type client_msg_id: str
:return: 返回的 JSON 数据包
"""
return self._send_mass_message(group_or_users, 'wxcard', {'wxcard': {'card_id': card_id}}, is_to_all, preview, send_ignore_reprint, client_msg_id) |
def handle_one_response(self):
"""This function deals with *ONE INCOMING REQUEST* from the web.
It will wire and exchange message to the queues for long-polling
methods, otherwise, will stay alive for websockets.
"""
path = self.environ.get('PATH_INFO')
# Kick non-socket.io requests to our superclass
if not path.lstrip('/').startswith(self.server.resource + '/'):
return super(SocketIOHandler, self).handle_one_response()
self.status = None
self.headers_sent = False
self.result = None
self.response_length = 0
self.response_use_chunked = False
# This is analyzed for each and every HTTP requests involved
# in the Socket.IO protocol, whether long-running or long-polling
# (read: websocket or xhr-polling methods)
request_method = self.environ.get("REQUEST_METHOD")
request_tokens = self.RE_REQUEST_URL.match(path)
handshake_tokens = self.RE_HANDSHAKE_URL.match(path)
disconnect_tokens = self.RE_DISCONNECT_URL.match(path)
if handshake_tokens:
# Deal with first handshake here, create the Socket and push
# the config up.
return self._do_handshake(handshake_tokens.groupdict())
elif disconnect_tokens:
# it's a disconnect request via XHR
tokens = disconnect_tokens.groupdict()
elif request_tokens:
tokens = request_tokens.groupdict()
# and continue...
else:
# This is no socket.io request. Let the WSGI app handle it.
return super(SocketIOHandler, self).handle_one_response()
# Setup socket
sessid = tokens["sessid"]
socket = self.server.get_socket(sessid)
if not socket:
self.handle_bad_request()
return [] # Do not say the session is not found, just bad request
# so they don't start brute forcing to find open sessions
if self.environ['QUERY_STRING'].startswith('disconnect'):
# according to socket.io specs disconnect requests
# have a `disconnect` query string
# https://github.com/LearnBoost/socket.io-spec#forced-socket-disconnection
socket.disconnect()
self.handle_disconnect_request()
return []
# Setup transport
transport = self.handler_types.get(tokens["transport_id"])
# In case this is WebSocket request, switch to the WebSocketHandler
# FIXME: fix this ugly class change
old_class = None
if issubclass(transport, (transports.WebsocketTransport,
transports.FlashSocketTransport)):
old_class = self.__class__
self.__class__ = self.server.ws_handler_class
self.prevent_wsgi_call = True # thank you
# TODO: any errors, treat them ??
self.handle_one_response() # does the Websocket dance before we continue
# Make the socket object available for WSGI apps
self.environ['socketio'] = socket
# Create a transport and handle the request likewise
self.transport = transport(self, self.config)
# transports register their own spawn'd jobs now
self.transport.do_exchange(socket, request_method)
if not socket.connection_established:
# This is executed only on the *first* packet of the establishment
# of the virtual Socket connection.
socket.connection_established = True
socket.state = socket.STATE_CONNECTED
socket._spawn_heartbeat()
socket._spawn_watcher()
try:
# We'll run the WSGI app if it wasn't already done.
if socket.wsgi_app_greenlet is None:
# TODO: why don't we spawn a call to handle_one_response here ?
# why call directly the WSGI machinery ?
start_response = lambda status, headers, exc=None: None
socket.wsgi_app_greenlet = gevent.spawn(self.application,
self.environ,
start_response)
except:
self.handle_error(*sys.exc_info())
# we need to keep the connection open if we are an open socket
if tokens['transport_id'] in ['flashsocket', 'websocket']:
# wait here for all jobs to finished, when they are done
gevent.joinall(socket.jobs)
# Switch back to the old class so references to this don't use the
# incorrect class. Useful for debugging.
if old_class:
self.__class__ = old_class
# Clean up circular references so they can be garbage collected.
if hasattr(self, 'websocket') and self.websocket:
if hasattr(self.websocket, 'environ'):
del self.websocket.environ
del self.websocket
if self.environ:
del self.environ | def function[handle_one_response, parameter[self]]:
constant[This function deals with *ONE INCOMING REQUEST* from the web.
It will wire and exchange message to the queues for long-polling
methods, otherwise, will stay alive for websockets.
]
variable[path] assign[=] call[name[self].environ.get, parameter[constant[PATH_INFO]]]
if <ast.UnaryOp object at 0x7da1b01e6e90> begin[:]
return[call[call[name[super], parameter[name[SocketIOHandler], name[self]]].handle_one_response, parameter[]]]
name[self].status assign[=] constant[None]
name[self].headers_sent assign[=] constant[False]
name[self].result assign[=] constant[None]
name[self].response_length assign[=] constant[0]
name[self].response_use_chunked assign[=] constant[False]
variable[request_method] assign[=] call[name[self].environ.get, parameter[constant[REQUEST_METHOD]]]
variable[request_tokens] assign[=] call[name[self].RE_REQUEST_URL.match, parameter[name[path]]]
variable[handshake_tokens] assign[=] call[name[self].RE_HANDSHAKE_URL.match, parameter[name[path]]]
variable[disconnect_tokens] assign[=] call[name[self].RE_DISCONNECT_URL.match, parameter[name[path]]]
if name[handshake_tokens] begin[:]
return[call[name[self]._do_handshake, parameter[call[name[handshake_tokens].groupdict, parameter[]]]]]
variable[sessid] assign[=] call[name[tokens]][constant[sessid]]
variable[socket] assign[=] call[name[self].server.get_socket, parameter[name[sessid]]]
if <ast.UnaryOp object at 0x7da1b01e6440> begin[:]
call[name[self].handle_bad_request, parameter[]]
return[list[[]]]
if call[call[name[self].environ][constant[QUERY_STRING]].startswith, parameter[constant[disconnect]]] begin[:]
call[name[socket].disconnect, parameter[]]
call[name[self].handle_disconnect_request, parameter[]]
return[list[[]]]
variable[transport] assign[=] call[name[self].handler_types.get, parameter[call[name[tokens]][constant[transport_id]]]]
variable[old_class] assign[=] constant[None]
if call[name[issubclass], parameter[name[transport], tuple[[<ast.Attribute object at 0x7da1b01e6ce0>, <ast.Attribute object at 0x7da1b01e5a80>]]]] begin[:]
variable[old_class] assign[=] name[self].__class__
name[self].__class__ assign[=] name[self].server.ws_handler_class
name[self].prevent_wsgi_call assign[=] constant[True]
call[name[self].handle_one_response, parameter[]]
call[name[self].environ][constant[socketio]] assign[=] name[socket]
name[self].transport assign[=] call[name[transport], parameter[name[self], name[self].config]]
call[name[self].transport.do_exchange, parameter[name[socket], name[request_method]]]
if <ast.UnaryOp object at 0x7da1b01e6c20> begin[:]
name[socket].connection_established assign[=] constant[True]
name[socket].state assign[=] name[socket].STATE_CONNECTED
call[name[socket]._spawn_heartbeat, parameter[]]
call[name[socket]._spawn_watcher, parameter[]]
<ast.Try object at 0x7da1b01e4c40>
if compare[call[name[tokens]][constant[transport_id]] in list[[<ast.Constant object at 0x7da1b0063df0>, <ast.Constant object at 0x7da1b0063640>]]] begin[:]
call[name[gevent].joinall, parameter[name[socket].jobs]]
if name[old_class] begin[:]
name[self].__class__ assign[=] name[old_class]
if <ast.BoolOp object at 0x7da1b00623b0> begin[:]
if call[name[hasattr], parameter[name[self].websocket, constant[environ]]] begin[:]
<ast.Delete object at 0x7da1b00611e0>
<ast.Delete object at 0x7da1b0063550>
if name[self].environ begin[:]
<ast.Delete object at 0x7da1b0062ad0> | keyword[def] identifier[handle_one_response] ( identifier[self] ):
literal[string]
identifier[path] = identifier[self] . identifier[environ] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[path] . identifier[lstrip] ( literal[string] ). identifier[startswith] ( identifier[self] . identifier[server] . identifier[resource] + literal[string] ):
keyword[return] identifier[super] ( identifier[SocketIOHandler] , identifier[self] ). identifier[handle_one_response] ()
identifier[self] . identifier[status] = keyword[None]
identifier[self] . identifier[headers_sent] = keyword[False]
identifier[self] . identifier[result] = keyword[None]
identifier[self] . identifier[response_length] = literal[int]
identifier[self] . identifier[response_use_chunked] = keyword[False]
identifier[request_method] = identifier[self] . identifier[environ] . identifier[get] ( literal[string] )
identifier[request_tokens] = identifier[self] . identifier[RE_REQUEST_URL] . identifier[match] ( identifier[path] )
identifier[handshake_tokens] = identifier[self] . identifier[RE_HANDSHAKE_URL] . identifier[match] ( identifier[path] )
identifier[disconnect_tokens] = identifier[self] . identifier[RE_DISCONNECT_URL] . identifier[match] ( identifier[path] )
keyword[if] identifier[handshake_tokens] :
keyword[return] identifier[self] . identifier[_do_handshake] ( identifier[handshake_tokens] . identifier[groupdict] ())
keyword[elif] identifier[disconnect_tokens] :
identifier[tokens] = identifier[disconnect_tokens] . identifier[groupdict] ()
keyword[elif] identifier[request_tokens] :
identifier[tokens] = identifier[request_tokens] . identifier[groupdict] ()
keyword[else] :
keyword[return] identifier[super] ( identifier[SocketIOHandler] , identifier[self] ). identifier[handle_one_response] ()
identifier[sessid] = identifier[tokens] [ literal[string] ]
identifier[socket] = identifier[self] . identifier[server] . identifier[get_socket] ( identifier[sessid] )
keyword[if] keyword[not] identifier[socket] :
identifier[self] . identifier[handle_bad_request] ()
keyword[return] []
keyword[if] identifier[self] . identifier[environ] [ literal[string] ]. identifier[startswith] ( literal[string] ):
identifier[socket] . identifier[disconnect] ()
identifier[self] . identifier[handle_disconnect_request] ()
keyword[return] []
identifier[transport] = identifier[self] . identifier[handler_types] . identifier[get] ( identifier[tokens] [ literal[string] ])
identifier[old_class] = keyword[None]
keyword[if] identifier[issubclass] ( identifier[transport] ,( identifier[transports] . identifier[WebsocketTransport] ,
identifier[transports] . identifier[FlashSocketTransport] )):
identifier[old_class] = identifier[self] . identifier[__class__]
identifier[self] . identifier[__class__] = identifier[self] . identifier[server] . identifier[ws_handler_class]
identifier[self] . identifier[prevent_wsgi_call] = keyword[True]
identifier[self] . identifier[handle_one_response] ()
identifier[self] . identifier[environ] [ literal[string] ]= identifier[socket]
identifier[self] . identifier[transport] = identifier[transport] ( identifier[self] , identifier[self] . identifier[config] )
identifier[self] . identifier[transport] . identifier[do_exchange] ( identifier[socket] , identifier[request_method] )
keyword[if] keyword[not] identifier[socket] . identifier[connection_established] :
identifier[socket] . identifier[connection_established] = keyword[True]
identifier[socket] . identifier[state] = identifier[socket] . identifier[STATE_CONNECTED]
identifier[socket] . identifier[_spawn_heartbeat] ()
identifier[socket] . identifier[_spawn_watcher] ()
keyword[try] :
keyword[if] identifier[socket] . identifier[wsgi_app_greenlet] keyword[is] keyword[None] :
identifier[start_response] = keyword[lambda] identifier[status] , identifier[headers] , identifier[exc] = keyword[None] : keyword[None]
identifier[socket] . identifier[wsgi_app_greenlet] = identifier[gevent] . identifier[spawn] ( identifier[self] . identifier[application] ,
identifier[self] . identifier[environ] ,
identifier[start_response] )
keyword[except] :
identifier[self] . identifier[handle_error] (* identifier[sys] . identifier[exc_info] ())
keyword[if] identifier[tokens] [ literal[string] ] keyword[in] [ literal[string] , literal[string] ]:
identifier[gevent] . identifier[joinall] ( identifier[socket] . identifier[jobs] )
keyword[if] identifier[old_class] :
identifier[self] . identifier[__class__] = identifier[old_class]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[websocket] :
keyword[if] identifier[hasattr] ( identifier[self] . identifier[websocket] , literal[string] ):
keyword[del] identifier[self] . identifier[websocket] . identifier[environ]
keyword[del] identifier[self] . identifier[websocket]
keyword[if] identifier[self] . identifier[environ] :
keyword[del] identifier[self] . identifier[environ] | def handle_one_response(self):
"""This function deals with *ONE INCOMING REQUEST* from the web.
It will wire and exchange message to the queues for long-polling
methods, otherwise, will stay alive for websockets.
"""
path = self.environ.get('PATH_INFO')
# Kick non-socket.io requests to our superclass
if not path.lstrip('/').startswith(self.server.resource + '/'):
return super(SocketIOHandler, self).handle_one_response() # depends on [control=['if'], data=[]]
self.status = None
self.headers_sent = False
self.result = None
self.response_length = 0
self.response_use_chunked = False
# This is analyzed for each and every HTTP requests involved
# in the Socket.IO protocol, whether long-running or long-polling
# (read: websocket or xhr-polling methods)
request_method = self.environ.get('REQUEST_METHOD')
request_tokens = self.RE_REQUEST_URL.match(path)
handshake_tokens = self.RE_HANDSHAKE_URL.match(path)
disconnect_tokens = self.RE_DISCONNECT_URL.match(path)
if handshake_tokens:
# Deal with first handshake here, create the Socket and push
# the config up.
return self._do_handshake(handshake_tokens.groupdict()) # depends on [control=['if'], data=[]]
elif disconnect_tokens:
# it's a disconnect request via XHR
tokens = disconnect_tokens.groupdict() # depends on [control=['if'], data=[]]
elif request_tokens:
tokens = request_tokens.groupdict() # depends on [control=['if'], data=[]]
else:
# and continue...
# This is no socket.io request. Let the WSGI app handle it.
return super(SocketIOHandler, self).handle_one_response()
# Setup socket
sessid = tokens['sessid']
socket = self.server.get_socket(sessid)
if not socket:
self.handle_bad_request()
return [] # Do not say the session is not found, just bad request # depends on [control=['if'], data=[]]
# so they don't start brute forcing to find open sessions
if self.environ['QUERY_STRING'].startswith('disconnect'):
# according to socket.io specs disconnect requests
# have a `disconnect` query string
# https://github.com/LearnBoost/socket.io-spec#forced-socket-disconnection
socket.disconnect()
self.handle_disconnect_request()
return [] # depends on [control=['if'], data=[]]
# Setup transport
transport = self.handler_types.get(tokens['transport_id'])
# In case this is WebSocket request, switch to the WebSocketHandler
# FIXME: fix this ugly class change
old_class = None
if issubclass(transport, (transports.WebsocketTransport, transports.FlashSocketTransport)):
old_class = self.__class__
self.__class__ = self.server.ws_handler_class
self.prevent_wsgi_call = True # thank you
# TODO: any errors, treat them ??
self.handle_one_response() # does the Websocket dance before we continue # depends on [control=['if'], data=[]]
# Make the socket object available for WSGI apps
self.environ['socketio'] = socket
# Create a transport and handle the request likewise
self.transport = transport(self, self.config)
# transports register their own spawn'd jobs now
self.transport.do_exchange(socket, request_method)
if not socket.connection_established:
# This is executed only on the *first* packet of the establishment
# of the virtual Socket connection.
socket.connection_established = True
socket.state = socket.STATE_CONNECTED
socket._spawn_heartbeat()
socket._spawn_watcher()
try:
# We'll run the WSGI app if it wasn't already done.
if socket.wsgi_app_greenlet is None:
# TODO: why don't we spawn a call to handle_one_response here ?
# why call directly the WSGI machinery ?
start_response = lambda status, headers, exc=None: None
socket.wsgi_app_greenlet = gevent.spawn(self.application, self.environ, start_response) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except:
self.handle_error(*sys.exc_info()) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# we need to keep the connection open if we are an open socket
if tokens['transport_id'] in ['flashsocket', 'websocket']:
# wait here for all jobs to finished, when they are done
gevent.joinall(socket.jobs) # depends on [control=['if'], data=[]]
# Switch back to the old class so references to this don't use the
# incorrect class. Useful for debugging.
if old_class:
self.__class__ = old_class # depends on [control=['if'], data=[]]
# Clean up circular references so they can be garbage collected.
if hasattr(self, 'websocket') and self.websocket:
if hasattr(self.websocket, 'environ'):
del self.websocket.environ # depends on [control=['if'], data=[]]
del self.websocket # depends on [control=['if'], data=[]]
if self.environ:
del self.environ # depends on [control=['if'], data=[]] |
def set_as_object(self, value):
"""
Sets a new value to map element
:param value: a new element or map value.
"""
self.clear()
map = MapConverter.to_map(value)
self.append(map) | def function[set_as_object, parameter[self, value]]:
constant[
Sets a new value to map element
:param value: a new element or map value.
]
call[name[self].clear, parameter[]]
variable[map] assign[=] call[name[MapConverter].to_map, parameter[name[value]]]
call[name[self].append, parameter[name[map]]] | keyword[def] identifier[set_as_object] ( identifier[self] , identifier[value] ):
literal[string]
identifier[self] . identifier[clear] ()
identifier[map] = identifier[MapConverter] . identifier[to_map] ( identifier[value] )
identifier[self] . identifier[append] ( identifier[map] ) | def set_as_object(self, value):
"""
Sets a new value to map element
:param value: a new element or map value.
"""
self.clear()
map = MapConverter.to_map(value)
self.append(map) |
def scale_lim(lim,factor=1.05):
r'''
Scale limits to be 5% wider, to have a nice plot.
:arguments:
**lim** (``<list>`` | ``<str>``)
The limits. May be a string "[...,...]", which is converted to a list.
:options:
**factor** ([``1.05``] | ``<float>``)
Scale factor.
'''
# convert string "[...,...]"
if type(lim) == str: lim = eval(lim)
# scale limits
D = lim[1] - lim[0]
lim[0] -= (factor-1.)/2. * D
lim[1] += (factor-1.)/2. * D
return lim | def function[scale_lim, parameter[lim, factor]]:
constant[
Scale limits to be 5% wider, to have a nice plot.
:arguments:
**lim** (``<list>`` | ``<str>``)
The limits. May be a string "[...,...]", which is converted to a list.
:options:
**factor** ([``1.05``] | ``<float>``)
Scale factor.
]
if compare[call[name[type], parameter[name[lim]]] equal[==] name[str]] begin[:]
variable[lim] assign[=] call[name[eval], parameter[name[lim]]]
variable[D] assign[=] binary_operation[call[name[lim]][constant[1]] - call[name[lim]][constant[0]]]
<ast.AugAssign object at 0x7da1b20d6710>
<ast.AugAssign object at 0x7da1b20d62f0>
return[name[lim]] | keyword[def] identifier[scale_lim] ( identifier[lim] , identifier[factor] = literal[int] ):
literal[string]
keyword[if] identifier[type] ( identifier[lim] )== identifier[str] : identifier[lim] = identifier[eval] ( identifier[lim] )
identifier[D] = identifier[lim] [ literal[int] ]- identifier[lim] [ literal[int] ]
identifier[lim] [ literal[int] ]-=( identifier[factor] - literal[int] )/ literal[int] * identifier[D]
identifier[lim] [ literal[int] ]+=( identifier[factor] - literal[int] )/ literal[int] * identifier[D]
keyword[return] identifier[lim] | def scale_lim(lim, factor=1.05):
"""
Scale limits to be 5% wider, to have a nice plot.
:arguments:
**lim** (``<list>`` | ``<str>``)
The limits. May be a string "[...,...]", which is converted to a list.
:options:
**factor** ([``1.05``] | ``<float>``)
Scale factor.
"""
# convert string "[...,...]"
if type(lim) == str:
lim = eval(lim) # depends on [control=['if'], data=[]]
# scale limits
D = lim[1] - lim[0]
lim[0] -= (factor - 1.0) / 2.0 * D
lim[1] += (factor - 1.0) / 2.0 * D
return lim |
def build_year(self, dt):
"""
Build the page for the provided year.
"""
self.year = str(dt.year)
logger.debug("Building %s" % self.year)
self.request = self.create_request(self.get_url())
target_path = self.get_build_path()
self.build_file(target_path, self.get_content()) | def function[build_year, parameter[self, dt]]:
constant[
Build the page for the provided year.
]
name[self].year assign[=] call[name[str], parameter[name[dt].year]]
call[name[logger].debug, parameter[binary_operation[constant[Building %s] <ast.Mod object at 0x7da2590d6920> name[self].year]]]
name[self].request assign[=] call[name[self].create_request, parameter[call[name[self].get_url, parameter[]]]]
variable[target_path] assign[=] call[name[self].get_build_path, parameter[]]
call[name[self].build_file, parameter[name[target_path], call[name[self].get_content, parameter[]]]] | keyword[def] identifier[build_year] ( identifier[self] , identifier[dt] ):
literal[string]
identifier[self] . identifier[year] = identifier[str] ( identifier[dt] . identifier[year] )
identifier[logger] . identifier[debug] ( literal[string] % identifier[self] . identifier[year] )
identifier[self] . identifier[request] = identifier[self] . identifier[create_request] ( identifier[self] . identifier[get_url] ())
identifier[target_path] = identifier[self] . identifier[get_build_path] ()
identifier[self] . identifier[build_file] ( identifier[target_path] , identifier[self] . identifier[get_content] ()) | def build_year(self, dt):
"""
Build the page for the provided year.
"""
self.year = str(dt.year)
logger.debug('Building %s' % self.year)
self.request = self.create_request(self.get_url())
target_path = self.get_build_path()
self.build_file(target_path, self.get_content()) |
def safe_unicode(value):
""" Returns:
* a `unicode` instance in Python 2.x, or
* a `str` instance in Python 3.x.
"""
if sys.version_info < (3,0):
if isinstance(value, str):
return value.decode('utf-8')
else:
return unicode(value)
else:
return str(value) | def function[safe_unicode, parameter[value]]:
constant[ Returns:
* a `unicode` instance in Python 2.x, or
* a `str` instance in Python 3.x.
]
if compare[name[sys].version_info less[<] tuple[[<ast.Constant object at 0x7da1b2331270>, <ast.Constant object at 0x7da1b2330e20>]]] begin[:]
if call[name[isinstance], parameter[name[value], name[str]]] begin[:]
return[call[name[value].decode, parameter[constant[utf-8]]]] | keyword[def] identifier[safe_unicode] ( identifier[value] ):
literal[string]
keyword[if] identifier[sys] . identifier[version_info] <( literal[int] , literal[int] ):
keyword[if] identifier[isinstance] ( identifier[value] , identifier[str] ):
keyword[return] identifier[value] . identifier[decode] ( literal[string] )
keyword[else] :
keyword[return] identifier[unicode] ( identifier[value] )
keyword[else] :
keyword[return] identifier[str] ( identifier[value] ) | def safe_unicode(value):
""" Returns:
* a `unicode` instance in Python 2.x, or
* a `str` instance in Python 3.x.
"""
if sys.version_info < (3, 0):
if isinstance(value, str):
return value.decode('utf-8') # depends on [control=['if'], data=[]]
else:
return unicode(value) # depends on [control=['if'], data=[]]
else:
return str(value) |
def cli(env, context_id, static_ip, remote_ip, note):
"""Add an address translation to an IPSEC tunnel context.
A separate configuration request should be made to realize changes on
network devices.
"""
manager = SoftLayer.IPSECManager(env.client)
# ensure context can be retrieved by given id
manager.get_tunnel_context(context_id)
translation = manager.create_translation(context_id,
static_ip=static_ip,
remote_ip=remote_ip,
notes=note)
env.out('Created translation from {} to {} #{}'
.format(static_ip, remote_ip, translation['id'])) | def function[cli, parameter[env, context_id, static_ip, remote_ip, note]]:
constant[Add an address translation to an IPSEC tunnel context.
A separate configuration request should be made to realize changes on
network devices.
]
variable[manager] assign[=] call[name[SoftLayer].IPSECManager, parameter[name[env].client]]
call[name[manager].get_tunnel_context, parameter[name[context_id]]]
variable[translation] assign[=] call[name[manager].create_translation, parameter[name[context_id]]]
call[name[env].out, parameter[call[constant[Created translation from {} to {} #{}].format, parameter[name[static_ip], name[remote_ip], call[name[translation]][constant[id]]]]]] | keyword[def] identifier[cli] ( identifier[env] , identifier[context_id] , identifier[static_ip] , identifier[remote_ip] , identifier[note] ):
literal[string]
identifier[manager] = identifier[SoftLayer] . identifier[IPSECManager] ( identifier[env] . identifier[client] )
identifier[manager] . identifier[get_tunnel_context] ( identifier[context_id] )
identifier[translation] = identifier[manager] . identifier[create_translation] ( identifier[context_id] ,
identifier[static_ip] = identifier[static_ip] ,
identifier[remote_ip] = identifier[remote_ip] ,
identifier[notes] = identifier[note] )
identifier[env] . identifier[out] ( literal[string]
. identifier[format] ( identifier[static_ip] , identifier[remote_ip] , identifier[translation] [ literal[string] ])) | def cli(env, context_id, static_ip, remote_ip, note):
"""Add an address translation to an IPSEC tunnel context.
A separate configuration request should be made to realize changes on
network devices.
"""
manager = SoftLayer.IPSECManager(env.client)
# ensure context can be retrieved by given id
manager.get_tunnel_context(context_id)
translation = manager.create_translation(context_id, static_ip=static_ip, remote_ip=remote_ip, notes=note)
env.out('Created translation from {} to {} #{}'.format(static_ip, remote_ip, translation['id'])) |
def unique_combs(df):
"""
Return data frame with all possible combinations
of the values in the columns
"""
# List of unique values from every column
lst = (x.unique() for x in (df[c] for c in df))
rows = list(itertools.product(*lst))
_df = pd.DataFrame(rows, columns=df.columns)
# preserve the column dtypes
for col in df:
_df[col] = _df[col].astype(df[col].dtype, copy=False)
return _df | def function[unique_combs, parameter[df]]:
constant[
Return data frame with all possible combinations
of the values in the columns
]
variable[lst] assign[=] <ast.GeneratorExp object at 0x7da207f007f0>
variable[rows] assign[=] call[name[list], parameter[call[name[itertools].product, parameter[<ast.Starred object at 0x7da204566620>]]]]
variable[_df] assign[=] call[name[pd].DataFrame, parameter[name[rows]]]
for taget[name[col]] in starred[name[df]] begin[:]
call[name[_df]][name[col]] assign[=] call[call[name[_df]][name[col]].astype, parameter[call[name[df]][name[col]].dtype]]
return[name[_df]] | keyword[def] identifier[unique_combs] ( identifier[df] ):
literal[string]
identifier[lst] =( identifier[x] . identifier[unique] () keyword[for] identifier[x] keyword[in] ( identifier[df] [ identifier[c] ] keyword[for] identifier[c] keyword[in] identifier[df] ))
identifier[rows] = identifier[list] ( identifier[itertools] . identifier[product] (* identifier[lst] ))
identifier[_df] = identifier[pd] . identifier[DataFrame] ( identifier[rows] , identifier[columns] = identifier[df] . identifier[columns] )
keyword[for] identifier[col] keyword[in] identifier[df] :
identifier[_df] [ identifier[col] ]= identifier[_df] [ identifier[col] ]. identifier[astype] ( identifier[df] [ identifier[col] ]. identifier[dtype] , identifier[copy] = keyword[False] )
keyword[return] identifier[_df] | def unique_combs(df):
"""
Return data frame with all possible combinations
of the values in the columns
"""
# List of unique values from every column
lst = (x.unique() for x in (df[c] for c in df))
rows = list(itertools.product(*lst))
_df = pd.DataFrame(rows, columns=df.columns)
# preserve the column dtypes
for col in df:
_df[col] = _df[col].astype(df[col].dtype, copy=False) # depends on [control=['for'], data=['col']]
return _df |
def generate_table_results(output=None, without_header=None):
"""Convert returned data from non-list actions into a nice table for command line usage"""
array = []
str_output = str(output)
if not without_header:
array.append('RESULT')
array.append('-' * max(6, len(str_output)))
array.append(str_output)
return os.linesep.join(array) | def function[generate_table_results, parameter[output, without_header]]:
constant[Convert returned data from non-list actions into a nice table for command line usage]
variable[array] assign[=] list[[]]
variable[str_output] assign[=] call[name[str], parameter[name[output]]]
if <ast.UnaryOp object at 0x7da1b1d35240> begin[:]
call[name[array].append, parameter[constant[RESULT]]]
call[name[array].append, parameter[binary_operation[constant[-] * call[name[max], parameter[constant[6], call[name[len], parameter[name[str_output]]]]]]]]
call[name[array].append, parameter[name[str_output]]]
return[call[name[os].linesep.join, parameter[name[array]]]] | keyword[def] identifier[generate_table_results] ( identifier[output] = keyword[None] , identifier[without_header] = keyword[None] ):
literal[string]
identifier[array] =[]
identifier[str_output] = identifier[str] ( identifier[output] )
keyword[if] keyword[not] identifier[without_header] :
identifier[array] . identifier[append] ( literal[string] )
identifier[array] . identifier[append] ( literal[string] * identifier[max] ( literal[int] , identifier[len] ( identifier[str_output] )))
identifier[array] . identifier[append] ( identifier[str_output] )
keyword[return] identifier[os] . identifier[linesep] . identifier[join] ( identifier[array] ) | def generate_table_results(output=None, without_header=None):
"""Convert returned data from non-list actions into a nice table for command line usage"""
array = []
str_output = str(output)
if not without_header:
array.append('RESULT')
array.append('-' * max(6, len(str_output))) # depends on [control=['if'], data=[]]
array.append(str_output)
return os.linesep.join(array) |
def get_stp_mst_detail_output_cist_port_configured_root_guard(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_stp_mst_detail = ET.Element("get_stp_mst_detail")
config = get_stp_mst_detail
output = ET.SubElement(get_stp_mst_detail, "output")
cist = ET.SubElement(output, "cist")
port = ET.SubElement(cist, "port")
configured_root_guard = ET.SubElement(port, "configured-root-guard")
configured_root_guard.text = kwargs.pop('configured_root_guard')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[get_stp_mst_detail_output_cist_port_configured_root_guard, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[get_stp_mst_detail] assign[=] call[name[ET].Element, parameter[constant[get_stp_mst_detail]]]
variable[config] assign[=] name[get_stp_mst_detail]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_stp_mst_detail], constant[output]]]
variable[cist] assign[=] call[name[ET].SubElement, parameter[name[output], constant[cist]]]
variable[port] assign[=] call[name[ET].SubElement, parameter[name[cist], constant[port]]]
variable[configured_root_guard] assign[=] call[name[ET].SubElement, parameter[name[port], constant[configured-root-guard]]]
name[configured_root_guard].text assign[=] call[name[kwargs].pop, parameter[constant[configured_root_guard]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[get_stp_mst_detail_output_cist_port_configured_root_guard] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[get_stp_mst_detail] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[get_stp_mst_detail]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_stp_mst_detail] , literal[string] )
identifier[cist] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[port] = identifier[ET] . identifier[SubElement] ( identifier[cist] , literal[string] )
identifier[configured_root_guard] = identifier[ET] . identifier[SubElement] ( identifier[port] , literal[string] )
identifier[configured_root_guard] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def get_stp_mst_detail_output_cist_port_configured_root_guard(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
get_stp_mst_detail = ET.Element('get_stp_mst_detail')
config = get_stp_mst_detail
output = ET.SubElement(get_stp_mst_detail, 'output')
cist = ET.SubElement(output, 'cist')
port = ET.SubElement(cist, 'port')
configured_root_guard = ET.SubElement(port, 'configured-root-guard')
configured_root_guard.text = kwargs.pop('configured_root_guard')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def check_statement(self, stmt, max_paths=1, max_path_length=5):
"""Check a single Statement against the model.
Parameters
----------
stmt : indra.statements.Statement
The Statement to check.
max_paths : Optional[int]
The maximum number of specific paths to return for each Statement
to be explained. Default: 1
max_path_length : Optional[int]
The maximum length of specific paths to return. Default: 5
Returns
-------
boolean
True if the model satisfies the Statement.
"""
# Make sure the influence map is initialized
self.get_im()
# Check if this is one of the statement types that we can check
if not isinstance(stmt, (Modification, RegulateAmount,
RegulateActivity, Influence)):
return PathResult(False, 'STATEMENT_TYPE_NOT_HANDLED',
max_paths, max_path_length)
# Get the polarity for the statement
if isinstance(stmt, Modification):
target_polarity = -1 if isinstance(stmt, RemoveModification) else 1
elif isinstance(stmt, RegulateActivity):
target_polarity = 1 if stmt.is_activation else -1
elif isinstance(stmt, RegulateAmount):
target_polarity = -1 if isinstance(stmt, DecreaseAmount) else 1
elif isinstance(stmt, Influence):
target_polarity = -1 if stmt.overall_polarity() == -1 else 1
# Get the subject and object (works also for Modifications)
subj, obj = stmt.agent_list()
# Get a list of monomer patterns matching the subject FIXME Currently
# this will match rules with the corresponding monomer pattern on it.
# In future, this statement should (possibly) also match rules in which
# 1) the agent is in its active form, or 2) the agent is tagged as the
# enzyme in a rule of the appropriate activity (e.g., a phosphorylation
# rule) FIXME
if subj is not None:
subj_mps = list(pa.grounded_monomer_patterns(self.model, subj,
ignore_activities=True))
if not subj_mps:
logger.debug('No monomers found corresponding to agent %s' %
subj)
return PathResult(False, 'SUBJECT_MONOMERS_NOT_FOUND',
max_paths, max_path_length)
else:
subj_mps = [None]
# Observables may not be found for an activation since there may be no
# rule in the model activating the object, and the object may not have
# an "active" site of the appropriate type
obs_names = self.stmt_to_obs[stmt]
if not obs_names:
logger.debug("No observables for stmt %s, returning False" % stmt)
return PathResult(False, 'OBSERVABLES_NOT_FOUND',
max_paths, max_path_length)
for subj_mp, obs_name in itertools.product(subj_mps, obs_names):
# NOTE: Returns on the path found for the first enz_mp/obs combo
result = self._find_im_paths(subj_mp, obs_name, target_polarity,
max_paths, max_path_length)
# If a path was found, then we return it; otherwise, that means
# there was no path for this observable, so we have to try the next
# one
if result.path_found:
return result
# If we got here, then there was no path for any observable
return PathResult(False, 'NO_PATHS_FOUND',
max_paths, max_path_length) | def function[check_statement, parameter[self, stmt, max_paths, max_path_length]]:
constant[Check a single Statement against the model.
Parameters
----------
stmt : indra.statements.Statement
The Statement to check.
max_paths : Optional[int]
The maximum number of specific paths to return for each Statement
to be explained. Default: 1
max_path_length : Optional[int]
The maximum length of specific paths to return. Default: 5
Returns
-------
boolean
True if the model satisfies the Statement.
]
call[name[self].get_im, parameter[]]
if <ast.UnaryOp object at 0x7da2041d9e40> begin[:]
return[call[name[PathResult], parameter[constant[False], constant[STATEMENT_TYPE_NOT_HANDLED], name[max_paths], name[max_path_length]]]]
if call[name[isinstance], parameter[name[stmt], name[Modification]]] begin[:]
variable[target_polarity] assign[=] <ast.IfExp object at 0x7da2041d9d20>
<ast.Tuple object at 0x7da2041db6a0> assign[=] call[name[stmt].agent_list, parameter[]]
if compare[name[subj] is_not constant[None]] begin[:]
variable[subj_mps] assign[=] call[name[list], parameter[call[name[pa].grounded_monomer_patterns, parameter[name[self].model, name[subj]]]]]
if <ast.UnaryOp object at 0x7da2041d9cf0> begin[:]
call[name[logger].debug, parameter[binary_operation[constant[No monomers found corresponding to agent %s] <ast.Mod object at 0x7da2590d6920> name[subj]]]]
return[call[name[PathResult], parameter[constant[False], constant[SUBJECT_MONOMERS_NOT_FOUND], name[max_paths], name[max_path_length]]]]
variable[obs_names] assign[=] call[name[self].stmt_to_obs][name[stmt]]
if <ast.UnaryOp object at 0x7da2044c1cf0> begin[:]
call[name[logger].debug, parameter[binary_operation[constant[No observables for stmt %s, returning False] <ast.Mod object at 0x7da2590d6920> name[stmt]]]]
return[call[name[PathResult], parameter[constant[False], constant[OBSERVABLES_NOT_FOUND], name[max_paths], name[max_path_length]]]]
for taget[tuple[[<ast.Name object at 0x7da2044c1720>, <ast.Name object at 0x7da2044c0e50>]]] in starred[call[name[itertools].product, parameter[name[subj_mps], name[obs_names]]]] begin[:]
variable[result] assign[=] call[name[self]._find_im_paths, parameter[name[subj_mp], name[obs_name], name[target_polarity], name[max_paths], name[max_path_length]]]
if name[result].path_found begin[:]
return[name[result]]
return[call[name[PathResult], parameter[constant[False], constant[NO_PATHS_FOUND], name[max_paths], name[max_path_length]]]] | keyword[def] identifier[check_statement] ( identifier[self] , identifier[stmt] , identifier[max_paths] = literal[int] , identifier[max_path_length] = literal[int] ):
literal[string]
identifier[self] . identifier[get_im] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[stmt] ,( identifier[Modification] , identifier[RegulateAmount] ,
identifier[RegulateActivity] , identifier[Influence] )):
keyword[return] identifier[PathResult] ( keyword[False] , literal[string] ,
identifier[max_paths] , identifier[max_path_length] )
keyword[if] identifier[isinstance] ( identifier[stmt] , identifier[Modification] ):
identifier[target_polarity] =- literal[int] keyword[if] identifier[isinstance] ( identifier[stmt] , identifier[RemoveModification] ) keyword[else] literal[int]
keyword[elif] identifier[isinstance] ( identifier[stmt] , identifier[RegulateActivity] ):
identifier[target_polarity] = literal[int] keyword[if] identifier[stmt] . identifier[is_activation] keyword[else] - literal[int]
keyword[elif] identifier[isinstance] ( identifier[stmt] , identifier[RegulateAmount] ):
identifier[target_polarity] =- literal[int] keyword[if] identifier[isinstance] ( identifier[stmt] , identifier[DecreaseAmount] ) keyword[else] literal[int]
keyword[elif] identifier[isinstance] ( identifier[stmt] , identifier[Influence] ):
identifier[target_polarity] =- literal[int] keyword[if] identifier[stmt] . identifier[overall_polarity] ()==- literal[int] keyword[else] literal[int]
identifier[subj] , identifier[obj] = identifier[stmt] . identifier[agent_list] ()
keyword[if] identifier[subj] keyword[is] keyword[not] keyword[None] :
identifier[subj_mps] = identifier[list] ( identifier[pa] . identifier[grounded_monomer_patterns] ( identifier[self] . identifier[model] , identifier[subj] ,
identifier[ignore_activities] = keyword[True] ))
keyword[if] keyword[not] identifier[subj_mps] :
identifier[logger] . identifier[debug] ( literal[string] %
identifier[subj] )
keyword[return] identifier[PathResult] ( keyword[False] , literal[string] ,
identifier[max_paths] , identifier[max_path_length] )
keyword[else] :
identifier[subj_mps] =[ keyword[None] ]
identifier[obs_names] = identifier[self] . identifier[stmt_to_obs] [ identifier[stmt] ]
keyword[if] keyword[not] identifier[obs_names] :
identifier[logger] . identifier[debug] ( literal[string] % identifier[stmt] )
keyword[return] identifier[PathResult] ( keyword[False] , literal[string] ,
identifier[max_paths] , identifier[max_path_length] )
keyword[for] identifier[subj_mp] , identifier[obs_name] keyword[in] identifier[itertools] . identifier[product] ( identifier[subj_mps] , identifier[obs_names] ):
identifier[result] = identifier[self] . identifier[_find_im_paths] ( identifier[subj_mp] , identifier[obs_name] , identifier[target_polarity] ,
identifier[max_paths] , identifier[max_path_length] )
keyword[if] identifier[result] . identifier[path_found] :
keyword[return] identifier[result]
keyword[return] identifier[PathResult] ( keyword[False] , literal[string] ,
identifier[max_paths] , identifier[max_path_length] ) | def check_statement(self, stmt, max_paths=1, max_path_length=5):
"""Check a single Statement against the model.
Parameters
----------
stmt : indra.statements.Statement
The Statement to check.
max_paths : Optional[int]
The maximum number of specific paths to return for each Statement
to be explained. Default: 1
max_path_length : Optional[int]
The maximum length of specific paths to return. Default: 5
Returns
-------
boolean
True if the model satisfies the Statement.
"""
# Make sure the influence map is initialized
self.get_im()
# Check if this is one of the statement types that we can check
if not isinstance(stmt, (Modification, RegulateAmount, RegulateActivity, Influence)):
return PathResult(False, 'STATEMENT_TYPE_NOT_HANDLED', max_paths, max_path_length) # depends on [control=['if'], data=[]]
# Get the polarity for the statement
if isinstance(stmt, Modification):
target_polarity = -1 if isinstance(stmt, RemoveModification) else 1 # depends on [control=['if'], data=[]]
elif isinstance(stmt, RegulateActivity):
target_polarity = 1 if stmt.is_activation else -1 # depends on [control=['if'], data=[]]
elif isinstance(stmt, RegulateAmount):
target_polarity = -1 if isinstance(stmt, DecreaseAmount) else 1 # depends on [control=['if'], data=[]]
elif isinstance(stmt, Influence):
target_polarity = -1 if stmt.overall_polarity() == -1 else 1 # depends on [control=['if'], data=[]]
# Get the subject and object (works also for Modifications)
(subj, obj) = stmt.agent_list()
# Get a list of monomer patterns matching the subject FIXME Currently
# this will match rules with the corresponding monomer pattern on it.
# In future, this statement should (possibly) also match rules in which
# 1) the agent is in its active form, or 2) the agent is tagged as the
# enzyme in a rule of the appropriate activity (e.g., a phosphorylation
# rule) FIXME
if subj is not None:
subj_mps = list(pa.grounded_monomer_patterns(self.model, subj, ignore_activities=True))
if not subj_mps:
logger.debug('No monomers found corresponding to agent %s' % subj)
return PathResult(False, 'SUBJECT_MONOMERS_NOT_FOUND', max_paths, max_path_length) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['subj']]
else:
subj_mps = [None]
# Observables may not be found for an activation since there may be no
# rule in the model activating the object, and the object may not have
# an "active" site of the appropriate type
obs_names = self.stmt_to_obs[stmt]
if not obs_names:
logger.debug('No observables for stmt %s, returning False' % stmt)
return PathResult(False, 'OBSERVABLES_NOT_FOUND', max_paths, max_path_length) # depends on [control=['if'], data=[]]
for (subj_mp, obs_name) in itertools.product(subj_mps, obs_names):
# NOTE: Returns on the path found for the first enz_mp/obs combo
result = self._find_im_paths(subj_mp, obs_name, target_polarity, max_paths, max_path_length)
# If a path was found, then we return it; otherwise, that means
# there was no path for this observable, so we have to try the next
# one
if result.path_found:
return result # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# If we got here, then there was no path for any observable
return PathResult(False, 'NO_PATHS_FOUND', max_paths, max_path_length) |
def delete(
self
):
"""*delete a project from the document*
**Return:**
- None
**Usage:**
.. code-block:: python
myProject.delete()
"""
projectTitle = self.title
theseProjects = self.parent.projects[:]
for p in theseProjects:
if p.title == projectTitle:
theseProjects.remove(p)
break
self.parent.projects = theseProjects
doc = self
while doc:
if not doc.parent:
break
else:
doc = doc.parent
doc.content = doc.to_string(indentLevel=0, title=False)
return None | def function[delete, parameter[self]]:
constant[*delete a project from the document*
**Return:**
- None
**Usage:**
.. code-block:: python
myProject.delete()
]
variable[projectTitle] assign[=] name[self].title
variable[theseProjects] assign[=] call[name[self].parent.projects][<ast.Slice object at 0x7da1b16ab5b0>]
for taget[name[p]] in starred[name[theseProjects]] begin[:]
if compare[name[p].title equal[==] name[projectTitle]] begin[:]
call[name[theseProjects].remove, parameter[name[p]]]
break
name[self].parent.projects assign[=] name[theseProjects]
variable[doc] assign[=] name[self]
while name[doc] begin[:]
if <ast.UnaryOp object at 0x7da1b1641f30> begin[:]
break
name[doc].content assign[=] call[name[doc].to_string, parameter[]]
return[constant[None]] | keyword[def] identifier[delete] (
identifier[self]
):
literal[string]
identifier[projectTitle] = identifier[self] . identifier[title]
identifier[theseProjects] = identifier[self] . identifier[parent] . identifier[projects] [:]
keyword[for] identifier[p] keyword[in] identifier[theseProjects] :
keyword[if] identifier[p] . identifier[title] == identifier[projectTitle] :
identifier[theseProjects] . identifier[remove] ( identifier[p] )
keyword[break]
identifier[self] . identifier[parent] . identifier[projects] = identifier[theseProjects]
identifier[doc] = identifier[self]
keyword[while] identifier[doc] :
keyword[if] keyword[not] identifier[doc] . identifier[parent] :
keyword[break]
keyword[else] :
identifier[doc] = identifier[doc] . identifier[parent]
identifier[doc] . identifier[content] = identifier[doc] . identifier[to_string] ( identifier[indentLevel] = literal[int] , identifier[title] = keyword[False] )
keyword[return] keyword[None] | def delete(self):
"""*delete a project from the document*
**Return:**
- None
**Usage:**
.. code-block:: python
myProject.delete()
"""
projectTitle = self.title
theseProjects = self.parent.projects[:]
for p in theseProjects:
if p.title == projectTitle:
theseProjects.remove(p)
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']]
self.parent.projects = theseProjects
doc = self
while doc:
if not doc.parent:
break # depends on [control=['if'], data=[]]
else:
doc = doc.parent # depends on [control=['while'], data=[]]
doc.content = doc.to_string(indentLevel=0, title=False)
return None |
def list_domains():
'''
Return a list of virtual machine names on the minion
CLI Example:
.. code-block:: bash
salt '*' virt.list_domains
'''
with _get_xapi_session() as xapi:
hosts = xapi.VM.get_all()
ret = []
for _host in hosts:
if xapi.VM.get_record(_host)['is_control_domain'] is False:
ret.append(xapi.VM.get_name_label(_host))
return ret | def function[list_domains, parameter[]]:
constant[
Return a list of virtual machine names on the minion
CLI Example:
.. code-block:: bash
salt '*' virt.list_domains
]
with call[name[_get_xapi_session], parameter[]] begin[:]
variable[hosts] assign[=] call[name[xapi].VM.get_all, parameter[]]
variable[ret] assign[=] list[[]]
for taget[name[_host]] in starred[name[hosts]] begin[:]
if compare[call[call[name[xapi].VM.get_record, parameter[name[_host]]]][constant[is_control_domain]] is constant[False]] begin[:]
call[name[ret].append, parameter[call[name[xapi].VM.get_name_label, parameter[name[_host]]]]]
return[name[ret]] | keyword[def] identifier[list_domains] ():
literal[string]
keyword[with] identifier[_get_xapi_session] () keyword[as] identifier[xapi] :
identifier[hosts] = identifier[xapi] . identifier[VM] . identifier[get_all] ()
identifier[ret] =[]
keyword[for] identifier[_host] keyword[in] identifier[hosts] :
keyword[if] identifier[xapi] . identifier[VM] . identifier[get_record] ( identifier[_host] )[ literal[string] ] keyword[is] keyword[False] :
identifier[ret] . identifier[append] ( identifier[xapi] . identifier[VM] . identifier[get_name_label] ( identifier[_host] ))
keyword[return] identifier[ret] | def list_domains():
"""
Return a list of virtual machine names on the minion
CLI Example:
.. code-block:: bash
salt '*' virt.list_domains
"""
with _get_xapi_session() as xapi:
hosts = xapi.VM.get_all()
ret = []
for _host in hosts:
if xapi.VM.get_record(_host)['is_control_domain'] is False:
ret.append(xapi.VM.get_name_label(_host)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_host']]
return ret # depends on [control=['with'], data=['xapi']] |
def create_router(self, context, router):
"""Create a new router entry in DB, and create it Arista HW."""
# Add router to the DB
new_router = super(AristaL3ServicePlugin, self).create_router(
context,
router)
# create router on the Arista Hw
try:
self.driver.create_router(context, new_router)
return new_router
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Error creating router on Arista HW router=%s "),
new_router)
super(AristaL3ServicePlugin, self).delete_router(
context,
new_router['id']
) | def function[create_router, parameter[self, context, router]]:
constant[Create a new router entry in DB, and create it Arista HW.]
variable[new_router] assign[=] call[call[name[super], parameter[name[AristaL3ServicePlugin], name[self]]].create_router, parameter[name[context], name[router]]]
<ast.Try object at 0x7da1b195a380> | keyword[def] identifier[create_router] ( identifier[self] , identifier[context] , identifier[router] ):
literal[string]
identifier[new_router] = identifier[super] ( identifier[AristaL3ServicePlugin] , identifier[self] ). identifier[create_router] (
identifier[context] ,
identifier[router] )
keyword[try] :
identifier[self] . identifier[driver] . identifier[create_router] ( identifier[context] , identifier[new_router] )
keyword[return] identifier[new_router]
keyword[except] identifier[Exception] :
keyword[with] identifier[excutils] . identifier[save_and_reraise_exception] ():
identifier[LOG] . identifier[error] ( identifier[_LE] ( literal[string] ),
identifier[new_router] )
identifier[super] ( identifier[AristaL3ServicePlugin] , identifier[self] ). identifier[delete_router] (
identifier[context] ,
identifier[new_router] [ literal[string] ]
) | def create_router(self, context, router):
"""Create a new router entry in DB, and create it Arista HW."""
# Add router to the DB
new_router = super(AristaL3ServicePlugin, self).create_router(context, router)
# create router on the Arista Hw
try:
self.driver.create_router(context, new_router)
return new_router # depends on [control=['try'], data=[]]
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE('Error creating router on Arista HW router=%s '), new_router)
super(AristaL3ServicePlugin, self).delete_router(context, new_router['id']) # depends on [control=['with'], data=[]] # depends on [control=['except'], data=[]] |
def setModel(self,model):
"""
Sets the model this actor should use when drawing.
This method also automatically initializes the new model and removes the old, if any.
"""
if self.model is not None:
self.model.cleanup(self)
self.model = model
model.create(self) | def function[setModel, parameter[self, model]]:
constant[
Sets the model this actor should use when drawing.
This method also automatically initializes the new model and removes the old, if any.
]
if compare[name[self].model is_not constant[None]] begin[:]
call[name[self].model.cleanup, parameter[name[self]]]
name[self].model assign[=] name[model]
call[name[model].create, parameter[name[self]]] | keyword[def] identifier[setModel] ( identifier[self] , identifier[model] ):
literal[string]
keyword[if] identifier[self] . identifier[model] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[model] . identifier[cleanup] ( identifier[self] )
identifier[self] . identifier[model] = identifier[model]
identifier[model] . identifier[create] ( identifier[self] ) | def setModel(self, model):
"""
Sets the model this actor should use when drawing.
This method also automatically initializes the new model and removes the old, if any.
"""
if self.model is not None:
self.model.cleanup(self) # depends on [control=['if'], data=[]]
self.model = model
model.create(self) |
def browse(self, endpoint="hot", category_path="", seed="", q="", timerange="24hr", tag="", offset=0, limit=10):
"""Fetch deviations from public endpoints
:param endpoint: The endpoint from which the deviations will be fetched (hot/morelikethis/newest/undiscovered/popular/tags)
:param category_path: category path to fetch from
:param q: Search query term
:param timerange: The timerange
:param tag: The tag to browse
:param offset: the pagination offset
:param limit: the pagination limit
"""
if endpoint == "hot":
response = self._req('/browse/hot', {
"category_path":category_path,
"offset":offset,
"limit":limit
})
elif endpoint == "morelikethis":
if seed:
response = self._req('/browse/morelikethis', {
"seed":seed,
"category_path":category_path,
"offset":offset,
"limit":limit
})
else:
raise DeviantartError("No seed defined.")
elif endpoint == "newest":
response = self._req('/browse/newest', {
"category_path":category_path,
"q":q,
"offset":offset,
"limit":limit
})
elif endpoint == "undiscovered":
response = self._req('/browse/undiscovered', {
"category_path":category_path,
"offset":offset,
"limit":limit
})
elif endpoint == "popular":
response = self._req('/browse/popular', {
"category_path":category_path,
"q":q,
"timerange":timerange,
"offset":offset,
"limit":limit
})
elif endpoint == "tags":
if tag:
response = self._req('/browse/tags', {
"tag":tag,
"offset":offset,
"limit":limit
})
else:
raise DeviantartError("No tag defined.")
else:
raise DeviantartError("Unknown endpoint.")
deviations = []
for item in response['results']:
d = Deviation()
d.from_dict(item)
deviations.append(d)
return {
"results" : deviations,
"has_more" : response['has_more'],
"next_offset" : response['next_offset']
} | def function[browse, parameter[self, endpoint, category_path, seed, q, timerange, tag, offset, limit]]:
constant[Fetch deviations from public endpoints
:param endpoint: The endpoint from which the deviations will be fetched (hot/morelikethis/newest/undiscovered/popular/tags)
:param category_path: category path to fetch from
:param q: Search query term
:param timerange: The timerange
:param tag: The tag to browse
:param offset: the pagination offset
:param limit: the pagination limit
]
if compare[name[endpoint] equal[==] constant[hot]] begin[:]
variable[response] assign[=] call[name[self]._req, parameter[constant[/browse/hot], dictionary[[<ast.Constant object at 0x7da18bc71e40>, <ast.Constant object at 0x7da18bc73e50>, <ast.Constant object at 0x7da18bc73f10>], [<ast.Name object at 0x7da18bc731c0>, <ast.Name object at 0x7da18bc71660>, <ast.Name object at 0x7da18bc70c40>]]]]
variable[deviations] assign[=] list[[]]
for taget[name[item]] in starred[call[name[response]][constant[results]]] begin[:]
variable[d] assign[=] call[name[Deviation], parameter[]]
call[name[d].from_dict, parameter[name[item]]]
call[name[deviations].append, parameter[name[d]]]
return[dictionary[[<ast.Constant object at 0x7da20c6e4fa0>, <ast.Constant object at 0x7da20c6e6d70>, <ast.Constant object at 0x7da20c6e4340>], [<ast.Name object at 0x7da20c6e6aa0>, <ast.Subscript object at 0x7da20c6e6080>, <ast.Subscript object at 0x7da20c6e59f0>]]] | keyword[def] identifier[browse] ( identifier[self] , identifier[endpoint] = literal[string] , identifier[category_path] = literal[string] , identifier[seed] = literal[string] , identifier[q] = literal[string] , identifier[timerange] = literal[string] , identifier[tag] = literal[string] , identifier[offset] = literal[int] , identifier[limit] = literal[int] ):
literal[string]
keyword[if] identifier[endpoint] == literal[string] :
identifier[response] = identifier[self] . identifier[_req] ( literal[string] ,{
literal[string] : identifier[category_path] ,
literal[string] : identifier[offset] ,
literal[string] : identifier[limit]
})
keyword[elif] identifier[endpoint] == literal[string] :
keyword[if] identifier[seed] :
identifier[response] = identifier[self] . identifier[_req] ( literal[string] ,{
literal[string] : identifier[seed] ,
literal[string] : identifier[category_path] ,
literal[string] : identifier[offset] ,
literal[string] : identifier[limit]
})
keyword[else] :
keyword[raise] identifier[DeviantartError] ( literal[string] )
keyword[elif] identifier[endpoint] == literal[string] :
identifier[response] = identifier[self] . identifier[_req] ( literal[string] ,{
literal[string] : identifier[category_path] ,
literal[string] : identifier[q] ,
literal[string] : identifier[offset] ,
literal[string] : identifier[limit]
})
keyword[elif] identifier[endpoint] == literal[string] :
identifier[response] = identifier[self] . identifier[_req] ( literal[string] ,{
literal[string] : identifier[category_path] ,
literal[string] : identifier[offset] ,
literal[string] : identifier[limit]
})
keyword[elif] identifier[endpoint] == literal[string] :
identifier[response] = identifier[self] . identifier[_req] ( literal[string] ,{
literal[string] : identifier[category_path] ,
literal[string] : identifier[q] ,
literal[string] : identifier[timerange] ,
literal[string] : identifier[offset] ,
literal[string] : identifier[limit]
})
keyword[elif] identifier[endpoint] == literal[string] :
keyword[if] identifier[tag] :
identifier[response] = identifier[self] . identifier[_req] ( literal[string] ,{
literal[string] : identifier[tag] ,
literal[string] : identifier[offset] ,
literal[string] : identifier[limit]
})
keyword[else] :
keyword[raise] identifier[DeviantartError] ( literal[string] )
keyword[else] :
keyword[raise] identifier[DeviantartError] ( literal[string] )
identifier[deviations] =[]
keyword[for] identifier[item] keyword[in] identifier[response] [ literal[string] ]:
identifier[d] = identifier[Deviation] ()
identifier[d] . identifier[from_dict] ( identifier[item] )
identifier[deviations] . identifier[append] ( identifier[d] )
keyword[return] {
literal[string] : identifier[deviations] ,
literal[string] : identifier[response] [ literal[string] ],
literal[string] : identifier[response] [ literal[string] ]
} | def browse(self, endpoint='hot', category_path='', seed='', q='', timerange='24hr', tag='', offset=0, limit=10):
"""Fetch deviations from public endpoints
:param endpoint: The endpoint from which the deviations will be fetched (hot/morelikethis/newest/undiscovered/popular/tags)
:param category_path: category path to fetch from
:param q: Search query term
:param timerange: The timerange
:param tag: The tag to browse
:param offset: the pagination offset
:param limit: the pagination limit
"""
if endpoint == 'hot':
response = self._req('/browse/hot', {'category_path': category_path, 'offset': offset, 'limit': limit}) # depends on [control=['if'], data=[]]
elif endpoint == 'morelikethis':
if seed:
response = self._req('/browse/morelikethis', {'seed': seed, 'category_path': category_path, 'offset': offset, 'limit': limit}) # depends on [control=['if'], data=[]]
else:
raise DeviantartError('No seed defined.') # depends on [control=['if'], data=[]]
elif endpoint == 'newest':
response = self._req('/browse/newest', {'category_path': category_path, 'q': q, 'offset': offset, 'limit': limit}) # depends on [control=['if'], data=[]]
elif endpoint == 'undiscovered':
response = self._req('/browse/undiscovered', {'category_path': category_path, 'offset': offset, 'limit': limit}) # depends on [control=['if'], data=[]]
elif endpoint == 'popular':
response = self._req('/browse/popular', {'category_path': category_path, 'q': q, 'timerange': timerange, 'offset': offset, 'limit': limit}) # depends on [control=['if'], data=[]]
elif endpoint == 'tags':
if tag:
response = self._req('/browse/tags', {'tag': tag, 'offset': offset, 'limit': limit}) # depends on [control=['if'], data=[]]
else:
raise DeviantartError('No tag defined.') # depends on [control=['if'], data=[]]
else:
raise DeviantartError('Unknown endpoint.')
deviations = []
for item in response['results']:
d = Deviation()
d.from_dict(item)
deviations.append(d) # depends on [control=['for'], data=['item']]
return {'results': deviations, 'has_more': response['has_more'], 'next_offset': response['next_offset']} |
def get_all(self, path, data=None, limit=100):
"""Encapsulates GET all requests"""
return ListResultSet(path=path, data=data or {}, limit=limit) | def function[get_all, parameter[self, path, data, limit]]:
constant[Encapsulates GET all requests]
return[call[name[ListResultSet], parameter[]]] | keyword[def] identifier[get_all] ( identifier[self] , identifier[path] , identifier[data] = keyword[None] , identifier[limit] = literal[int] ):
literal[string]
keyword[return] identifier[ListResultSet] ( identifier[path] = identifier[path] , identifier[data] = identifier[data] keyword[or] {}, identifier[limit] = identifier[limit] ) | def get_all(self, path, data=None, limit=100):
"""Encapsulates GET all requests"""
return ListResultSet(path=path, data=data or {}, limit=limit) |
def translate_latex2unicode(text, kb_file=None):
"""Translate latex text to unicode.
This function will take given text, presumably containing LaTeX symbols,
and attempts to translate it to Unicode using the given or default KB
translation table located under
CFG_ETCDIR/bibconvert/KB/latex-to-unicode.kb.
The translated Unicode string will then be returned.
If the translation table and compiled regular expression object is not
previously generated in the current session, they will be.
:param text: a text presumably containing LaTeX symbols.
:type text: string
:param kb_file: full path to file containing latex2unicode translations.
Defaults to CFG_ETCDIR/bibconvert/KB/latex-to-unicode.kb
:type kb_file: string
:return: Unicode representation of translated text
:rtype: unicode
"""
if kb_file is None:
kb_file = get_kb_filename()
# First decode input text to Unicode
try:
text = decode_to_unicode(text)
except UnicodeDecodeError:
text = unicode(wash_for_utf8(text))
# Load translation table, if required
if CFG_LATEX_UNICODE_TRANSLATION_CONST == {}:
_load_latex2unicode_constants(kb_file)
# Find all matches and replace text
for match in CFG_LATEX_UNICODE_TRANSLATION_CONST['regexp_obj'] \
.finditer(text):
# If LaTeX style markers {, } and $ are before or after the
# matching text, it will replace those as well
text = re.sub("[\{\$]?%s[\}\$]?" % (re.escape(match.group()),),
CFG_LATEX_UNICODE_TRANSLATION_CONST[
'table'][match.group()],
text)
# Return Unicode representation of translated text
return text | def function[translate_latex2unicode, parameter[text, kb_file]]:
constant[Translate latex text to unicode.
This function will take given text, presumably containing LaTeX symbols,
and attempts to translate it to Unicode using the given or default KB
translation table located under
CFG_ETCDIR/bibconvert/KB/latex-to-unicode.kb.
The translated Unicode string will then be returned.
If the translation table and compiled regular expression object is not
previously generated in the current session, they will be.
:param text: a text presumably containing LaTeX symbols.
:type text: string
:param kb_file: full path to file containing latex2unicode translations.
Defaults to CFG_ETCDIR/bibconvert/KB/latex-to-unicode.kb
:type kb_file: string
:return: Unicode representation of translated text
:rtype: unicode
]
if compare[name[kb_file] is constant[None]] begin[:]
variable[kb_file] assign[=] call[name[get_kb_filename], parameter[]]
<ast.Try object at 0x7da1b2775a50>
if compare[name[CFG_LATEX_UNICODE_TRANSLATION_CONST] equal[==] dictionary[[], []]] begin[:]
call[name[_load_latex2unicode_constants], parameter[name[kb_file]]]
for taget[name[match]] in starred[call[call[name[CFG_LATEX_UNICODE_TRANSLATION_CONST]][constant[regexp_obj]].finditer, parameter[name[text]]]] begin[:]
variable[text] assign[=] call[name[re].sub, parameter[binary_operation[constant[[\{\$]?%s[\}\$]?] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b27144f0>]]], call[call[name[CFG_LATEX_UNICODE_TRANSLATION_CONST]][constant[table]]][call[name[match].group, parameter[]]], name[text]]]
return[name[text]] | keyword[def] identifier[translate_latex2unicode] ( identifier[text] , identifier[kb_file] = keyword[None] ):
literal[string]
keyword[if] identifier[kb_file] keyword[is] keyword[None] :
identifier[kb_file] = identifier[get_kb_filename] ()
keyword[try] :
identifier[text] = identifier[decode_to_unicode] ( identifier[text] )
keyword[except] identifier[UnicodeDecodeError] :
identifier[text] = identifier[unicode] ( identifier[wash_for_utf8] ( identifier[text] ))
keyword[if] identifier[CFG_LATEX_UNICODE_TRANSLATION_CONST] =={}:
identifier[_load_latex2unicode_constants] ( identifier[kb_file] )
keyword[for] identifier[match] keyword[in] identifier[CFG_LATEX_UNICODE_TRANSLATION_CONST] [ literal[string] ]. identifier[finditer] ( identifier[text] ):
identifier[text] = identifier[re] . identifier[sub] ( literal[string] %( identifier[re] . identifier[escape] ( identifier[match] . identifier[group] ()),),
identifier[CFG_LATEX_UNICODE_TRANSLATION_CONST] [
literal[string] ][ identifier[match] . identifier[group] ()],
identifier[text] )
keyword[return] identifier[text] | def translate_latex2unicode(text, kb_file=None):
"""Translate latex text to unicode.
This function will take given text, presumably containing LaTeX symbols,
and attempts to translate it to Unicode using the given or default KB
translation table located under
CFG_ETCDIR/bibconvert/KB/latex-to-unicode.kb.
The translated Unicode string will then be returned.
If the translation table and compiled regular expression object is not
previously generated in the current session, they will be.
:param text: a text presumably containing LaTeX symbols.
:type text: string
:param kb_file: full path to file containing latex2unicode translations.
Defaults to CFG_ETCDIR/bibconvert/KB/latex-to-unicode.kb
:type kb_file: string
:return: Unicode representation of translated text
:rtype: unicode
"""
if kb_file is None:
kb_file = get_kb_filename() # depends on [control=['if'], data=['kb_file']]
# First decode input text to Unicode
try:
text = decode_to_unicode(text) # depends on [control=['try'], data=[]]
except UnicodeDecodeError:
text = unicode(wash_for_utf8(text)) # depends on [control=['except'], data=[]]
# Load translation table, if required
if CFG_LATEX_UNICODE_TRANSLATION_CONST == {}:
_load_latex2unicode_constants(kb_file) # depends on [control=['if'], data=[]]
# Find all matches and replace text
for match in CFG_LATEX_UNICODE_TRANSLATION_CONST['regexp_obj'].finditer(text):
# If LaTeX style markers {, } and $ are before or after the
# matching text, it will replace those as well
text = re.sub('[\\{\\$]?%s[\\}\\$]?' % (re.escape(match.group()),), CFG_LATEX_UNICODE_TRANSLATION_CONST['table'][match.group()], text) # depends on [control=['for'], data=['match']]
# Return Unicode representation of translated text
return text |
def mapper_from_prior_arguments(self, arguments):
"""
Creates a new model mapper from a dictionary mapping_matrix existing priors to new priors.
Parameters
----------
arguments: {Prior: Prior}
A dictionary mapping_matrix priors to priors
Returns
-------
model_mapper: ModelMapper
A new model mapper with updated priors.
"""
mapper = copy.deepcopy(self)
for prior_model_tuple in self.prior_model_tuples:
setattr(mapper, prior_model_tuple.name,
prior_model_tuple.prior_model.gaussian_prior_model_for_arguments(arguments))
return mapper | def function[mapper_from_prior_arguments, parameter[self, arguments]]:
constant[
Creates a new model mapper from a dictionary mapping_matrix existing priors to new priors.
Parameters
----------
arguments: {Prior: Prior}
A dictionary mapping_matrix priors to priors
Returns
-------
model_mapper: ModelMapper
A new model mapper with updated priors.
]
variable[mapper] assign[=] call[name[copy].deepcopy, parameter[name[self]]]
for taget[name[prior_model_tuple]] in starred[name[self].prior_model_tuples] begin[:]
call[name[setattr], parameter[name[mapper], name[prior_model_tuple].name, call[name[prior_model_tuple].prior_model.gaussian_prior_model_for_arguments, parameter[name[arguments]]]]]
return[name[mapper]] | keyword[def] identifier[mapper_from_prior_arguments] ( identifier[self] , identifier[arguments] ):
literal[string]
identifier[mapper] = identifier[copy] . identifier[deepcopy] ( identifier[self] )
keyword[for] identifier[prior_model_tuple] keyword[in] identifier[self] . identifier[prior_model_tuples] :
identifier[setattr] ( identifier[mapper] , identifier[prior_model_tuple] . identifier[name] ,
identifier[prior_model_tuple] . identifier[prior_model] . identifier[gaussian_prior_model_for_arguments] ( identifier[arguments] ))
keyword[return] identifier[mapper] | def mapper_from_prior_arguments(self, arguments):
"""
Creates a new model mapper from a dictionary mapping_matrix existing priors to new priors.
Parameters
----------
arguments: {Prior: Prior}
A dictionary mapping_matrix priors to priors
Returns
-------
model_mapper: ModelMapper
A new model mapper with updated priors.
"""
mapper = copy.deepcopy(self)
for prior_model_tuple in self.prior_model_tuples:
setattr(mapper, prior_model_tuple.name, prior_model_tuple.prior_model.gaussian_prior_model_for_arguments(arguments)) # depends on [control=['for'], data=['prior_model_tuple']]
return mapper |
def set_freqs(self, n, f_lo_ghz, f_hi_ghz):
"""Set the frequency grid on which to perform the calculations.
**Call signature**
*n*
The number of frequency points to sample.
*f_lo_ghz*
The lowest frequency to sample, in GHz.
*f_hi_ghz*
The highest frequency to sample, in GHz.
Returns
*self* for convenience in chaining.
"""
if not (f_lo_ghz >= 0):
raise ValueError('must have f_lo_ghz >= 0; got %r' % (f_lo_ghz,))
if not (f_hi_ghz >= f_lo_ghz):
raise ValueError('must have f_hi_ghz >= f_lo_ghz; got %r, %r' % (f_hi_ghz, f_lo_ghz))
if not n >= 1:
raise ValueError('must have n >= 1; got %r' % (n,))
self.in_vals[IN_VAL_NFREQ] = n
self.in_vals[IN_VAL_FREQ0] = f_lo_ghz * 1e9 # GHz => Hz
self.in_vals[IN_VAL_LOGDFREQ] = np.log10(f_hi_ghz / f_lo_ghz) / n
return self | def function[set_freqs, parameter[self, n, f_lo_ghz, f_hi_ghz]]:
constant[Set the frequency grid on which to perform the calculations.
**Call signature**
*n*
The number of frequency points to sample.
*f_lo_ghz*
The lowest frequency to sample, in GHz.
*f_hi_ghz*
The highest frequency to sample, in GHz.
Returns
*self* for convenience in chaining.
]
if <ast.UnaryOp object at 0x7da1b27baf80> begin[:]
<ast.Raise object at 0x7da1b27b9e10>
if <ast.UnaryOp object at 0x7da1b27ba800> begin[:]
<ast.Raise object at 0x7da1b27b9b40>
if <ast.UnaryOp object at 0x7da1b27b7b20> begin[:]
<ast.Raise object at 0x7da1b27b4790>
call[name[self].in_vals][name[IN_VAL_NFREQ]] assign[=] name[n]
call[name[self].in_vals][name[IN_VAL_FREQ0]] assign[=] binary_operation[name[f_lo_ghz] * constant[1000000000.0]]
call[name[self].in_vals][name[IN_VAL_LOGDFREQ]] assign[=] binary_operation[call[name[np].log10, parameter[binary_operation[name[f_hi_ghz] / name[f_lo_ghz]]]] / name[n]]
return[name[self]] | keyword[def] identifier[set_freqs] ( identifier[self] , identifier[n] , identifier[f_lo_ghz] , identifier[f_hi_ghz] ):
literal[string]
keyword[if] keyword[not] ( identifier[f_lo_ghz] >= literal[int] ):
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[f_lo_ghz] ,))
keyword[if] keyword[not] ( identifier[f_hi_ghz] >= identifier[f_lo_ghz] ):
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[f_hi_ghz] , identifier[f_lo_ghz] ))
keyword[if] keyword[not] identifier[n] >= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[n] ,))
identifier[self] . identifier[in_vals] [ identifier[IN_VAL_NFREQ] ]= identifier[n]
identifier[self] . identifier[in_vals] [ identifier[IN_VAL_FREQ0] ]= identifier[f_lo_ghz] * literal[int]
identifier[self] . identifier[in_vals] [ identifier[IN_VAL_LOGDFREQ] ]= identifier[np] . identifier[log10] ( identifier[f_hi_ghz] / identifier[f_lo_ghz] )/ identifier[n]
keyword[return] identifier[self] | def set_freqs(self, n, f_lo_ghz, f_hi_ghz):
"""Set the frequency grid on which to perform the calculations.
**Call signature**
*n*
The number of frequency points to sample.
*f_lo_ghz*
The lowest frequency to sample, in GHz.
*f_hi_ghz*
The highest frequency to sample, in GHz.
Returns
*self* for convenience in chaining.
"""
if not f_lo_ghz >= 0:
raise ValueError('must have f_lo_ghz >= 0; got %r' % (f_lo_ghz,)) # depends on [control=['if'], data=[]]
if not f_hi_ghz >= f_lo_ghz:
raise ValueError('must have f_hi_ghz >= f_lo_ghz; got %r, %r' % (f_hi_ghz, f_lo_ghz)) # depends on [control=['if'], data=[]]
if not n >= 1:
raise ValueError('must have n >= 1; got %r' % (n,)) # depends on [control=['if'], data=[]]
self.in_vals[IN_VAL_NFREQ] = n
self.in_vals[IN_VAL_FREQ0] = f_lo_ghz * 1000000000.0 # GHz => Hz
self.in_vals[IN_VAL_LOGDFREQ] = np.log10(f_hi_ghz / f_lo_ghz) / n
return self |
def _match_by_requirements(self, current_venvs, requirements, interpreter, options):
"""Select a venv matching interpreter and options, complying with requirements.
Several venvs can be found in this case, will return the better fit.
"""
matching_venvs = []
for venv_str in current_venvs:
venv = json.loads(venv_str)
# simple filter, need to have exactly same options and interpreter
if venv.get('options') != options or venv.get('interpreter') != interpreter:
continue
# requirements complying: result can be None (no comply) or a score to later sort
matching = self._venv_match(venv['installed'], requirements)
if matching is not None:
matching_venvs.append((matching, venv))
if not matching_venvs:
return
return self._select_better_fit(matching_venvs) | def function[_match_by_requirements, parameter[self, current_venvs, requirements, interpreter, options]]:
constant[Select a venv matching interpreter and options, complying with requirements.
Several venvs can be found in this case, will return the better fit.
]
variable[matching_venvs] assign[=] list[[]]
for taget[name[venv_str]] in starred[name[current_venvs]] begin[:]
variable[venv] assign[=] call[name[json].loads, parameter[name[venv_str]]]
if <ast.BoolOp object at 0x7da1b0d32920> begin[:]
continue
variable[matching] assign[=] call[name[self]._venv_match, parameter[call[name[venv]][constant[installed]], name[requirements]]]
if compare[name[matching] is_not constant[None]] begin[:]
call[name[matching_venvs].append, parameter[tuple[[<ast.Name object at 0x7da1b0f5aaa0>, <ast.Name object at 0x7da1b0f58760>]]]]
if <ast.UnaryOp object at 0x7da1b0f5a4a0> begin[:]
return[None]
return[call[name[self]._select_better_fit, parameter[name[matching_venvs]]]] | keyword[def] identifier[_match_by_requirements] ( identifier[self] , identifier[current_venvs] , identifier[requirements] , identifier[interpreter] , identifier[options] ):
literal[string]
identifier[matching_venvs] =[]
keyword[for] identifier[venv_str] keyword[in] identifier[current_venvs] :
identifier[venv] = identifier[json] . identifier[loads] ( identifier[venv_str] )
keyword[if] identifier[venv] . identifier[get] ( literal[string] )!= identifier[options] keyword[or] identifier[venv] . identifier[get] ( literal[string] )!= identifier[interpreter] :
keyword[continue]
identifier[matching] = identifier[self] . identifier[_venv_match] ( identifier[venv] [ literal[string] ], identifier[requirements] )
keyword[if] identifier[matching] keyword[is] keyword[not] keyword[None] :
identifier[matching_venvs] . identifier[append] (( identifier[matching] , identifier[venv] ))
keyword[if] keyword[not] identifier[matching_venvs] :
keyword[return]
keyword[return] identifier[self] . identifier[_select_better_fit] ( identifier[matching_venvs] ) | def _match_by_requirements(self, current_venvs, requirements, interpreter, options):
"""Select a venv matching interpreter and options, complying with requirements.
Several venvs can be found in this case, will return the better fit.
"""
matching_venvs = []
for venv_str in current_venvs:
venv = json.loads(venv_str)
# simple filter, need to have exactly same options and interpreter
if venv.get('options') != options or venv.get('interpreter') != interpreter:
continue # depends on [control=['if'], data=[]]
# requirements complying: result can be None (no comply) or a score to later sort
matching = self._venv_match(venv['installed'], requirements)
if matching is not None:
matching_venvs.append((matching, venv)) # depends on [control=['if'], data=['matching']] # depends on [control=['for'], data=['venv_str']]
if not matching_venvs:
return # depends on [control=['if'], data=[]]
return self._select_better_fit(matching_venvs) |
def stp(self, val=True):
""" Turn STP protocol on/off. """
if val: state = 'on'
else: state = 'off'
_runshell([brctlexe, 'stp', self.name, state],
"Could not set stp on %s." % self.name) | def function[stp, parameter[self, val]]:
constant[ Turn STP protocol on/off. ]
if name[val] begin[:]
variable[state] assign[=] constant[on]
call[name[_runshell], parameter[list[[<ast.Name object at 0x7da1b0ce6560>, <ast.Constant object at 0x7da1b0ce6680>, <ast.Attribute object at 0x7da1b0ce6740>, <ast.Name object at 0x7da1b0ce6620>]], binary_operation[constant[Could not set stp on %s.] <ast.Mod object at 0x7da2590d6920> name[self].name]]] | keyword[def] identifier[stp] ( identifier[self] , identifier[val] = keyword[True] ):
literal[string]
keyword[if] identifier[val] : identifier[state] = literal[string]
keyword[else] : identifier[state] = literal[string]
identifier[_runshell] ([ identifier[brctlexe] , literal[string] , identifier[self] . identifier[name] , identifier[state] ],
literal[string] % identifier[self] . identifier[name] ) | def stp(self, val=True):
""" Turn STP protocol on/off. """
if val:
state = 'on' # depends on [control=['if'], data=[]]
else:
state = 'off'
_runshell([brctlexe, 'stp', self.name, state], 'Could not set stp on %s.' % self.name) |
def list_namespaced_resource_quota(self, namespace, **kwargs):
"""
list or watch objects of kind ResourceQuota
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_namespaced_resource_quota(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1ResourceQuotaList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_namespaced_resource_quota_with_http_info(namespace, **kwargs)
else:
(data) = self.list_namespaced_resource_quota_with_http_info(namespace, **kwargs)
return data | def function[list_namespaced_resource_quota, parameter[self, namespace]]:
constant[
list or watch objects of kind ResourceQuota
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_namespaced_resource_quota(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1ResourceQuotaList
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].list_namespaced_resource_quota_with_http_info, parameter[name[namespace]]]] | keyword[def] identifier[list_namespaced_resource_quota] ( identifier[self] , identifier[namespace] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[list_namespaced_resource_quota_with_http_info] ( identifier[namespace] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[list_namespaced_resource_quota_with_http_info] ( identifier[namespace] ,** identifier[kwargs] )
keyword[return] identifier[data] | def list_namespaced_resource_quota(self, namespace, **kwargs):
"""
list or watch objects of kind ResourceQuota
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_namespaced_resource_quota(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1ResourceQuotaList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_namespaced_resource_quota_with_http_info(namespace, **kwargs) # depends on [control=['if'], data=[]]
else:
data = self.list_namespaced_resource_quota_with_http_info(namespace, **kwargs)
return data |
def eigenvectors_nrev(T, k, right=True, ncv=None):
r"""Compute eigenvectors of transition matrix.
Parameters
----------
T : (M, M) scipy.sparse matrix
Transition matrix (stochastic matrix)
k : int
Number of eigenvalues to compute
right : bool, optional
If True compute right eigenvectors, left eigenvectors otherwise
ncv : int, optional
The number of Lanczos vectors generated, `ncv` must be greater than k;
it is recommended that ncv > 2*k
Returns
-------
eigvec : (M, k) ndarray
k-eigenvectors of T
"""
if right:
val, vecs = scipy.sparse.linalg.eigs(T, k=k, which='LM', ncv=ncv)
ind = np.argsort(np.abs(val))[::-1]
return vecs[:, ind]
else:
val, vecs = scipy.sparse.linalg.eigs(T.transpose(), k=k, which='LM', ncv=ncv)
ind = np.argsort(np.abs(val))[::-1]
return vecs[:, ind] | def function[eigenvectors_nrev, parameter[T, k, right, ncv]]:
constant[Compute eigenvectors of transition matrix.
Parameters
----------
T : (M, M) scipy.sparse matrix
Transition matrix (stochastic matrix)
k : int
Number of eigenvalues to compute
right : bool, optional
If True compute right eigenvectors, left eigenvectors otherwise
ncv : int, optional
The number of Lanczos vectors generated, `ncv` must be greater than k;
it is recommended that ncv > 2*k
Returns
-------
eigvec : (M, k) ndarray
k-eigenvectors of T
]
if name[right] begin[:]
<ast.Tuple object at 0x7da1b2648790> assign[=] call[name[scipy].sparse.linalg.eigs, parameter[name[T]]]
variable[ind] assign[=] call[call[name[np].argsort, parameter[call[name[np].abs, parameter[name[val]]]]]][<ast.Slice object at 0x7da1b264b340>]
return[call[name[vecs]][tuple[[<ast.Slice object at 0x7da1b26491b0>, <ast.Name object at 0x7da1b264ac50>]]]] | keyword[def] identifier[eigenvectors_nrev] ( identifier[T] , identifier[k] , identifier[right] = keyword[True] , identifier[ncv] = keyword[None] ):
literal[string]
keyword[if] identifier[right] :
identifier[val] , identifier[vecs] = identifier[scipy] . identifier[sparse] . identifier[linalg] . identifier[eigs] ( identifier[T] , identifier[k] = identifier[k] , identifier[which] = literal[string] , identifier[ncv] = identifier[ncv] )
identifier[ind] = identifier[np] . identifier[argsort] ( identifier[np] . identifier[abs] ( identifier[val] ))[::- literal[int] ]
keyword[return] identifier[vecs] [:, identifier[ind] ]
keyword[else] :
identifier[val] , identifier[vecs] = identifier[scipy] . identifier[sparse] . identifier[linalg] . identifier[eigs] ( identifier[T] . identifier[transpose] (), identifier[k] = identifier[k] , identifier[which] = literal[string] , identifier[ncv] = identifier[ncv] )
identifier[ind] = identifier[np] . identifier[argsort] ( identifier[np] . identifier[abs] ( identifier[val] ))[::- literal[int] ]
keyword[return] identifier[vecs] [:, identifier[ind] ] | def eigenvectors_nrev(T, k, right=True, ncv=None):
"""Compute eigenvectors of transition matrix.
Parameters
----------
T : (M, M) scipy.sparse matrix
Transition matrix (stochastic matrix)
k : int
Number of eigenvalues to compute
right : bool, optional
If True compute right eigenvectors, left eigenvectors otherwise
ncv : int, optional
The number of Lanczos vectors generated, `ncv` must be greater than k;
it is recommended that ncv > 2*k
Returns
-------
eigvec : (M, k) ndarray
k-eigenvectors of T
"""
if right:
(val, vecs) = scipy.sparse.linalg.eigs(T, k=k, which='LM', ncv=ncv)
ind = np.argsort(np.abs(val))[::-1]
return vecs[:, ind] # depends on [control=['if'], data=[]]
else:
(val, vecs) = scipy.sparse.linalg.eigs(T.transpose(), k=k, which='LM', ncv=ncv)
ind = np.argsort(np.abs(val))[::-1]
return vecs[:, ind] |
def get_template_as_json(template_id, **kwargs):
"""
Get a template (including attribute and dataset definitions) as a JSON
string. This is just a wrapper around the get_template_as_dict function.
"""
user_id = kwargs['user_id']
return json.dumps(get_template_as_dict(template_id, user_id=user_id)) | def function[get_template_as_json, parameter[template_id]]:
constant[
Get a template (including attribute and dataset definitions) as a JSON
string. This is just a wrapper around the get_template_as_dict function.
]
variable[user_id] assign[=] call[name[kwargs]][constant[user_id]]
return[call[name[json].dumps, parameter[call[name[get_template_as_dict], parameter[name[template_id]]]]]] | keyword[def] identifier[get_template_as_json] ( identifier[template_id] ,** identifier[kwargs] ):
literal[string]
identifier[user_id] = identifier[kwargs] [ literal[string] ]
keyword[return] identifier[json] . identifier[dumps] ( identifier[get_template_as_dict] ( identifier[template_id] , identifier[user_id] = identifier[user_id] )) | def get_template_as_json(template_id, **kwargs):
"""
Get a template (including attribute and dataset definitions) as a JSON
string. This is just a wrapper around the get_template_as_dict function.
"""
user_id = kwargs['user_id']
return json.dumps(get_template_as_dict(template_id, user_id=user_id)) |
def design_delete(self, name, use_devmode=True, syncwait=0):
"""
Delete a design document
:param string name: The name of the design document to delete
:param bool use_devmode: Whether the design to delete is a development
mode design doc.
:param float syncwait: Timeout for operation verification. See
:meth:`design_create` for more information on this parameter.
:return: An :class:`HttpResult` object.
:raise: :exc:`couchbase.exceptions.HTTPError` if the design does not
exist
:raise: :exc:`couchbase.exceptions.TimeoutError` if ``syncwait`` was
specified and the operation could not be verified within the
specified interval.
.. seealso:: :meth:`design_create`, :meth:`design_get`
"""
name = self._mk_devmode(name, use_devmode)
existing = None
if syncwait:
try:
existing = self.design_get(name, use_devmode=False)
except CouchbaseError:
pass
ret = self._http_request(type=_LCB.LCB_HTTP_TYPE_VIEW,
path="_design/" + name,
method=_LCB.LCB_HTTP_METHOD_DELETE)
self._design_poll(name, 'del', existing, syncwait)
return ret | def function[design_delete, parameter[self, name, use_devmode, syncwait]]:
constant[
Delete a design document
:param string name: The name of the design document to delete
:param bool use_devmode: Whether the design to delete is a development
mode design doc.
:param float syncwait: Timeout for operation verification. See
:meth:`design_create` for more information on this parameter.
:return: An :class:`HttpResult` object.
:raise: :exc:`couchbase.exceptions.HTTPError` if the design does not
exist
:raise: :exc:`couchbase.exceptions.TimeoutError` if ``syncwait`` was
specified and the operation could not be verified within the
specified interval.
.. seealso:: :meth:`design_create`, :meth:`design_get`
]
variable[name] assign[=] call[name[self]._mk_devmode, parameter[name[name], name[use_devmode]]]
variable[existing] assign[=] constant[None]
if name[syncwait] begin[:]
<ast.Try object at 0x7da207f9a500>
variable[ret] assign[=] call[name[self]._http_request, parameter[]]
call[name[self]._design_poll, parameter[name[name], constant[del], name[existing], name[syncwait]]]
return[name[ret]] | keyword[def] identifier[design_delete] ( identifier[self] , identifier[name] , identifier[use_devmode] = keyword[True] , identifier[syncwait] = literal[int] ):
literal[string]
identifier[name] = identifier[self] . identifier[_mk_devmode] ( identifier[name] , identifier[use_devmode] )
identifier[existing] = keyword[None]
keyword[if] identifier[syncwait] :
keyword[try] :
identifier[existing] = identifier[self] . identifier[design_get] ( identifier[name] , identifier[use_devmode] = keyword[False] )
keyword[except] identifier[CouchbaseError] :
keyword[pass]
identifier[ret] = identifier[self] . identifier[_http_request] ( identifier[type] = identifier[_LCB] . identifier[LCB_HTTP_TYPE_VIEW] ,
identifier[path] = literal[string] + identifier[name] ,
identifier[method] = identifier[_LCB] . identifier[LCB_HTTP_METHOD_DELETE] )
identifier[self] . identifier[_design_poll] ( identifier[name] , literal[string] , identifier[existing] , identifier[syncwait] )
keyword[return] identifier[ret] | def design_delete(self, name, use_devmode=True, syncwait=0):
"""
Delete a design document
:param string name: The name of the design document to delete
:param bool use_devmode: Whether the design to delete is a development
mode design doc.
:param float syncwait: Timeout for operation verification. See
:meth:`design_create` for more information on this parameter.
:return: An :class:`HttpResult` object.
:raise: :exc:`couchbase.exceptions.HTTPError` if the design does not
exist
:raise: :exc:`couchbase.exceptions.TimeoutError` if ``syncwait`` was
specified and the operation could not be verified within the
specified interval.
.. seealso:: :meth:`design_create`, :meth:`design_get`
"""
name = self._mk_devmode(name, use_devmode)
existing = None
if syncwait:
try:
existing = self.design_get(name, use_devmode=False) # depends on [control=['try'], data=[]]
except CouchbaseError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
ret = self._http_request(type=_LCB.LCB_HTTP_TYPE_VIEW, path='_design/' + name, method=_LCB.LCB_HTTP_METHOD_DELETE)
self._design_poll(name, 'del', existing, syncwait)
return ret |
def raise_for_status(self, allow_redirects=True):
"""Raises stored :class:`HTTPError` or :class:`URLError`, if one occurred."""
if self.status_code == 304:
return
elif self.error:
if self.traceback:
six.reraise(Exception, Exception(self.error), Traceback.from_string(self.traceback).as_traceback())
http_error = HTTPError(self.error)
elif (self.status_code >= 300) and (self.status_code < 400) and not allow_redirects:
http_error = HTTPError('%s Redirection' % (self.status_code))
elif (self.status_code >= 400) and (self.status_code < 500):
http_error = HTTPError('%s Client Error' % (self.status_code))
elif (self.status_code >= 500) and (self.status_code < 600):
http_error = HTTPError('%s Server Error' % (self.status_code))
else:
return
http_error.response = self
raise http_error | def function[raise_for_status, parameter[self, allow_redirects]]:
constant[Raises stored :class:`HTTPError` or :class:`URLError`, if one occurred.]
if compare[name[self].status_code equal[==] constant[304]] begin[:]
return[None]
name[http_error].response assign[=] name[self]
<ast.Raise object at 0x7da1b208dc90> | keyword[def] identifier[raise_for_status] ( identifier[self] , identifier[allow_redirects] = keyword[True] ):
literal[string]
keyword[if] identifier[self] . identifier[status_code] == literal[int] :
keyword[return]
keyword[elif] identifier[self] . identifier[error] :
keyword[if] identifier[self] . identifier[traceback] :
identifier[six] . identifier[reraise] ( identifier[Exception] , identifier[Exception] ( identifier[self] . identifier[error] ), identifier[Traceback] . identifier[from_string] ( identifier[self] . identifier[traceback] ). identifier[as_traceback] ())
identifier[http_error] = identifier[HTTPError] ( identifier[self] . identifier[error] )
keyword[elif] ( identifier[self] . identifier[status_code] >= literal[int] ) keyword[and] ( identifier[self] . identifier[status_code] < literal[int] ) keyword[and] keyword[not] identifier[allow_redirects] :
identifier[http_error] = identifier[HTTPError] ( literal[string] %( identifier[self] . identifier[status_code] ))
keyword[elif] ( identifier[self] . identifier[status_code] >= literal[int] ) keyword[and] ( identifier[self] . identifier[status_code] < literal[int] ):
identifier[http_error] = identifier[HTTPError] ( literal[string] %( identifier[self] . identifier[status_code] ))
keyword[elif] ( identifier[self] . identifier[status_code] >= literal[int] ) keyword[and] ( identifier[self] . identifier[status_code] < literal[int] ):
identifier[http_error] = identifier[HTTPError] ( literal[string] %( identifier[self] . identifier[status_code] ))
keyword[else] :
keyword[return]
identifier[http_error] . identifier[response] = identifier[self]
keyword[raise] identifier[http_error] | def raise_for_status(self, allow_redirects=True):
"""Raises stored :class:`HTTPError` or :class:`URLError`, if one occurred."""
if self.status_code == 304:
return # depends on [control=['if'], data=[]]
elif self.error:
if self.traceback:
six.reraise(Exception, Exception(self.error), Traceback.from_string(self.traceback).as_traceback()) # depends on [control=['if'], data=[]]
http_error = HTTPError(self.error) # depends on [control=['if'], data=[]]
elif self.status_code >= 300 and self.status_code < 400 and (not allow_redirects):
http_error = HTTPError('%s Redirection' % self.status_code) # depends on [control=['if'], data=[]]
elif self.status_code >= 400 and self.status_code < 500:
http_error = HTTPError('%s Client Error' % self.status_code) # depends on [control=['if'], data=[]]
elif self.status_code >= 500 and self.status_code < 600:
http_error = HTTPError('%s Server Error' % self.status_code) # depends on [control=['if'], data=[]]
else:
return
http_error.response = self
raise http_error |
def is_open_on_minute(self, dt):
"""
Given a dt, return whether this exchange is open at the given dt.
Parameters
----------
dt: pd.Timestamp
The dt for which to check if this exchange is open.
Returns
-------
bool
Whether the exchange is open on this dt.
"""
return is_open(self.market_opens_nanos, self.market_closes_nanos,
dt.value) | def function[is_open_on_minute, parameter[self, dt]]:
constant[
Given a dt, return whether this exchange is open at the given dt.
Parameters
----------
dt: pd.Timestamp
The dt for which to check if this exchange is open.
Returns
-------
bool
Whether the exchange is open on this dt.
]
return[call[name[is_open], parameter[name[self].market_opens_nanos, name[self].market_closes_nanos, name[dt].value]]] | keyword[def] identifier[is_open_on_minute] ( identifier[self] , identifier[dt] ):
literal[string]
keyword[return] identifier[is_open] ( identifier[self] . identifier[market_opens_nanos] , identifier[self] . identifier[market_closes_nanos] ,
identifier[dt] . identifier[value] ) | def is_open_on_minute(self, dt):
"""
Given a dt, return whether this exchange is open at the given dt.
Parameters
----------
dt: pd.Timestamp
The dt for which to check if this exchange is open.
Returns
-------
bool
Whether the exchange is open on this dt.
"""
return is_open(self.market_opens_nanos, self.market_closes_nanos, dt.value) |
def set_options(self, **options):
"""
Set instance variables based on an options dict
"""
self.interactive = False
self.verbosity = options['verbosity']
self.symlink = ""
self.clear = False
ignore_patterns = []
self.ignore_patterns = list(set(ignore_patterns))
self.page_themes_updated = 0
self.skins_updated = 0 | def function[set_options, parameter[self]]:
constant[
Set instance variables based on an options dict
]
name[self].interactive assign[=] constant[False]
name[self].verbosity assign[=] call[name[options]][constant[verbosity]]
name[self].symlink assign[=] constant[]
name[self].clear assign[=] constant[False]
variable[ignore_patterns] assign[=] list[[]]
name[self].ignore_patterns assign[=] call[name[list], parameter[call[name[set], parameter[name[ignore_patterns]]]]]
name[self].page_themes_updated assign[=] constant[0]
name[self].skins_updated assign[=] constant[0] | keyword[def] identifier[set_options] ( identifier[self] ,** identifier[options] ):
literal[string]
identifier[self] . identifier[interactive] = keyword[False]
identifier[self] . identifier[verbosity] = identifier[options] [ literal[string] ]
identifier[self] . identifier[symlink] = literal[string]
identifier[self] . identifier[clear] = keyword[False]
identifier[ignore_patterns] =[]
identifier[self] . identifier[ignore_patterns] = identifier[list] ( identifier[set] ( identifier[ignore_patterns] ))
identifier[self] . identifier[page_themes_updated] = literal[int]
identifier[self] . identifier[skins_updated] = literal[int] | def set_options(self, **options):
"""
Set instance variables based on an options dict
"""
self.interactive = False
self.verbosity = options['verbosity']
self.symlink = ''
self.clear = False
ignore_patterns = []
self.ignore_patterns = list(set(ignore_patterns))
self.page_themes_updated = 0
self.skins_updated = 0 |
def klass_to_pypath(klass):
"""when a class is defined within the module that is being executed as
main, the module name will be specified as '__main__' even though the
module actually had its own real name. This ends up being very confusing
to Configman as it tries to refer to a class by its proper module name.
This function will convert a class into its properly qualified actual
pathname. This method is used when a Socorro app is actually invoked
directly through the file in which the App class is defined. This allows
configman to reimport the class under its proper name and treat it as if
it had been run through the SocorroWelcomeApp. In turn, this allows
the application defaults to be fetched from the properly imported class
in time for configman use that information as value source."""
if klass.__module__ == '__main__':
module_path = (
sys.modules['__main__']
.__file__[:-3]
)
module_name = ''
for a_python_path in sys.path:
tentative_pathname = module_path.replace(a_python_path, '')
if tentative_pathname != module_path:
module_name = (
tentative_pathname.replace('/', '.').strip('.')
)
break
if module_name == '':
return py_obj_to_str(klass)
else:
module_name = klass.__module__
return "%s.%s" % (module_name, klass.__name__) | def function[klass_to_pypath, parameter[klass]]:
constant[when a class is defined within the module that is being executed as
main, the module name will be specified as '__main__' even though the
module actually had its own real name. This ends up being very confusing
to Configman as it tries to refer to a class by its proper module name.
This function will convert a class into its properly qualified actual
pathname. This method is used when a Socorro app is actually invoked
directly through the file in which the App class is defined. This allows
configman to reimport the class under its proper name and treat it as if
it had been run through the SocorroWelcomeApp. In turn, this allows
the application defaults to be fetched from the properly imported class
in time for configman use that information as value source.]
if compare[name[klass].__module__ equal[==] constant[__main__]] begin[:]
variable[module_path] assign[=] call[call[name[sys].modules][constant[__main__]].__file__][<ast.Slice object at 0x7da1b26afa30>]
variable[module_name] assign[=] constant[]
for taget[name[a_python_path]] in starred[name[sys].path] begin[:]
variable[tentative_pathname] assign[=] call[name[module_path].replace, parameter[name[a_python_path], constant[]]]
if compare[name[tentative_pathname] not_equal[!=] name[module_path]] begin[:]
variable[module_name] assign[=] call[call[name[tentative_pathname].replace, parameter[constant[/], constant[.]]].strip, parameter[constant[.]]]
break
if compare[name[module_name] equal[==] constant[]] begin[:]
return[call[name[py_obj_to_str], parameter[name[klass]]]]
return[binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b26ac2e0>, <ast.Attribute object at 0x7da1b26acc40>]]]] | keyword[def] identifier[klass_to_pypath] ( identifier[klass] ):
literal[string]
keyword[if] identifier[klass] . identifier[__module__] == literal[string] :
identifier[module_path] =(
identifier[sys] . identifier[modules] [ literal[string] ]
. identifier[__file__] [:- literal[int] ]
)
identifier[module_name] = literal[string]
keyword[for] identifier[a_python_path] keyword[in] identifier[sys] . identifier[path] :
identifier[tentative_pathname] = identifier[module_path] . identifier[replace] ( identifier[a_python_path] , literal[string] )
keyword[if] identifier[tentative_pathname] != identifier[module_path] :
identifier[module_name] =(
identifier[tentative_pathname] . identifier[replace] ( literal[string] , literal[string] ). identifier[strip] ( literal[string] )
)
keyword[break]
keyword[if] identifier[module_name] == literal[string] :
keyword[return] identifier[py_obj_to_str] ( identifier[klass] )
keyword[else] :
identifier[module_name] = identifier[klass] . identifier[__module__]
keyword[return] literal[string] %( identifier[module_name] , identifier[klass] . identifier[__name__] ) | def klass_to_pypath(klass):
"""when a class is defined within the module that is being executed as
main, the module name will be specified as '__main__' even though the
module actually had its own real name. This ends up being very confusing
to Configman as it tries to refer to a class by its proper module name.
This function will convert a class into its properly qualified actual
pathname. This method is used when a Socorro app is actually invoked
directly through the file in which the App class is defined. This allows
configman to reimport the class under its proper name and treat it as if
it had been run through the SocorroWelcomeApp. In turn, this allows
the application defaults to be fetched from the properly imported class
in time for configman use that information as value source."""
if klass.__module__ == '__main__':
module_path = sys.modules['__main__'].__file__[:-3]
module_name = ''
for a_python_path in sys.path:
tentative_pathname = module_path.replace(a_python_path, '')
if tentative_pathname != module_path:
module_name = tentative_pathname.replace('/', '.').strip('.')
break # depends on [control=['if'], data=['tentative_pathname']] # depends on [control=['for'], data=['a_python_path']]
if module_name == '':
return py_obj_to_str(klass) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
module_name = klass.__module__
return '%s.%s' % (module_name, klass.__name__) |
def f_store_items(self, iterator, *args, **kwargs):
"""Stores individual items to disk.
This function is useful if you calculated very large results (or large derived parameters)
during runtime and you want to write these to disk immediately and empty them afterwards
to free some memory.
Instead of storing individual parameters or results you can also store whole subtrees with
:func:`~pypet.naturalnaming.NNGroupNode.f_store_child`.
You can pass the following arguments to `f_store_items`:
:param iterator:
An iterable containing the parameters or results to store, either their
names or the instances. You can also pass group instances or names here
to store the annotations of the groups.
:param non_empties:
Optional keyword argument (boolean),
if `True` will only store the subset of provided items that are not empty.
Empty parameters or results found in `iterator` are simply ignored.
:param args: Additional arguments passed to the storage service
:param kwargs:
If you use the standard hdf5 storage service, you can pass the following additional
keyword argument:
:param overwrite:
List names of parts of your item that should
be erased and overwritten by the new data in your leaf.
You can also set `overwrite=True`
to overwrite all parts.
For instance:
>>> traj.f_add_result('mygroup.myresult', partA=42, partB=44, partC=46)
>>> traj.f_store()
>>> traj.mygroup.myresult.partA = 333
>>> traj.mygroup.myresult.partB = 'I am going to change to a string'
>>> traj.f_store_item('mygroup.myresult', overwrite=['partA', 'partB'])
Will store `'mygroup.myresult'` to disk again and overwrite the parts
`'partA'` and `'partB'` with the new values `333` and
`'I am going to change to a string'`.
The data stored as `partC` is not changed.
Be aware that you need to specify the names of parts as they were stored
to HDF5. Depending on how your leaf construction works, this may differ
from the names the data might have in your leaf in the trajectory container.
Note that massive overwriting will fragment and blow up your HDF5 file.
Try to avoid changing data on disk whenever you can.
:raises:
TypeError:
If the (parent) trajectory has never been stored to disk. In this case
use :func:`pypet.trajectory.f_store` first.
ValueError: If no item could be found to be stored.
Note if you use the standard hdf5 storage service, there are no additional arguments
or keyword arguments to pass!
"""
if not self._stored:
raise TypeError('Cannot store stuff for a trajectory that has never been '
'stored to disk. Please call traj.f_store(only_init=True) first.')
fetched_items = self._nn_interface._fetch_items(STORE, iterator, args, kwargs)
if fetched_items:
self._storage_service.store(pypetconstants.LIST, fetched_items,
trajectory_name=self.v_name)
else:
raise ValueError('Your storage was not successful, could not find a single item '
'to store.') | def function[f_store_items, parameter[self, iterator]]:
constant[Stores individual items to disk.
This function is useful if you calculated very large results (or large derived parameters)
during runtime and you want to write these to disk immediately and empty them afterwards
to free some memory.
Instead of storing individual parameters or results you can also store whole subtrees with
:func:`~pypet.naturalnaming.NNGroupNode.f_store_child`.
You can pass the following arguments to `f_store_items`:
:param iterator:
An iterable containing the parameters or results to store, either their
names or the instances. You can also pass group instances or names here
to store the annotations of the groups.
:param non_empties:
Optional keyword argument (boolean),
if `True` will only store the subset of provided items that are not empty.
Empty parameters or results found in `iterator` are simply ignored.
:param args: Additional arguments passed to the storage service
:param kwargs:
If you use the standard hdf5 storage service, you can pass the following additional
keyword argument:
:param overwrite:
List names of parts of your item that should
be erased and overwritten by the new data in your leaf.
You can also set `overwrite=True`
to overwrite all parts.
For instance:
>>> traj.f_add_result('mygroup.myresult', partA=42, partB=44, partC=46)
>>> traj.f_store()
>>> traj.mygroup.myresult.partA = 333
>>> traj.mygroup.myresult.partB = 'I am going to change to a string'
>>> traj.f_store_item('mygroup.myresult', overwrite=['partA', 'partB'])
Will store `'mygroup.myresult'` to disk again and overwrite the parts
`'partA'` and `'partB'` with the new values `333` and
`'I am going to change to a string'`.
The data stored as `partC` is not changed.
Be aware that you need to specify the names of parts as they were stored
to HDF5. Depending on how your leaf construction works, this may differ
from the names the data might have in your leaf in the trajectory container.
Note that massive overwriting will fragment and blow up your HDF5 file.
Try to avoid changing data on disk whenever you can.
:raises:
TypeError:
If the (parent) trajectory has never been stored to disk. In this case
use :func:`pypet.trajectory.f_store` first.
ValueError: If no item could be found to be stored.
Note if you use the standard hdf5 storage service, there are no additional arguments
or keyword arguments to pass!
]
if <ast.UnaryOp object at 0x7da18f721090> begin[:]
<ast.Raise object at 0x7da18f723ee0>
variable[fetched_items] assign[=] call[name[self]._nn_interface._fetch_items, parameter[name[STORE], name[iterator], name[args], name[kwargs]]]
if name[fetched_items] begin[:]
call[name[self]._storage_service.store, parameter[name[pypetconstants].LIST, name[fetched_items]]] | keyword[def] identifier[f_store_items] ( identifier[self] , identifier[iterator] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_stored] :
keyword[raise] identifier[TypeError] ( literal[string]
literal[string] )
identifier[fetched_items] = identifier[self] . identifier[_nn_interface] . identifier[_fetch_items] ( identifier[STORE] , identifier[iterator] , identifier[args] , identifier[kwargs] )
keyword[if] identifier[fetched_items] :
identifier[self] . identifier[_storage_service] . identifier[store] ( identifier[pypetconstants] . identifier[LIST] , identifier[fetched_items] ,
identifier[trajectory_name] = identifier[self] . identifier[v_name] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] ) | def f_store_items(self, iterator, *args, **kwargs):
"""Stores individual items to disk.
This function is useful if you calculated very large results (or large derived parameters)
during runtime and you want to write these to disk immediately and empty them afterwards
to free some memory.
Instead of storing individual parameters or results you can also store whole subtrees with
:func:`~pypet.naturalnaming.NNGroupNode.f_store_child`.
You can pass the following arguments to `f_store_items`:
:param iterator:
An iterable containing the parameters or results to store, either their
names or the instances. You can also pass group instances or names here
to store the annotations of the groups.
:param non_empties:
Optional keyword argument (boolean),
if `True` will only store the subset of provided items that are not empty.
Empty parameters or results found in `iterator` are simply ignored.
:param args: Additional arguments passed to the storage service
:param kwargs:
If you use the standard hdf5 storage service, you can pass the following additional
keyword argument:
:param overwrite:
List names of parts of your item that should
be erased and overwritten by the new data in your leaf.
You can also set `overwrite=True`
to overwrite all parts.
For instance:
>>> traj.f_add_result('mygroup.myresult', partA=42, partB=44, partC=46)
>>> traj.f_store()
>>> traj.mygroup.myresult.partA = 333
>>> traj.mygroup.myresult.partB = 'I am going to change to a string'
>>> traj.f_store_item('mygroup.myresult', overwrite=['partA', 'partB'])
Will store `'mygroup.myresult'` to disk again and overwrite the parts
`'partA'` and `'partB'` with the new values `333` and
`'I am going to change to a string'`.
The data stored as `partC` is not changed.
Be aware that you need to specify the names of parts as they were stored
to HDF5. Depending on how your leaf construction works, this may differ
from the names the data might have in your leaf in the trajectory container.
Note that massive overwriting will fragment and blow up your HDF5 file.
Try to avoid changing data on disk whenever you can.
:raises:
TypeError:
If the (parent) trajectory has never been stored to disk. In this case
use :func:`pypet.trajectory.f_store` first.
ValueError: If no item could be found to be stored.
Note if you use the standard hdf5 storage service, there are no additional arguments
or keyword arguments to pass!
"""
if not self._stored:
raise TypeError('Cannot store stuff for a trajectory that has never been stored to disk. Please call traj.f_store(only_init=True) first.') # depends on [control=['if'], data=[]]
fetched_items = self._nn_interface._fetch_items(STORE, iterator, args, kwargs)
if fetched_items:
self._storage_service.store(pypetconstants.LIST, fetched_items, trajectory_name=self.v_name) # depends on [control=['if'], data=[]]
else:
raise ValueError('Your storage was not successful, could not find a single item to store.') |
def _compute_baseline_survival(self):
"""
Importantly, this agrees with what the KaplanMeierFitter produces. Ex:
Example
-------
>>> from lifelines.datasets import load_rossi
>>> from lifelines import CoxPHFitter, KaplanMeierFitter
>>> rossi = load_rossi()
>>> kmf = KaplanMeierFitter()
>>> kmf.fit(rossi['week'], rossi['arrest'])
>>> rossi2 = rossi[['week', 'arrest']].copy()
>>> rossi2['var1'] = np.random.randn(432)
>>> cph = CoxPHFitter()
>>> cph.fit(rossi2, 'week', 'arrest')
>>> ax = cph.baseline_survival_.plot()
>>> kmf.plot(ax=ax)
"""
survival_df = np.exp(-self.baseline_cumulative_hazard_)
if self.strata is None:
survival_df.columns = ["baseline survival"]
return survival_df | def function[_compute_baseline_survival, parameter[self]]:
constant[
Importantly, this agrees with what the KaplanMeierFitter produces. Ex:
Example
-------
>>> from lifelines.datasets import load_rossi
>>> from lifelines import CoxPHFitter, KaplanMeierFitter
>>> rossi = load_rossi()
>>> kmf = KaplanMeierFitter()
>>> kmf.fit(rossi['week'], rossi['arrest'])
>>> rossi2 = rossi[['week', 'arrest']].copy()
>>> rossi2['var1'] = np.random.randn(432)
>>> cph = CoxPHFitter()
>>> cph.fit(rossi2, 'week', 'arrest')
>>> ax = cph.baseline_survival_.plot()
>>> kmf.plot(ax=ax)
]
variable[survival_df] assign[=] call[name[np].exp, parameter[<ast.UnaryOp object at 0x7da20e956cb0>]]
if compare[name[self].strata is constant[None]] begin[:]
name[survival_df].columns assign[=] list[[<ast.Constant object at 0x7da20c9909d0>]]
return[name[survival_df]] | keyword[def] identifier[_compute_baseline_survival] ( identifier[self] ):
literal[string]
identifier[survival_df] = identifier[np] . identifier[exp] (- identifier[self] . identifier[baseline_cumulative_hazard_] )
keyword[if] identifier[self] . identifier[strata] keyword[is] keyword[None] :
identifier[survival_df] . identifier[columns] =[ literal[string] ]
keyword[return] identifier[survival_df] | def _compute_baseline_survival(self):
"""
Importantly, this agrees with what the KaplanMeierFitter produces. Ex:
Example
-------
>>> from lifelines.datasets import load_rossi
>>> from lifelines import CoxPHFitter, KaplanMeierFitter
>>> rossi = load_rossi()
>>> kmf = KaplanMeierFitter()
>>> kmf.fit(rossi['week'], rossi['arrest'])
>>> rossi2 = rossi[['week', 'arrest']].copy()
>>> rossi2['var1'] = np.random.randn(432)
>>> cph = CoxPHFitter()
>>> cph.fit(rossi2, 'week', 'arrest')
>>> ax = cph.baseline_survival_.plot()
>>> kmf.plot(ax=ax)
"""
survival_df = np.exp(-self.baseline_cumulative_hazard_)
if self.strata is None:
survival_df.columns = ['baseline survival'] # depends on [control=['if'], data=[]]
return survival_df |
def recent_docs(self, include_docs=True, limit=None):
"""
Retrieve recently changed / added docs
Args:
include_docs <bools> if true full document data will be retrieved
limit <int> if != None and > 0 limit the result set to this amount of rows
Returns a view result to be iterated through
"""
try:
return self.bucket.view("_changed", include_docs=include_docs, limit=limit)
except:
raise | def function[recent_docs, parameter[self, include_docs, limit]]:
constant[
Retrieve recently changed / added docs
Args:
include_docs <bools> if true full document data will be retrieved
limit <int> if != None and > 0 limit the result set to this amount of rows
Returns a view result to be iterated through
]
<ast.Try object at 0x7da2044c35e0> | keyword[def] identifier[recent_docs] ( identifier[self] , identifier[include_docs] = keyword[True] , identifier[limit] = keyword[None] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[bucket] . identifier[view] ( literal[string] , identifier[include_docs] = identifier[include_docs] , identifier[limit] = identifier[limit] )
keyword[except] :
keyword[raise] | def recent_docs(self, include_docs=True, limit=None):
"""
Retrieve recently changed / added docs
Args:
include_docs <bools> if true full document data will be retrieved
limit <int> if != None and > 0 limit the result set to this amount of rows
Returns a view result to be iterated through
"""
try:
return self.bucket.view('_changed', include_docs=include_docs, limit=limit) # depends on [control=['try'], data=[]]
except:
raise # depends on [control=['except'], data=[]] |
def create(path,archiveList,xFilesFactor=None,aggregationMethod=None,sparse=False,useFallocate=False):
"""create(path,archiveList,xFilesFactor=0.5,aggregationMethod='average')
path is a string
archiveList is a list of archives, each of which is of the form (secondsPerPoint,numberOfPoints)
xFilesFactor specifies the fraction of data points in a propagation interval that must have known values for a propagation to occur
aggregationMethod specifies the function to use when propagating data (see ``whisper.aggregationMethods``)
"""
# Set default params
if xFilesFactor is None:
xFilesFactor = 0.5
if aggregationMethod is None:
aggregationMethod = 'average'
#Validate archive configurations...
validateArchiveList(archiveList)
#Looks good, now we create the file and write the header
if os.path.exists(path):
raise InvalidConfiguration("File %s already exists!" % path)
fh = None
try:
fh = open(path,'wb')
if LOCK:
fcntl.flock( fh.fileno(), fcntl.LOCK_EX )
aggregationType = struct.pack( longFormat, aggregationMethodToType.get(aggregationMethod, 1) )
oldest = max([secondsPerPoint * points for secondsPerPoint,points in archiveList])
maxRetention = struct.pack( longFormat, oldest )
xFilesFactor = struct.pack( floatFormat, float(xFilesFactor) )
archiveCount = struct.pack(longFormat, len(archiveList))
packedMetadata = aggregationType + maxRetention + xFilesFactor + archiveCount
fh.write(packedMetadata)
headerSize = metadataSize + (archiveInfoSize * len(archiveList))
archiveOffsetPointer = headerSize
for secondsPerPoint,points in archiveList:
archiveInfo = struct.pack(archiveInfoFormat, archiveOffsetPointer, secondsPerPoint, points)
fh.write(archiveInfo)
archiveOffsetPointer += (points * pointSize)
#If configured to use fallocate and capable of fallocate use that, else
#attempt sparse if configure or zero pre-allocate if sparse isn't configured.
if CAN_FALLOCATE and useFallocate:
remaining = archiveOffsetPointer - headerSize
fallocate(fh, headerSize, remaining)
elif sparse:
fh.seek(archiveOffsetPointer - 1)
fh.write('\x00')
else:
remaining = archiveOffsetPointer - headerSize
chunksize = 16384
zeroes = b'\x00' * chunksize
while remaining > chunksize:
fh.write(zeroes)
remaining -= chunksize
fh.write(zeroes[:remaining])
if AUTOFLUSH:
fh.flush()
os.fsync(fh.fileno())
finally:
if fh:
fh.close() | def function[create, parameter[path, archiveList, xFilesFactor, aggregationMethod, sparse, useFallocate]]:
constant[create(path,archiveList,xFilesFactor=0.5,aggregationMethod='average')
path is a string
archiveList is a list of archives, each of which is of the form (secondsPerPoint,numberOfPoints)
xFilesFactor specifies the fraction of data points in a propagation interval that must have known values for a propagation to occur
aggregationMethod specifies the function to use when propagating data (see ``whisper.aggregationMethods``)
]
if compare[name[xFilesFactor] is constant[None]] begin[:]
variable[xFilesFactor] assign[=] constant[0.5]
if compare[name[aggregationMethod] is constant[None]] begin[:]
variable[aggregationMethod] assign[=] constant[average]
call[name[validateArchiveList], parameter[name[archiveList]]]
if call[name[os].path.exists, parameter[name[path]]] begin[:]
<ast.Raise object at 0x7da1b2345930>
variable[fh] assign[=] constant[None]
<ast.Try object at 0x7da1b2346110> | keyword[def] identifier[create] ( identifier[path] , identifier[archiveList] , identifier[xFilesFactor] = keyword[None] , identifier[aggregationMethod] = keyword[None] , identifier[sparse] = keyword[False] , identifier[useFallocate] = keyword[False] ):
literal[string]
keyword[if] identifier[xFilesFactor] keyword[is] keyword[None] :
identifier[xFilesFactor] = literal[int]
keyword[if] identifier[aggregationMethod] keyword[is] keyword[None] :
identifier[aggregationMethod] = literal[string]
identifier[validateArchiveList] ( identifier[archiveList] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ):
keyword[raise] identifier[InvalidConfiguration] ( literal[string] % identifier[path] )
identifier[fh] = keyword[None]
keyword[try] :
identifier[fh] = identifier[open] ( identifier[path] , literal[string] )
keyword[if] identifier[LOCK] :
identifier[fcntl] . identifier[flock] ( identifier[fh] . identifier[fileno] (), identifier[fcntl] . identifier[LOCK_EX] )
identifier[aggregationType] = identifier[struct] . identifier[pack] ( identifier[longFormat] , identifier[aggregationMethodToType] . identifier[get] ( identifier[aggregationMethod] , literal[int] ))
identifier[oldest] = identifier[max] ([ identifier[secondsPerPoint] * identifier[points] keyword[for] identifier[secondsPerPoint] , identifier[points] keyword[in] identifier[archiveList] ])
identifier[maxRetention] = identifier[struct] . identifier[pack] ( identifier[longFormat] , identifier[oldest] )
identifier[xFilesFactor] = identifier[struct] . identifier[pack] ( identifier[floatFormat] , identifier[float] ( identifier[xFilesFactor] ))
identifier[archiveCount] = identifier[struct] . identifier[pack] ( identifier[longFormat] , identifier[len] ( identifier[archiveList] ))
identifier[packedMetadata] = identifier[aggregationType] + identifier[maxRetention] + identifier[xFilesFactor] + identifier[archiveCount]
identifier[fh] . identifier[write] ( identifier[packedMetadata] )
identifier[headerSize] = identifier[metadataSize] +( identifier[archiveInfoSize] * identifier[len] ( identifier[archiveList] ))
identifier[archiveOffsetPointer] = identifier[headerSize]
keyword[for] identifier[secondsPerPoint] , identifier[points] keyword[in] identifier[archiveList] :
identifier[archiveInfo] = identifier[struct] . identifier[pack] ( identifier[archiveInfoFormat] , identifier[archiveOffsetPointer] , identifier[secondsPerPoint] , identifier[points] )
identifier[fh] . identifier[write] ( identifier[archiveInfo] )
identifier[archiveOffsetPointer] +=( identifier[points] * identifier[pointSize] )
keyword[if] identifier[CAN_FALLOCATE] keyword[and] identifier[useFallocate] :
identifier[remaining] = identifier[archiveOffsetPointer] - identifier[headerSize]
identifier[fallocate] ( identifier[fh] , identifier[headerSize] , identifier[remaining] )
keyword[elif] identifier[sparse] :
identifier[fh] . identifier[seek] ( identifier[archiveOffsetPointer] - literal[int] )
identifier[fh] . identifier[write] ( literal[string] )
keyword[else] :
identifier[remaining] = identifier[archiveOffsetPointer] - identifier[headerSize]
identifier[chunksize] = literal[int]
identifier[zeroes] = literal[string] * identifier[chunksize]
keyword[while] identifier[remaining] > identifier[chunksize] :
identifier[fh] . identifier[write] ( identifier[zeroes] )
identifier[remaining] -= identifier[chunksize]
identifier[fh] . identifier[write] ( identifier[zeroes] [: identifier[remaining] ])
keyword[if] identifier[AUTOFLUSH] :
identifier[fh] . identifier[flush] ()
identifier[os] . identifier[fsync] ( identifier[fh] . identifier[fileno] ())
keyword[finally] :
keyword[if] identifier[fh] :
identifier[fh] . identifier[close] () | def create(path, archiveList, xFilesFactor=None, aggregationMethod=None, sparse=False, useFallocate=False):
"""create(path,archiveList,xFilesFactor=0.5,aggregationMethod='average')
path is a string
archiveList is a list of archives, each of which is of the form (secondsPerPoint,numberOfPoints)
xFilesFactor specifies the fraction of data points in a propagation interval that must have known values for a propagation to occur
aggregationMethod specifies the function to use when propagating data (see ``whisper.aggregationMethods``)
"""
# Set default params
if xFilesFactor is None:
xFilesFactor = 0.5 # depends on [control=['if'], data=['xFilesFactor']]
if aggregationMethod is None:
aggregationMethod = 'average' # depends on [control=['if'], data=['aggregationMethod']]
#Validate archive configurations...
validateArchiveList(archiveList)
#Looks good, now we create the file and write the header
if os.path.exists(path):
raise InvalidConfiguration('File %s already exists!' % path) # depends on [control=['if'], data=[]]
fh = None
try:
fh = open(path, 'wb')
if LOCK:
fcntl.flock(fh.fileno(), fcntl.LOCK_EX) # depends on [control=['if'], data=[]]
aggregationType = struct.pack(longFormat, aggregationMethodToType.get(aggregationMethod, 1))
oldest = max([secondsPerPoint * points for (secondsPerPoint, points) in archiveList])
maxRetention = struct.pack(longFormat, oldest)
xFilesFactor = struct.pack(floatFormat, float(xFilesFactor))
archiveCount = struct.pack(longFormat, len(archiveList))
packedMetadata = aggregationType + maxRetention + xFilesFactor + archiveCount
fh.write(packedMetadata)
headerSize = metadataSize + archiveInfoSize * len(archiveList)
archiveOffsetPointer = headerSize
for (secondsPerPoint, points) in archiveList:
archiveInfo = struct.pack(archiveInfoFormat, archiveOffsetPointer, secondsPerPoint, points)
fh.write(archiveInfo)
archiveOffsetPointer += points * pointSize # depends on [control=['for'], data=[]]
#If configured to use fallocate and capable of fallocate use that, else
#attempt sparse if configure or zero pre-allocate if sparse isn't configured.
if CAN_FALLOCATE and useFallocate:
remaining = archiveOffsetPointer - headerSize
fallocate(fh, headerSize, remaining) # depends on [control=['if'], data=[]]
elif sparse:
fh.seek(archiveOffsetPointer - 1)
fh.write('\x00') # depends on [control=['if'], data=[]]
else:
remaining = archiveOffsetPointer - headerSize
chunksize = 16384
zeroes = b'\x00' * chunksize
while remaining > chunksize:
fh.write(zeroes)
remaining -= chunksize # depends on [control=['while'], data=['remaining', 'chunksize']]
fh.write(zeroes[:remaining])
if AUTOFLUSH:
fh.flush()
os.fsync(fh.fileno()) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
finally:
if fh:
fh.close() # depends on [control=['if'], data=[]] |
def loadInputPatternsFromFile(self, filename, cols = None, everyNrows = 1,
delim = ' ', checkEven = 1):
"""
Deprecated.
"""
self.inputs = self.loadVectors(filename, cols, everyNrows, delim, checkEven, patterned = 1)
self.loadOrder = [0] * len(self.inputs)
for i in range(len(self.inputs)):
self.loadOrder[i] = i | def function[loadInputPatternsFromFile, parameter[self, filename, cols, everyNrows, delim, checkEven]]:
constant[
Deprecated.
]
name[self].inputs assign[=] call[name[self].loadVectors, parameter[name[filename], name[cols], name[everyNrows], name[delim], name[checkEven]]]
name[self].loadOrder assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b039a740>]] * call[name[len], parameter[name[self].inputs]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].inputs]]]]] begin[:]
call[name[self].loadOrder][name[i]] assign[=] name[i] | keyword[def] identifier[loadInputPatternsFromFile] ( identifier[self] , identifier[filename] , identifier[cols] = keyword[None] , identifier[everyNrows] = literal[int] ,
identifier[delim] = literal[string] , identifier[checkEven] = literal[int] ):
literal[string]
identifier[self] . identifier[inputs] = identifier[self] . identifier[loadVectors] ( identifier[filename] , identifier[cols] , identifier[everyNrows] , identifier[delim] , identifier[checkEven] , identifier[patterned] = literal[int] )
identifier[self] . identifier[loadOrder] =[ literal[int] ]* identifier[len] ( identifier[self] . identifier[inputs] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[inputs] )):
identifier[self] . identifier[loadOrder] [ identifier[i] ]= identifier[i] | def loadInputPatternsFromFile(self, filename, cols=None, everyNrows=1, delim=' ', checkEven=1):
"""
Deprecated.
"""
self.inputs = self.loadVectors(filename, cols, everyNrows, delim, checkEven, patterned=1)
self.loadOrder = [0] * len(self.inputs)
for i in range(len(self.inputs)):
self.loadOrder[i] = i # depends on [control=['for'], data=['i']] |
def _file_path(self, dirname, filename):
'''
Builds an absolute path and creates the directory and file if they don't already exist.
@dirname - Directory path.
@filename - File name.
Returns a full path of 'dirname/filename'.
'''
if not os.path.exists(dirname):
try:
os.makedirs(dirname)
except KeyboardInterrupt as e:
raise e
except Exception:
pass
fpath = os.path.join(dirname, filename)
if not os.path.exists(fpath):
try:
open(fpath, "w").close()
except KeyboardInterrupt as e:
raise e
except Exception:
pass
return fpath | def function[_file_path, parameter[self, dirname, filename]]:
constant[
Builds an absolute path and creates the directory and file if they don't already exist.
@dirname - Directory path.
@filename - File name.
Returns a full path of 'dirname/filename'.
]
if <ast.UnaryOp object at 0x7da1b215cb20> begin[:]
<ast.Try object at 0x7da1b215cb80>
variable[fpath] assign[=] call[name[os].path.join, parameter[name[dirname], name[filename]]]
if <ast.UnaryOp object at 0x7da1b215ffa0> begin[:]
<ast.Try object at 0x7da1b215fca0>
return[name[fpath]] | keyword[def] identifier[_file_path] ( identifier[self] , identifier[dirname] , identifier[filename] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[dirname] ):
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[dirname] )
keyword[except] identifier[KeyboardInterrupt] keyword[as] identifier[e] :
keyword[raise] identifier[e]
keyword[except] identifier[Exception] :
keyword[pass]
identifier[fpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[dirname] , identifier[filename] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[fpath] ):
keyword[try] :
identifier[open] ( identifier[fpath] , literal[string] ). identifier[close] ()
keyword[except] identifier[KeyboardInterrupt] keyword[as] identifier[e] :
keyword[raise] identifier[e]
keyword[except] identifier[Exception] :
keyword[pass]
keyword[return] identifier[fpath] | def _file_path(self, dirname, filename):
"""
Builds an absolute path and creates the directory and file if they don't already exist.
@dirname - Directory path.
@filename - File name.
Returns a full path of 'dirname/filename'.
"""
if not os.path.exists(dirname):
try:
os.makedirs(dirname) # depends on [control=['try'], data=[]]
except KeyboardInterrupt as e:
raise e # depends on [control=['except'], data=['e']]
except Exception:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
fpath = os.path.join(dirname, filename)
if not os.path.exists(fpath):
try:
open(fpath, 'w').close() # depends on [control=['try'], data=[]]
except KeyboardInterrupt as e:
raise e # depends on [control=['except'], data=['e']]
except Exception:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return fpath |
def _services(lancet):
"""List all currently configured services."""
def get_services(config):
for s in config.sections():
if config.has_option(s, 'url'):
if config.has_option(s, 'username'):
yield s
for s in get_services(lancet.config):
click.echo('{}[Logout from {}]'.format(s, lancet.config.get(s, 'url'))) | def function[_services, parameter[lancet]]:
constant[List all currently configured services.]
def function[get_services, parameter[config]]:
for taget[name[s]] in starred[call[name[config].sections, parameter[]]] begin[:]
if call[name[config].has_option, parameter[name[s], constant[url]]] begin[:]
if call[name[config].has_option, parameter[name[s], constant[username]]] begin[:]
<ast.Yield object at 0x7da1b1037f10>
for taget[name[s]] in starred[call[name[get_services], parameter[name[lancet].config]]] begin[:]
call[name[click].echo, parameter[call[constant[{}[Logout from {}]].format, parameter[name[s], call[name[lancet].config.get, parameter[name[s], constant[url]]]]]]] | keyword[def] identifier[_services] ( identifier[lancet] ):
literal[string]
keyword[def] identifier[get_services] ( identifier[config] ):
keyword[for] identifier[s] keyword[in] identifier[config] . identifier[sections] ():
keyword[if] identifier[config] . identifier[has_option] ( identifier[s] , literal[string] ):
keyword[if] identifier[config] . identifier[has_option] ( identifier[s] , literal[string] ):
keyword[yield] identifier[s]
keyword[for] identifier[s] keyword[in] identifier[get_services] ( identifier[lancet] . identifier[config] ):
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[s] , identifier[lancet] . identifier[config] . identifier[get] ( identifier[s] , literal[string] ))) | def _services(lancet):
"""List all currently configured services."""
def get_services(config):
for s in config.sections():
if config.has_option(s, 'url'):
if config.has_option(s, 'username'):
yield s # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['s']]
for s in get_services(lancet.config):
click.echo('{}[Logout from {}]'.format(s, lancet.config.get(s, 'url'))) # depends on [control=['for'], data=['s']] |
def perform(action_name, container, **kwargs):
"""
Performs an action on the given container map and configuration.
:param action_name: Name of the action (e.g. ``update``).
:param container: Container configuration name.
:param kwargs: Keyword arguments for the action implementation.
"""
cf = container_fabric()
cf.call(action_name, container, **kwargs) | def function[perform, parameter[action_name, container]]:
constant[
Performs an action on the given container map and configuration.
:param action_name: Name of the action (e.g. ``update``).
:param container: Container configuration name.
:param kwargs: Keyword arguments for the action implementation.
]
variable[cf] assign[=] call[name[container_fabric], parameter[]]
call[name[cf].call, parameter[name[action_name], name[container]]] | keyword[def] identifier[perform] ( identifier[action_name] , identifier[container] ,** identifier[kwargs] ):
literal[string]
identifier[cf] = identifier[container_fabric] ()
identifier[cf] . identifier[call] ( identifier[action_name] , identifier[container] ,** identifier[kwargs] ) | def perform(action_name, container, **kwargs):
"""
Performs an action on the given container map and configuration.
:param action_name: Name of the action (e.g. ``update``).
:param container: Container configuration name.
:param kwargs: Keyword arguments for the action implementation.
"""
cf = container_fabric()
cf.call(action_name, container, **kwargs) |
def drop(manager: Manager, network_id: Optional[int], yes):
"""Drop a network by its identifier or drop all networks."""
if network_id:
manager.drop_network_by_id(network_id)
elif yes or click.confirm('Drop all networks?'):
manager.drop_networks() | def function[drop, parameter[manager, network_id, yes]]:
constant[Drop a network by its identifier or drop all networks.]
if name[network_id] begin[:]
call[name[manager].drop_network_by_id, parameter[name[network_id]]] | keyword[def] identifier[drop] ( identifier[manager] : identifier[Manager] , identifier[network_id] : identifier[Optional] [ identifier[int] ], identifier[yes] ):
literal[string]
keyword[if] identifier[network_id] :
identifier[manager] . identifier[drop_network_by_id] ( identifier[network_id] )
keyword[elif] identifier[yes] keyword[or] identifier[click] . identifier[confirm] ( literal[string] ):
identifier[manager] . identifier[drop_networks] () | def drop(manager: Manager, network_id: Optional[int], yes):
"""Drop a network by its identifier or drop all networks."""
if network_id:
manager.drop_network_by_id(network_id) # depends on [control=['if'], data=[]]
elif yes or click.confirm('Drop all networks?'):
manager.drop_networks() # depends on [control=['if'], data=[]] |
def is_valid_version_ip(param):
"""Checks if the parameter is a valid ip version value.
:param param: Value to be validated.
:return: True if the parameter has a valid ip version value, or False otherwise.
"""
if param is None:
return False
if param == IP_VERSION.IPv4[0] or param == IP_VERSION.IPv6[0]:
return True
return False | def function[is_valid_version_ip, parameter[param]]:
constant[Checks if the parameter is a valid ip version value.
:param param: Value to be validated.
:return: True if the parameter has a valid ip version value, or False otherwise.
]
if compare[name[param] is constant[None]] begin[:]
return[constant[False]]
if <ast.BoolOp object at 0x7da1b2347e20> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[is_valid_version_ip] ( identifier[param] ):
literal[string]
keyword[if] identifier[param] keyword[is] keyword[None] :
keyword[return] keyword[False]
keyword[if] identifier[param] == identifier[IP_VERSION] . identifier[IPv4] [ literal[int] ] keyword[or] identifier[param] == identifier[IP_VERSION] . identifier[IPv6] [ literal[int] ]:
keyword[return] keyword[True]
keyword[return] keyword[False] | def is_valid_version_ip(param):
"""Checks if the parameter is a valid ip version value.
:param param: Value to be validated.
:return: True if the parameter has a valid ip version value, or False otherwise.
"""
if param is None:
return False # depends on [control=['if'], data=[]]
if param == IP_VERSION.IPv4[0] or param == IP_VERSION.IPv6[0]:
return True # depends on [control=['if'], data=[]]
return False |
def leverages(self, block='X'):
"""
Calculate the leverages for each observation
:return:
:rtype:
"""
# TODO check with matlab and simca
try:
if block == 'X':
return np.dot(self.scores_t, np.dot(np.linalg.inv(np.dot(self.scores_t.T, self.scores_t), self.scores_t.T)))
elif block == 'Y':
return np.dot(self.scores_u, np.dot(np.linalg.inv(np.dot(self.scores_u.T, self.scores_u), self.scores_u.T)))
else:
raise ValueError
except ValueError as verr:
raise ValueError('block option must be either X or Y') | def function[leverages, parameter[self, block]]:
constant[
Calculate the leverages for each observation
:return:
:rtype:
]
<ast.Try object at 0x7da20cabe380> | keyword[def] identifier[leverages] ( identifier[self] , identifier[block] = literal[string] ):
literal[string]
keyword[try] :
keyword[if] identifier[block] == literal[string] :
keyword[return] identifier[np] . identifier[dot] ( identifier[self] . identifier[scores_t] , identifier[np] . identifier[dot] ( identifier[np] . identifier[linalg] . identifier[inv] ( identifier[np] . identifier[dot] ( identifier[self] . identifier[scores_t] . identifier[T] , identifier[self] . identifier[scores_t] ), identifier[self] . identifier[scores_t] . identifier[T] )))
keyword[elif] identifier[block] == literal[string] :
keyword[return] identifier[np] . identifier[dot] ( identifier[self] . identifier[scores_u] , identifier[np] . identifier[dot] ( identifier[np] . identifier[linalg] . identifier[inv] ( identifier[np] . identifier[dot] ( identifier[self] . identifier[scores_u] . identifier[T] , identifier[self] . identifier[scores_u] ), identifier[self] . identifier[scores_u] . identifier[T] )))
keyword[else] :
keyword[raise] identifier[ValueError]
keyword[except] identifier[ValueError] keyword[as] identifier[verr] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def leverages(self, block='X'):
"""
Calculate the leverages for each observation
:return:
:rtype:
"""
# TODO check with matlab and simca
try:
if block == 'X':
return np.dot(self.scores_t, np.dot(np.linalg.inv(np.dot(self.scores_t.T, self.scores_t), self.scores_t.T))) # depends on [control=['if'], data=[]]
elif block == 'Y':
return np.dot(self.scores_u, np.dot(np.linalg.inv(np.dot(self.scores_u.T, self.scores_u), self.scores_u.T))) # depends on [control=['if'], data=[]]
else:
raise ValueError # depends on [control=['try'], data=[]]
except ValueError as verr:
raise ValueError('block option must be either X or Y') # depends on [control=['except'], data=[]] |
def _setup_process_environment(self, env):
"""
Sets up the process environment.
"""
environ = self._process.processEnvironment()
if env is None:
env = {}
for k, v in os.environ.items():
environ.insert(k, v)
for k, v in env.items():
environ.insert(k, v)
if sys.platform != 'win32':
environ.insert('TERM', 'xterm')
environ.insert('LINES', '24')
environ.insert('COLUMNS', '450')
environ.insert('PYTHONUNBUFFERED', '1')
environ.insert('QT_LOGGING_TO_CONSOLE', '1')
return environ | def function[_setup_process_environment, parameter[self, env]]:
constant[
Sets up the process environment.
]
variable[environ] assign[=] call[name[self]._process.processEnvironment, parameter[]]
if compare[name[env] is constant[None]] begin[:]
variable[env] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da18c4cef80>, <ast.Name object at 0x7da18c4cc160>]]] in starred[call[name[os].environ.items, parameter[]]] begin[:]
call[name[environ].insert, parameter[name[k], name[v]]]
for taget[tuple[[<ast.Name object at 0x7da18f00dd50>, <ast.Name object at 0x7da18f00ca90>]]] in starred[call[name[env].items, parameter[]]] begin[:]
call[name[environ].insert, parameter[name[k], name[v]]]
if compare[name[sys].platform not_equal[!=] constant[win32]] begin[:]
call[name[environ].insert, parameter[constant[TERM], constant[xterm]]]
call[name[environ].insert, parameter[constant[LINES], constant[24]]]
call[name[environ].insert, parameter[constant[COLUMNS], constant[450]]]
call[name[environ].insert, parameter[constant[PYTHONUNBUFFERED], constant[1]]]
call[name[environ].insert, parameter[constant[QT_LOGGING_TO_CONSOLE], constant[1]]]
return[name[environ]] | keyword[def] identifier[_setup_process_environment] ( identifier[self] , identifier[env] ):
literal[string]
identifier[environ] = identifier[self] . identifier[_process] . identifier[processEnvironment] ()
keyword[if] identifier[env] keyword[is] keyword[None] :
identifier[env] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[os] . identifier[environ] . identifier[items] ():
identifier[environ] . identifier[insert] ( identifier[k] , identifier[v] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[env] . identifier[items] ():
identifier[environ] . identifier[insert] ( identifier[k] , identifier[v] )
keyword[if] identifier[sys] . identifier[platform] != literal[string] :
identifier[environ] . identifier[insert] ( literal[string] , literal[string] )
identifier[environ] . identifier[insert] ( literal[string] , literal[string] )
identifier[environ] . identifier[insert] ( literal[string] , literal[string] )
identifier[environ] . identifier[insert] ( literal[string] , literal[string] )
identifier[environ] . identifier[insert] ( literal[string] , literal[string] )
keyword[return] identifier[environ] | def _setup_process_environment(self, env):
"""
Sets up the process environment.
"""
environ = self._process.processEnvironment()
if env is None:
env = {} # depends on [control=['if'], data=['env']]
for (k, v) in os.environ.items():
environ.insert(k, v) # depends on [control=['for'], data=[]]
for (k, v) in env.items():
environ.insert(k, v) # depends on [control=['for'], data=[]]
if sys.platform != 'win32':
environ.insert('TERM', 'xterm')
environ.insert('LINES', '24')
environ.insert('COLUMNS', '450') # depends on [control=['if'], data=[]]
environ.insert('PYTHONUNBUFFERED', '1')
environ.insert('QT_LOGGING_TO_CONSOLE', '1')
return environ |
def mouseMoved(self, viewPos):
""" Updates the probe text with the values under the cursor.
Draws a vertical line and a symbol at the position of the probe.
"""
try:
check_class(viewPos, QtCore.QPointF)
self.crossLineVerShadow.setVisible(False)
self.crossLineVertical.setVisible(False)
self.probeLabel.setText("")
self.probeDataItem.clear()
if (self._hasValidData() and self.config.probeCti.configValue and
self.viewBox.sceneBoundingRect().contains(viewPos)):
scenePos = self.viewBox.mapSceneToView(viewPos)
index = int(scenePos.x())
data = self.slicedArray.data
if not 0 <= index < len(data):
txt = "<span style='color: grey'>no data at cursor</span>"
self.probeLabel.setText(txt)
else:
valueStr = to_string(data[index], masked=self.slicedArray.maskAt(index),
maskFormat='<masked>')
self.probeLabel.setText("pos = {!r}, value = {}".format(index, valueStr))
if np.isfinite(data[index]):
self.crossLineVerShadow.setVisible(True)
self.crossLineVerShadow.setPos(index)
self.crossLineVertical.setVisible(True)
self.crossLineVertical.setPos(index)
if data[index] > 0 or self.config.yLogCti.configValue == False:
self.probeDataItem.setData((index,), (data[index],))
except Exception as ex:
# In contrast to _drawContents, this function is a slot and thus must not throw
# exceptions. The exception is logged. Perhaps we should clear the cross plots, but
# this could, in turn, raise exceptions.
if DEBUGGING:
raise
else:
logger.exception(ex) | def function[mouseMoved, parameter[self, viewPos]]:
constant[ Updates the probe text with the values under the cursor.
Draws a vertical line and a symbol at the position of the probe.
]
<ast.Try object at 0x7da1b04f9f90> | keyword[def] identifier[mouseMoved] ( identifier[self] , identifier[viewPos] ):
literal[string]
keyword[try] :
identifier[check_class] ( identifier[viewPos] , identifier[QtCore] . identifier[QPointF] )
identifier[self] . identifier[crossLineVerShadow] . identifier[setVisible] ( keyword[False] )
identifier[self] . identifier[crossLineVertical] . identifier[setVisible] ( keyword[False] )
identifier[self] . identifier[probeLabel] . identifier[setText] ( literal[string] )
identifier[self] . identifier[probeDataItem] . identifier[clear] ()
keyword[if] ( identifier[self] . identifier[_hasValidData] () keyword[and] identifier[self] . identifier[config] . identifier[probeCti] . identifier[configValue] keyword[and]
identifier[self] . identifier[viewBox] . identifier[sceneBoundingRect] (). identifier[contains] ( identifier[viewPos] )):
identifier[scenePos] = identifier[self] . identifier[viewBox] . identifier[mapSceneToView] ( identifier[viewPos] )
identifier[index] = identifier[int] ( identifier[scenePos] . identifier[x] ())
identifier[data] = identifier[self] . identifier[slicedArray] . identifier[data]
keyword[if] keyword[not] literal[int] <= identifier[index] < identifier[len] ( identifier[data] ):
identifier[txt] = literal[string]
identifier[self] . identifier[probeLabel] . identifier[setText] ( identifier[txt] )
keyword[else] :
identifier[valueStr] = identifier[to_string] ( identifier[data] [ identifier[index] ], identifier[masked] = identifier[self] . identifier[slicedArray] . identifier[maskAt] ( identifier[index] ),
identifier[maskFormat] = literal[string] )
identifier[self] . identifier[probeLabel] . identifier[setText] ( literal[string] . identifier[format] ( identifier[index] , identifier[valueStr] ))
keyword[if] identifier[np] . identifier[isfinite] ( identifier[data] [ identifier[index] ]):
identifier[self] . identifier[crossLineVerShadow] . identifier[setVisible] ( keyword[True] )
identifier[self] . identifier[crossLineVerShadow] . identifier[setPos] ( identifier[index] )
identifier[self] . identifier[crossLineVertical] . identifier[setVisible] ( keyword[True] )
identifier[self] . identifier[crossLineVertical] . identifier[setPos] ( identifier[index] )
keyword[if] identifier[data] [ identifier[index] ]> literal[int] keyword[or] identifier[self] . identifier[config] . identifier[yLogCti] . identifier[configValue] == keyword[False] :
identifier[self] . identifier[probeDataItem] . identifier[setData] (( identifier[index] ,),( identifier[data] [ identifier[index] ],))
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
keyword[if] identifier[DEBUGGING] :
keyword[raise]
keyword[else] :
identifier[logger] . identifier[exception] ( identifier[ex] ) | def mouseMoved(self, viewPos):
""" Updates the probe text with the values under the cursor.
Draws a vertical line and a symbol at the position of the probe.
"""
try:
check_class(viewPos, QtCore.QPointF)
self.crossLineVerShadow.setVisible(False)
self.crossLineVertical.setVisible(False)
self.probeLabel.setText('')
self.probeDataItem.clear()
if self._hasValidData() and self.config.probeCti.configValue and self.viewBox.sceneBoundingRect().contains(viewPos):
scenePos = self.viewBox.mapSceneToView(viewPos)
index = int(scenePos.x())
data = self.slicedArray.data
if not 0 <= index < len(data):
txt = "<span style='color: grey'>no data at cursor</span>"
self.probeLabel.setText(txt) # depends on [control=['if'], data=[]]
else:
valueStr = to_string(data[index], masked=self.slicedArray.maskAt(index), maskFormat='<masked>')
self.probeLabel.setText('pos = {!r}, value = {}'.format(index, valueStr))
if np.isfinite(data[index]):
self.crossLineVerShadow.setVisible(True)
self.crossLineVerShadow.setPos(index)
self.crossLineVertical.setVisible(True)
self.crossLineVertical.setPos(index)
if data[index] > 0 or self.config.yLogCti.configValue == False:
self.probeDataItem.setData((index,), (data[index],)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as ex:
# In contrast to _drawContents, this function is a slot and thus must not throw
# exceptions. The exception is logged. Perhaps we should clear the cross plots, but
# this could, in turn, raise exceptions.
if DEBUGGING:
raise # depends on [control=['if'], data=[]]
else:
logger.exception(ex) # depends on [control=['except'], data=['ex']] |
def indent(self):
"""
Indents the document text under cursor.
:return: Method success.
:rtype: bool
"""
cursor = self.textCursor()
if not cursor.hasSelection():
cursor.insertText(self.__indent_marker)
else:
block = self.document().findBlock(cursor.selectionStart())
while True:
block_cursor = self.textCursor()
block_cursor.setPosition(block.position())
block_cursor.insertText(self.__indent_marker)
if block.contains(cursor.selectionEnd()):
break
block = block.next()
return True | def function[indent, parameter[self]]:
constant[
Indents the document text under cursor.
:return: Method success.
:rtype: bool
]
variable[cursor] assign[=] call[name[self].textCursor, parameter[]]
if <ast.UnaryOp object at 0x7da1b0ab8d60> begin[:]
call[name[cursor].insertText, parameter[name[self].__indent_marker]]
return[constant[True]] | keyword[def] identifier[indent] ( identifier[self] ):
literal[string]
identifier[cursor] = identifier[self] . identifier[textCursor] ()
keyword[if] keyword[not] identifier[cursor] . identifier[hasSelection] ():
identifier[cursor] . identifier[insertText] ( identifier[self] . identifier[__indent_marker] )
keyword[else] :
identifier[block] = identifier[self] . identifier[document] (). identifier[findBlock] ( identifier[cursor] . identifier[selectionStart] ())
keyword[while] keyword[True] :
identifier[block_cursor] = identifier[self] . identifier[textCursor] ()
identifier[block_cursor] . identifier[setPosition] ( identifier[block] . identifier[position] ())
identifier[block_cursor] . identifier[insertText] ( identifier[self] . identifier[__indent_marker] )
keyword[if] identifier[block] . identifier[contains] ( identifier[cursor] . identifier[selectionEnd] ()):
keyword[break]
identifier[block] = identifier[block] . identifier[next] ()
keyword[return] keyword[True] | def indent(self):
"""
Indents the document text under cursor.
:return: Method success.
:rtype: bool
"""
cursor = self.textCursor()
if not cursor.hasSelection():
cursor.insertText(self.__indent_marker) # depends on [control=['if'], data=[]]
else:
block = self.document().findBlock(cursor.selectionStart())
while True:
block_cursor = self.textCursor()
block_cursor.setPosition(block.position())
block_cursor.insertText(self.__indent_marker)
if block.contains(cursor.selectionEnd()):
break # depends on [control=['if'], data=[]]
block = block.next() # depends on [control=['while'], data=[]]
return True |
def ray_bounds(ray_origins,
ray_directions,
bounds,
buffer_dist=1e-5):
"""
Given a set of rays and a bounding box for the volume of interest
where the rays will be passing through, find the bounding boxes
of the rays as they pass through the volume.
Parameters
------------
ray_origins: (m,3) float, ray origin points
ray_directions: (m,3) float, ray direction vectors
bounds: (2,3) bounding box (min, max)
buffer_dist: float, distance to pad zero width bounding boxes
Returns
---------
ray_bounding: (n) set of AABB of rays passing through volume
"""
ray_origins = np.asanyarray(ray_origins, dtype=np.float64)
ray_directions = np.asanyarray(ray_directions, dtype=np.float64)
# bounding box we are testing against
bounds = np.asanyarray(bounds)
# find the primary axis of the vector
axis = np.abs(ray_directions).argmax(axis=1)
axis_bound = bounds.reshape((2, -1)).T[axis]
axis_ori = np.array([ray_origins[i][a]
for i, a in enumerate(axis)]).reshape((-1, 1))
axis_dir = np.array([ray_directions[i][a]
for i, a in enumerate(axis)]).reshape((-1, 1))
# parametric equation of a line
# point = direction*t + origin
# p = dt + o
# t = (p-o)/d
t = (axis_bound - axis_ori) / axis_dir
# prevent the bounding box from including triangles
# behind the ray origin
t[t < buffer_dist] = buffer_dist
# the value of t for both the upper and lower bounds
t_a = t[:, 0].reshape((-1, 1))
t_b = t[:, 1].reshape((-1, 1))
# the cartesion point for where the line hits the plane defined by
# axis
on_a = (ray_directions * t_a) + ray_origins
on_b = (ray_directions * t_b) + ray_origins
on_plane = np.column_stack(
(on_a, on_b)).reshape(
(-1, 2, ray_directions.shape[1]))
ray_bounding = np.hstack((on_plane.min(axis=1),
on_plane.max(axis=1)))
# pad the bounding box by TOL_BUFFER
# not sure if this is necessary, but if the ray is axis aligned
# this function will otherwise return zero volume bounding boxes
# which may or may not screw up the r-tree intersection queries
ray_bounding += np.array([-1, -1, -1, 1, 1, 1]) * buffer_dist
return ray_bounding | def function[ray_bounds, parameter[ray_origins, ray_directions, bounds, buffer_dist]]:
constant[
Given a set of rays and a bounding box for the volume of interest
where the rays will be passing through, find the bounding boxes
of the rays as they pass through the volume.
Parameters
------------
ray_origins: (m,3) float, ray origin points
ray_directions: (m,3) float, ray direction vectors
bounds: (2,3) bounding box (min, max)
buffer_dist: float, distance to pad zero width bounding boxes
Returns
---------
ray_bounding: (n) set of AABB of rays passing through volume
]
variable[ray_origins] assign[=] call[name[np].asanyarray, parameter[name[ray_origins]]]
variable[ray_directions] assign[=] call[name[np].asanyarray, parameter[name[ray_directions]]]
variable[bounds] assign[=] call[name[np].asanyarray, parameter[name[bounds]]]
variable[axis] assign[=] call[call[name[np].abs, parameter[name[ray_directions]]].argmax, parameter[]]
variable[axis_bound] assign[=] call[call[name[bounds].reshape, parameter[tuple[[<ast.Constant object at 0x7da20c7cb1c0>, <ast.UnaryOp object at 0x7da20c7cae30>]]]].T][name[axis]]
variable[axis_ori] assign[=] call[call[name[np].array, parameter[<ast.ListComp object at 0x7da20c7c96f0>]].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da20c7c8b80>, <ast.Constant object at 0x7da20c7cac20>]]]]
variable[axis_dir] assign[=] call[call[name[np].array, parameter[<ast.ListComp object at 0x7da20c7ca680>]].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da20c7ca410>, <ast.Constant object at 0x7da20c7cbf40>]]]]
variable[t] assign[=] binary_operation[binary_operation[name[axis_bound] - name[axis_ori]] / name[axis_dir]]
call[name[t]][compare[name[t] less[<] name[buffer_dist]]] assign[=] name[buffer_dist]
variable[t_a] assign[=] call[call[name[t]][tuple[[<ast.Slice object at 0x7da20c7cbca0>, <ast.Constant object at 0x7da20c7cb760>]]].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da20c7ca050>, <ast.Constant object at 0x7da20c7c98d0>]]]]
variable[t_b] assign[=] call[call[name[t]][tuple[[<ast.Slice object at 0x7da20c7cb580>, <ast.Constant object at 0x7da20c7ca7a0>]]].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da20c7cac80>, <ast.Constant object at 0x7da20c7c8640>]]]]
variable[on_a] assign[=] binary_operation[binary_operation[name[ray_directions] * name[t_a]] + name[ray_origins]]
variable[on_b] assign[=] binary_operation[binary_operation[name[ray_directions] * name[t_b]] + name[ray_origins]]
variable[on_plane] assign[=] call[call[name[np].column_stack, parameter[tuple[[<ast.Name object at 0x7da20c7cb970>, <ast.Name object at 0x7da20c7cbee0>]]]].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da20c7ca4a0>, <ast.Constant object at 0x7da20c7c9bd0>, <ast.Subscript object at 0x7da20c7cb940>]]]]
variable[ray_bounding] assign[=] call[name[np].hstack, parameter[tuple[[<ast.Call object at 0x7da20c7caef0>, <ast.Call object at 0x7da20c7c9e40>]]]]
<ast.AugAssign object at 0x7da20c7caa10>
return[name[ray_bounding]] | keyword[def] identifier[ray_bounds] ( identifier[ray_origins] ,
identifier[ray_directions] ,
identifier[bounds] ,
identifier[buffer_dist] = literal[int] ):
literal[string]
identifier[ray_origins] = identifier[np] . identifier[asanyarray] ( identifier[ray_origins] , identifier[dtype] = identifier[np] . identifier[float64] )
identifier[ray_directions] = identifier[np] . identifier[asanyarray] ( identifier[ray_directions] , identifier[dtype] = identifier[np] . identifier[float64] )
identifier[bounds] = identifier[np] . identifier[asanyarray] ( identifier[bounds] )
identifier[axis] = identifier[np] . identifier[abs] ( identifier[ray_directions] ). identifier[argmax] ( identifier[axis] = literal[int] )
identifier[axis_bound] = identifier[bounds] . identifier[reshape] (( literal[int] ,- literal[int] )). identifier[T] [ identifier[axis] ]
identifier[axis_ori] = identifier[np] . identifier[array] ([ identifier[ray_origins] [ identifier[i] ][ identifier[a] ]
keyword[for] identifier[i] , identifier[a] keyword[in] identifier[enumerate] ( identifier[axis] )]). identifier[reshape] ((- literal[int] , literal[int] ))
identifier[axis_dir] = identifier[np] . identifier[array] ([ identifier[ray_directions] [ identifier[i] ][ identifier[a] ]
keyword[for] identifier[i] , identifier[a] keyword[in] identifier[enumerate] ( identifier[axis] )]). identifier[reshape] ((- literal[int] , literal[int] ))
identifier[t] =( identifier[axis_bound] - identifier[axis_ori] )/ identifier[axis_dir]
identifier[t] [ identifier[t] < identifier[buffer_dist] ]= identifier[buffer_dist]
identifier[t_a] = identifier[t] [:, literal[int] ]. identifier[reshape] ((- literal[int] , literal[int] ))
identifier[t_b] = identifier[t] [:, literal[int] ]. identifier[reshape] ((- literal[int] , literal[int] ))
identifier[on_a] =( identifier[ray_directions] * identifier[t_a] )+ identifier[ray_origins]
identifier[on_b] =( identifier[ray_directions] * identifier[t_b] )+ identifier[ray_origins]
identifier[on_plane] = identifier[np] . identifier[column_stack] (
( identifier[on_a] , identifier[on_b] )). identifier[reshape] (
(- literal[int] , literal[int] , identifier[ray_directions] . identifier[shape] [ literal[int] ]))
identifier[ray_bounding] = identifier[np] . identifier[hstack] (( identifier[on_plane] . identifier[min] ( identifier[axis] = literal[int] ),
identifier[on_plane] . identifier[max] ( identifier[axis] = literal[int] )))
identifier[ray_bounding] += identifier[np] . identifier[array] ([- literal[int] ,- literal[int] ,- literal[int] , literal[int] , literal[int] , literal[int] ])* identifier[buffer_dist]
keyword[return] identifier[ray_bounding] | def ray_bounds(ray_origins, ray_directions, bounds, buffer_dist=1e-05):
"""
Given a set of rays and a bounding box for the volume of interest
where the rays will be passing through, find the bounding boxes
of the rays as they pass through the volume.
Parameters
------------
ray_origins: (m,3) float, ray origin points
ray_directions: (m,3) float, ray direction vectors
bounds: (2,3) bounding box (min, max)
buffer_dist: float, distance to pad zero width bounding boxes
Returns
---------
ray_bounding: (n) set of AABB of rays passing through volume
"""
ray_origins = np.asanyarray(ray_origins, dtype=np.float64)
ray_directions = np.asanyarray(ray_directions, dtype=np.float64)
# bounding box we are testing against
bounds = np.asanyarray(bounds)
# find the primary axis of the vector
axis = np.abs(ray_directions).argmax(axis=1)
axis_bound = bounds.reshape((2, -1)).T[axis]
axis_ori = np.array([ray_origins[i][a] for (i, a) in enumerate(axis)]).reshape((-1, 1))
axis_dir = np.array([ray_directions[i][a] for (i, a) in enumerate(axis)]).reshape((-1, 1))
# parametric equation of a line
# point = direction*t + origin
# p = dt + o
# t = (p-o)/d
t = (axis_bound - axis_ori) / axis_dir
# prevent the bounding box from including triangles
# behind the ray origin
t[t < buffer_dist] = buffer_dist
# the value of t for both the upper and lower bounds
t_a = t[:, 0].reshape((-1, 1))
t_b = t[:, 1].reshape((-1, 1))
# the cartesion point for where the line hits the plane defined by
# axis
on_a = ray_directions * t_a + ray_origins
on_b = ray_directions * t_b + ray_origins
on_plane = np.column_stack((on_a, on_b)).reshape((-1, 2, ray_directions.shape[1]))
ray_bounding = np.hstack((on_plane.min(axis=1), on_plane.max(axis=1)))
# pad the bounding box by TOL_BUFFER
# not sure if this is necessary, but if the ray is axis aligned
# this function will otherwise return zero volume bounding boxes
# which may or may not screw up the r-tree intersection queries
ray_bounding += np.array([-1, -1, -1, 1, 1, 1]) * buffer_dist
return ray_bounding |
def bind_pale_to_webapp2(pale_app_module,
webapp_wsgiapplication,
route_prefix=None):
"""Binds a Pale API implementation to a webapp2 WSGIApplication"""
if not isinstance(webapp_wsgiapplication, webapp2.WSGIApplication):
raise TypeError("pale.adapters.webapp2.bind_pale_to_webapp2 expected "
"the passed in webapp_wsgiapplication to be an instance of "
"WSGIApplication, but it was an instance of %s instead."
% (type(webapp_wsgiapplication), ))
if not pale.is_pale_module(pale_app_module):
raise TypeError("pale.adapters.webapp2.bind_pale_to_webapp2 expected "
"the passed in pale_app_module to be a Python module with a "
"`_module_type` value equal to `pale.ImplementationModule`, "
"but it found an instance of %s instead."
% (type(pale_app_module), ))
endpoints = pale.extract_endpoints(pale_app_module)
for endpoint in endpoints:
endpoint._set_response_class(RESPONSE_CLASS)
method = endpoint._http_method
name = endpoint._route_name
req_handler = pale_webapp2_request_handler_generator(endpoint)
route_uri = endpoint._uri
if route_prefix is not None:
route_uri = "%s%s" % (route_prefix, route_uri)
route = webapp2.Route(
route_uri,
handler=req_handler,
name=name,
handler_method='pale_handler',
methods=[method, "OPTIONS"])
webapp_wsgiapplication.router.add(route) | def function[bind_pale_to_webapp2, parameter[pale_app_module, webapp_wsgiapplication, route_prefix]]:
constant[Binds a Pale API implementation to a webapp2 WSGIApplication]
if <ast.UnaryOp object at 0x7da20c993cd0> begin[:]
<ast.Raise object at 0x7da20c992ce0>
if <ast.UnaryOp object at 0x7da20c9901f0> begin[:]
<ast.Raise object at 0x7da18ede7b50>
variable[endpoints] assign[=] call[name[pale].extract_endpoints, parameter[name[pale_app_module]]]
for taget[name[endpoint]] in starred[name[endpoints]] begin[:]
call[name[endpoint]._set_response_class, parameter[name[RESPONSE_CLASS]]]
variable[method] assign[=] name[endpoint]._http_method
variable[name] assign[=] name[endpoint]._route_name
variable[req_handler] assign[=] call[name[pale_webapp2_request_handler_generator], parameter[name[endpoint]]]
variable[route_uri] assign[=] name[endpoint]._uri
if compare[name[route_prefix] is_not constant[None]] begin[:]
variable[route_uri] assign[=] binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18ede58a0>, <ast.Name object at 0x7da18ede7a90>]]]
variable[route] assign[=] call[name[webapp2].Route, parameter[name[route_uri]]]
call[name[webapp_wsgiapplication].router.add, parameter[name[route]]] | keyword[def] identifier[bind_pale_to_webapp2] ( identifier[pale_app_module] ,
identifier[webapp_wsgiapplication] ,
identifier[route_prefix] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[webapp_wsgiapplication] , identifier[webapp2] . identifier[WSGIApplication] ):
keyword[raise] identifier[TypeError] ( literal[string]
literal[string]
literal[string]
%( identifier[type] ( identifier[webapp_wsgiapplication] ),))
keyword[if] keyword[not] identifier[pale] . identifier[is_pale_module] ( identifier[pale_app_module] ):
keyword[raise] identifier[TypeError] ( literal[string]
literal[string]
literal[string]
literal[string]
%( identifier[type] ( identifier[pale_app_module] ),))
identifier[endpoints] = identifier[pale] . identifier[extract_endpoints] ( identifier[pale_app_module] )
keyword[for] identifier[endpoint] keyword[in] identifier[endpoints] :
identifier[endpoint] . identifier[_set_response_class] ( identifier[RESPONSE_CLASS] )
identifier[method] = identifier[endpoint] . identifier[_http_method]
identifier[name] = identifier[endpoint] . identifier[_route_name]
identifier[req_handler] = identifier[pale_webapp2_request_handler_generator] ( identifier[endpoint] )
identifier[route_uri] = identifier[endpoint] . identifier[_uri]
keyword[if] identifier[route_prefix] keyword[is] keyword[not] keyword[None] :
identifier[route_uri] = literal[string] %( identifier[route_prefix] , identifier[route_uri] )
identifier[route] = identifier[webapp2] . identifier[Route] (
identifier[route_uri] ,
identifier[handler] = identifier[req_handler] ,
identifier[name] = identifier[name] ,
identifier[handler_method] = literal[string] ,
identifier[methods] =[ identifier[method] , literal[string] ])
identifier[webapp_wsgiapplication] . identifier[router] . identifier[add] ( identifier[route] ) | def bind_pale_to_webapp2(pale_app_module, webapp_wsgiapplication, route_prefix=None):
"""Binds a Pale API implementation to a webapp2 WSGIApplication"""
if not isinstance(webapp_wsgiapplication, webapp2.WSGIApplication):
raise TypeError('pale.adapters.webapp2.bind_pale_to_webapp2 expected the passed in webapp_wsgiapplication to be an instance of WSGIApplication, but it was an instance of %s instead.' % (type(webapp_wsgiapplication),)) # depends on [control=['if'], data=[]]
if not pale.is_pale_module(pale_app_module):
raise TypeError('pale.adapters.webapp2.bind_pale_to_webapp2 expected the passed in pale_app_module to be a Python module with a `_module_type` value equal to `pale.ImplementationModule`, but it found an instance of %s instead.' % (type(pale_app_module),)) # depends on [control=['if'], data=[]]
endpoints = pale.extract_endpoints(pale_app_module)
for endpoint in endpoints:
endpoint._set_response_class(RESPONSE_CLASS)
method = endpoint._http_method
name = endpoint._route_name
req_handler = pale_webapp2_request_handler_generator(endpoint)
route_uri = endpoint._uri
if route_prefix is not None:
route_uri = '%s%s' % (route_prefix, route_uri) # depends on [control=['if'], data=['route_prefix']]
route = webapp2.Route(route_uri, handler=req_handler, name=name, handler_method='pale_handler', methods=[method, 'OPTIONS'])
webapp_wsgiapplication.router.add(route) # depends on [control=['for'], data=['endpoint']] |
def _process_maybe_work(self, yes_work, maybe_work, work_dir,
yn_results_path, stats):
"""Returns statistics of how `yes_work` compares with `maybe_work`.
:param yes_work: name of work for which stats are collected
:type yes_work: `str`
:param maybe_work: name of work being compared with `yes_work`
:type maybe_work: `str`
:param work_dir: directory where generated files are saved
:type work_dir: `str`
:param yn_results_path: path to results intersecting
`yes_work` with "no" works
:type yn_results_path: `str`
:param stats: data structure to hold statistical data of the
comparison
:type stats: `dict`
:rtype: `dict`
"""
if maybe_work == yes_work:
return stats
self._logger.info(
'Processing "maybe" work {} against "yes" work {}.'.format(
maybe_work, yes_work))
# Set base values for each statistic of interest, for each
# witness.
for siglum in self._corpus.get_sigla(maybe_work):
witness = (maybe_work, siglum)
stats[COMMON][witness] = 0
stats[SHARED][witness] = 0
stats[UNIQUE][witness] = 100
works = [yes_work, maybe_work]
# Sort the works to have a single filename for the
# intersection each pair of works, whether they are yes or
# maybe. This saves repeating the intersection with the roles
# switched, since _run_query will use a found file rather than
# rerun the query.
works.sort()
ym_results_path = os.path.join(
self._ym_intersects_dir, '{}_intersect_{}.csv'.format(*works))
stats = self._process_intersection(yes_work, maybe_work, work_dir,
ym_results_path, stats)
stats = self._process_diff(yes_work, maybe_work, work_dir,
ym_results_path, yn_results_path, stats)
return stats | def function[_process_maybe_work, parameter[self, yes_work, maybe_work, work_dir, yn_results_path, stats]]:
constant[Returns statistics of how `yes_work` compares with `maybe_work`.
:param yes_work: name of work for which stats are collected
:type yes_work: `str`
:param maybe_work: name of work being compared with `yes_work`
:type maybe_work: `str`
:param work_dir: directory where generated files are saved
:type work_dir: `str`
:param yn_results_path: path to results intersecting
`yes_work` with "no" works
:type yn_results_path: `str`
:param stats: data structure to hold statistical data of the
comparison
:type stats: `dict`
:rtype: `dict`
]
if compare[name[maybe_work] equal[==] name[yes_work]] begin[:]
return[name[stats]]
call[name[self]._logger.info, parameter[call[constant[Processing "maybe" work {} against "yes" work {}.].format, parameter[name[maybe_work], name[yes_work]]]]]
for taget[name[siglum]] in starred[call[name[self]._corpus.get_sigla, parameter[name[maybe_work]]]] begin[:]
variable[witness] assign[=] tuple[[<ast.Name object at 0x7da1b19c03a0>, <ast.Name object at 0x7da1b19c0880>]]
call[call[name[stats]][name[COMMON]]][name[witness]] assign[=] constant[0]
call[call[name[stats]][name[SHARED]]][name[witness]] assign[=] constant[0]
call[call[name[stats]][name[UNIQUE]]][name[witness]] assign[=] constant[100]
variable[works] assign[=] list[[<ast.Name object at 0x7da1b19c0d90>, <ast.Name object at 0x7da1b19c0280>]]
call[name[works].sort, parameter[]]
variable[ym_results_path] assign[=] call[name[os].path.join, parameter[name[self]._ym_intersects_dir, call[constant[{}_intersect_{}.csv].format, parameter[<ast.Starred object at 0x7da1b19c0ac0>]]]]
variable[stats] assign[=] call[name[self]._process_intersection, parameter[name[yes_work], name[maybe_work], name[work_dir], name[ym_results_path], name[stats]]]
variable[stats] assign[=] call[name[self]._process_diff, parameter[name[yes_work], name[maybe_work], name[work_dir], name[ym_results_path], name[yn_results_path], name[stats]]]
return[name[stats]] | keyword[def] identifier[_process_maybe_work] ( identifier[self] , identifier[yes_work] , identifier[maybe_work] , identifier[work_dir] ,
identifier[yn_results_path] , identifier[stats] ):
literal[string]
keyword[if] identifier[maybe_work] == identifier[yes_work] :
keyword[return] identifier[stats]
identifier[self] . identifier[_logger] . identifier[info] (
literal[string] . identifier[format] (
identifier[maybe_work] , identifier[yes_work] ))
keyword[for] identifier[siglum] keyword[in] identifier[self] . identifier[_corpus] . identifier[get_sigla] ( identifier[maybe_work] ):
identifier[witness] =( identifier[maybe_work] , identifier[siglum] )
identifier[stats] [ identifier[COMMON] ][ identifier[witness] ]= literal[int]
identifier[stats] [ identifier[SHARED] ][ identifier[witness] ]= literal[int]
identifier[stats] [ identifier[UNIQUE] ][ identifier[witness] ]= literal[int]
identifier[works] =[ identifier[yes_work] , identifier[maybe_work] ]
identifier[works] . identifier[sort] ()
identifier[ym_results_path] = identifier[os] . identifier[path] . identifier[join] (
identifier[self] . identifier[_ym_intersects_dir] , literal[string] . identifier[format] (* identifier[works] ))
identifier[stats] = identifier[self] . identifier[_process_intersection] ( identifier[yes_work] , identifier[maybe_work] , identifier[work_dir] ,
identifier[ym_results_path] , identifier[stats] )
identifier[stats] = identifier[self] . identifier[_process_diff] ( identifier[yes_work] , identifier[maybe_work] , identifier[work_dir] ,
identifier[ym_results_path] , identifier[yn_results_path] , identifier[stats] )
keyword[return] identifier[stats] | def _process_maybe_work(self, yes_work, maybe_work, work_dir, yn_results_path, stats):
"""Returns statistics of how `yes_work` compares with `maybe_work`.
:param yes_work: name of work for which stats are collected
:type yes_work: `str`
:param maybe_work: name of work being compared with `yes_work`
:type maybe_work: `str`
:param work_dir: directory where generated files are saved
:type work_dir: `str`
:param yn_results_path: path to results intersecting
`yes_work` with "no" works
:type yn_results_path: `str`
:param stats: data structure to hold statistical data of the
comparison
:type stats: `dict`
:rtype: `dict`
"""
if maybe_work == yes_work:
return stats # depends on [control=['if'], data=[]]
self._logger.info('Processing "maybe" work {} against "yes" work {}.'.format(maybe_work, yes_work))
# Set base values for each statistic of interest, for each
# witness.
for siglum in self._corpus.get_sigla(maybe_work):
witness = (maybe_work, siglum)
stats[COMMON][witness] = 0
stats[SHARED][witness] = 0
stats[UNIQUE][witness] = 100 # depends on [control=['for'], data=['siglum']]
works = [yes_work, maybe_work]
# Sort the works to have a single filename for the
# intersection each pair of works, whether they are yes or
# maybe. This saves repeating the intersection with the roles
# switched, since _run_query will use a found file rather than
# rerun the query.
works.sort()
ym_results_path = os.path.join(self._ym_intersects_dir, '{}_intersect_{}.csv'.format(*works))
stats = self._process_intersection(yes_work, maybe_work, work_dir, ym_results_path, stats)
stats = self._process_diff(yes_work, maybe_work, work_dir, ym_results_path, yn_results_path, stats)
return stats |
def fit_lines(self, window=1500, break_thresh=1500):
"""
Fits lines to pitch contours.
:param window: size of each chunk to which linear equation is to be fit (in milliseconds).
To keep it simple, hop is chosen to be one third of the window.
:param break_thresh: If there is silence beyond this limit (in milliseconds),
the contour will be broken there into two so that we don't fit a line over and
including the silent region.
"""
window /= 1000
hop = window/3
break_thresh /= 1000
#cut the whole song into pieces if there are gaps more than break_thresh seconds
i = 0
break_indices = []
count = 0
while i < len(self.pitch):
if self.pitch[i] == -10000:
count = 1
start_index = i
while i < len(self.pitch) and self.pitch[i] == -10000:
count += 1
i += 1
end_index = i-1
if self.timestamps[end_index]-self.timestamps[start_index] >= break_thresh:
break_indices.append([start_index, end_index])
i += 1
break_indices = np.array(break_indices)
#In creating the data blocks which are not silences, note that we
# take complimentary break indices. i.e., if [[s1, e1], [s2, e2] ...]
# is break_indices, we take e1-s2, e2-s3 chunks and build data blocks
data_blocks = []
if len(break_indices) == 0:
t_pitch = self.pitch.reshape(len(self.pitch), 1)
t_timestamps = self.timestamps.reshape(len(self.timestamps), 1)
data_blocks = [np.append(t_timestamps, t_pitch, axis=1)]
else:
if break_indices[0, 0] != 0:
t_pitch = self.pitch[:break_indices[0, 0]]
t_pitch = t_pitch.reshape(len(t_pitch), 1)
t_timestamps = self.timestamps[:break_indices[0, 0]]
t_timestamps = t_timestamps.reshape(len(t_timestamps), 1)
data_blocks.append(np.append(t_timestamps, t_pitch, axis=1))
block_start = break_indices[0, 1]
for i in xrange(1, len(break_indices)):
block_end = break_indices[i, 0]
t_pitch = self.pitch[block_start:block_end]
t_pitch = t_pitch.reshape(len(t_pitch), 1)
t_timestamps = self.timestamps[block_start:block_end]
t_timestamps = t_timestamps.reshape(len(t_timestamps), 1)
data_blocks.append(np.append(t_timestamps, t_pitch, axis=1))
block_start = break_indices[i, 1]
if block_start != len(self.pitch)-1:
t_pitch = self.pitch[block_start:]
t_pitch = t_pitch.reshape(len(t_pitch), 1)
t_timestamps = self.timestamps[block_start:]
t_timestamps = t_timestamps.reshape(len(t_timestamps), 1)
data_blocks.append(np.append(t_timestamps, t_pitch, axis=1))
label_start_offset = (window-hop)/2
label_end_offset = label_start_offset+hop
#dataNew = np.zeros_like(data)
#dataNew[:, 0] = data[:, 0]
data_new = np.array([[0, 0]])
for data in data_blocks:
start_index = 0
while start_index < len(data)-1:
end_index = utils.find_nearest_index(data[:, 0], data[start_index][0]+window)
segment = data[start_index:end_index]
if len(segment) == 0:
start_index = utils.find_nearest_index(data[:, 0], data[start_index, 0]+hop)
continue
segment_clean = np.delete(segment, np.where(segment[:, 1] == -10000), axis=0)
if len(segment_clean) == 0:
#After splitting into blocks, this loop better not come into play
#raise ValueError("This part of the block is absolute silence! Make sure block_thresh >= window!")
start_index = utils.find_nearest_index(data[:, 0], data[start_index, 0]+hop)
continue
n_clean = len(segment_clean)
x_clean = np.matrix(segment_clean[:, 0]).reshape(n_clean, 1)
y_clean = np.matrix(segment_clean[:, 1]).reshape(n_clean, 1)
#return [x_clean, y_clean]
theta = utils.normal_equation(x_clean, y_clean)
#determine the start and end of the segment to be labelled
label_start_index = utils.find_nearest_index(x_clean, data[start_index, 0]+label_start_offset)
label_end_index = utils.find_nearest_index(x_clean, data[start_index, 0]+label_end_offset)
x_clean = x_clean[label_start_index:label_end_index]
#return x_clean
x_clean = np.insert(x_clean, 0, np.ones(len(x_clean)), axis=1)
newy = x_clean*theta
result = np.append(x_clean[:, 1], newy, axis=1)
data_new = np.append(data_new, result, axis=0)
start_index = utils.find_nearest_index(data[:, 0], data[start_index, 0]+hop)
return [data_new[:, 0], data_new[:, 1]] | def function[fit_lines, parameter[self, window, break_thresh]]:
constant[
Fits lines to pitch contours.
:param window: size of each chunk to which linear equation is to be fit (in milliseconds).
To keep it simple, hop is chosen to be one third of the window.
:param break_thresh: If there is silence beyond this limit (in milliseconds),
the contour will be broken there into two so that we don't fit a line over and
including the silent region.
]
<ast.AugAssign object at 0x7da18f09caf0>
variable[hop] assign[=] binary_operation[name[window] / constant[3]]
<ast.AugAssign object at 0x7da18f09ecb0>
variable[i] assign[=] constant[0]
variable[break_indices] assign[=] list[[]]
variable[count] assign[=] constant[0]
while compare[name[i] less[<] call[name[len], parameter[name[self].pitch]]] begin[:]
if compare[call[name[self].pitch][name[i]] equal[==] <ast.UnaryOp object at 0x7da18f09f610>] begin[:]
variable[count] assign[=] constant[1]
variable[start_index] assign[=] name[i]
while <ast.BoolOp object at 0x7da18f09d270> begin[:]
<ast.AugAssign object at 0x7da18f09cc40>
<ast.AugAssign object at 0x7da18f09e3e0>
variable[end_index] assign[=] binary_operation[name[i] - constant[1]]
if compare[binary_operation[call[name[self].timestamps][name[end_index]] - call[name[self].timestamps][name[start_index]]] greater_or_equal[>=] name[break_thresh]] begin[:]
call[name[break_indices].append, parameter[list[[<ast.Name object at 0x7da18f09d480>, <ast.Name object at 0x7da18f09f9a0>]]]]
<ast.AugAssign object at 0x7da18f09e9b0>
variable[break_indices] assign[=] call[name[np].array, parameter[name[break_indices]]]
variable[data_blocks] assign[=] list[[]]
if compare[call[name[len], parameter[name[break_indices]]] equal[==] constant[0]] begin[:]
variable[t_pitch] assign[=] call[name[self].pitch.reshape, parameter[call[name[len], parameter[name[self].pitch]], constant[1]]]
variable[t_timestamps] assign[=] call[name[self].timestamps.reshape, parameter[call[name[len], parameter[name[self].timestamps]], constant[1]]]
variable[data_blocks] assign[=] list[[<ast.Call object at 0x7da18f09ebf0>]]
variable[label_start_offset] assign[=] binary_operation[binary_operation[name[window] - name[hop]] / constant[2]]
variable[label_end_offset] assign[=] binary_operation[name[label_start_offset] + name[hop]]
variable[data_new] assign[=] call[name[np].array, parameter[list[[<ast.List object at 0x7da18dc98ac0>]]]]
for taget[name[data]] in starred[name[data_blocks]] begin[:]
variable[start_index] assign[=] constant[0]
while compare[name[start_index] less[<] binary_operation[call[name[len], parameter[name[data]]] - constant[1]]] begin[:]
variable[end_index] assign[=] call[name[utils].find_nearest_index, parameter[call[name[data]][tuple[[<ast.Slice object at 0x7da18dc99600>, <ast.Constant object at 0x7da18dc9a4a0>]]], binary_operation[call[call[name[data]][name[start_index]]][constant[0]] + name[window]]]]
variable[segment] assign[=] call[name[data]][<ast.Slice object at 0x7da18dc991b0>]
if compare[call[name[len], parameter[name[segment]]] equal[==] constant[0]] begin[:]
variable[start_index] assign[=] call[name[utils].find_nearest_index, parameter[call[name[data]][tuple[[<ast.Slice object at 0x7da18dc99a80>, <ast.Constant object at 0x7da18dc9bb80>]]], binary_operation[call[name[data]][tuple[[<ast.Name object at 0x7da18dc99cc0>, <ast.Constant object at 0x7da18dc99ab0>]]] + name[hop]]]]
continue
variable[segment_clean] assign[=] call[name[np].delete, parameter[name[segment], call[name[np].where, parameter[compare[call[name[segment]][tuple[[<ast.Slice object at 0x7da18dc98880>, <ast.Constant object at 0x7da18dc98fd0>]]] equal[==] <ast.UnaryOp object at 0x7da18dc986a0>]]]]]
if compare[call[name[len], parameter[name[segment_clean]]] equal[==] constant[0]] begin[:]
variable[start_index] assign[=] call[name[utils].find_nearest_index, parameter[call[name[data]][tuple[[<ast.Slice object at 0x7da18bc70af0>, <ast.Constant object at 0x7da18bc71660>]]], binary_operation[call[name[data]][tuple[[<ast.Name object at 0x7da18bc72a70>, <ast.Constant object at 0x7da18bc713f0>]]] + name[hop]]]]
continue
variable[n_clean] assign[=] call[name[len], parameter[name[segment_clean]]]
variable[x_clean] assign[=] call[call[name[np].matrix, parameter[call[name[segment_clean]][tuple[[<ast.Slice object at 0x7da18bc729e0>, <ast.Constant object at 0x7da18bc71930>]]]]].reshape, parameter[name[n_clean], constant[1]]]
variable[y_clean] assign[=] call[call[name[np].matrix, parameter[call[name[segment_clean]][tuple[[<ast.Slice object at 0x7da18bc72d10>, <ast.Constant object at 0x7da18bc72b30>]]]]].reshape, parameter[name[n_clean], constant[1]]]
variable[theta] assign[=] call[name[utils].normal_equation, parameter[name[x_clean], name[y_clean]]]
variable[label_start_index] assign[=] call[name[utils].find_nearest_index, parameter[name[x_clean], binary_operation[call[name[data]][tuple[[<ast.Name object at 0x7da18bc72ce0>, <ast.Constant object at 0x7da18bc724a0>]]] + name[label_start_offset]]]]
variable[label_end_index] assign[=] call[name[utils].find_nearest_index, parameter[name[x_clean], binary_operation[call[name[data]][tuple[[<ast.Name object at 0x7da207f99d80>, <ast.Constant object at 0x7da207f988b0>]]] + name[label_end_offset]]]]
variable[x_clean] assign[=] call[name[x_clean]][<ast.Slice object at 0x7da207f9bf10>]
variable[x_clean] assign[=] call[name[np].insert, parameter[name[x_clean], constant[0], call[name[np].ones, parameter[call[name[len], parameter[name[x_clean]]]]]]]
variable[newy] assign[=] binary_operation[name[x_clean] * name[theta]]
variable[result] assign[=] call[name[np].append, parameter[call[name[x_clean]][tuple[[<ast.Slice object at 0x7da207f98a90>, <ast.Constant object at 0x7da207f9b640>]]], name[newy]]]
variable[data_new] assign[=] call[name[np].append, parameter[name[data_new], name[result]]]
variable[start_index] assign[=] call[name[utils].find_nearest_index, parameter[call[name[data]][tuple[[<ast.Slice object at 0x7da207f9b3d0>, <ast.Constant object at 0x7da207f98d60>]]], binary_operation[call[name[data]][tuple[[<ast.Name object at 0x7da207f98e20>, <ast.Constant object at 0x7da207f9a470>]]] + name[hop]]]]
return[list[[<ast.Subscript object at 0x7da207f9afe0>, <ast.Subscript object at 0x7da207f983a0>]]] | keyword[def] identifier[fit_lines] ( identifier[self] , identifier[window] = literal[int] , identifier[break_thresh] = literal[int] ):
literal[string]
identifier[window] /= literal[int]
identifier[hop] = identifier[window] / literal[int]
identifier[break_thresh] /= literal[int]
identifier[i] = literal[int]
identifier[break_indices] =[]
identifier[count] = literal[int]
keyword[while] identifier[i] < identifier[len] ( identifier[self] . identifier[pitch] ):
keyword[if] identifier[self] . identifier[pitch] [ identifier[i] ]==- literal[int] :
identifier[count] = literal[int]
identifier[start_index] = identifier[i]
keyword[while] identifier[i] < identifier[len] ( identifier[self] . identifier[pitch] ) keyword[and] identifier[self] . identifier[pitch] [ identifier[i] ]==- literal[int] :
identifier[count] += literal[int]
identifier[i] += literal[int]
identifier[end_index] = identifier[i] - literal[int]
keyword[if] identifier[self] . identifier[timestamps] [ identifier[end_index] ]- identifier[self] . identifier[timestamps] [ identifier[start_index] ]>= identifier[break_thresh] :
identifier[break_indices] . identifier[append] ([ identifier[start_index] , identifier[end_index] ])
identifier[i] += literal[int]
identifier[break_indices] = identifier[np] . identifier[array] ( identifier[break_indices] )
identifier[data_blocks] =[]
keyword[if] identifier[len] ( identifier[break_indices] )== literal[int] :
identifier[t_pitch] = identifier[self] . identifier[pitch] . identifier[reshape] ( identifier[len] ( identifier[self] . identifier[pitch] ), literal[int] )
identifier[t_timestamps] = identifier[self] . identifier[timestamps] . identifier[reshape] ( identifier[len] ( identifier[self] . identifier[timestamps] ), literal[int] )
identifier[data_blocks] =[ identifier[np] . identifier[append] ( identifier[t_timestamps] , identifier[t_pitch] , identifier[axis] = literal[int] )]
keyword[else] :
keyword[if] identifier[break_indices] [ literal[int] , literal[int] ]!= literal[int] :
identifier[t_pitch] = identifier[self] . identifier[pitch] [: identifier[break_indices] [ literal[int] , literal[int] ]]
identifier[t_pitch] = identifier[t_pitch] . identifier[reshape] ( identifier[len] ( identifier[t_pitch] ), literal[int] )
identifier[t_timestamps] = identifier[self] . identifier[timestamps] [: identifier[break_indices] [ literal[int] , literal[int] ]]
identifier[t_timestamps] = identifier[t_timestamps] . identifier[reshape] ( identifier[len] ( identifier[t_timestamps] ), literal[int] )
identifier[data_blocks] . identifier[append] ( identifier[np] . identifier[append] ( identifier[t_timestamps] , identifier[t_pitch] , identifier[axis] = literal[int] ))
identifier[block_start] = identifier[break_indices] [ literal[int] , literal[int] ]
keyword[for] identifier[i] keyword[in] identifier[xrange] ( literal[int] , identifier[len] ( identifier[break_indices] )):
identifier[block_end] = identifier[break_indices] [ identifier[i] , literal[int] ]
identifier[t_pitch] = identifier[self] . identifier[pitch] [ identifier[block_start] : identifier[block_end] ]
identifier[t_pitch] = identifier[t_pitch] . identifier[reshape] ( identifier[len] ( identifier[t_pitch] ), literal[int] )
identifier[t_timestamps] = identifier[self] . identifier[timestamps] [ identifier[block_start] : identifier[block_end] ]
identifier[t_timestamps] = identifier[t_timestamps] . identifier[reshape] ( identifier[len] ( identifier[t_timestamps] ), literal[int] )
identifier[data_blocks] . identifier[append] ( identifier[np] . identifier[append] ( identifier[t_timestamps] , identifier[t_pitch] , identifier[axis] = literal[int] ))
identifier[block_start] = identifier[break_indices] [ identifier[i] , literal[int] ]
keyword[if] identifier[block_start] != identifier[len] ( identifier[self] . identifier[pitch] )- literal[int] :
identifier[t_pitch] = identifier[self] . identifier[pitch] [ identifier[block_start] :]
identifier[t_pitch] = identifier[t_pitch] . identifier[reshape] ( identifier[len] ( identifier[t_pitch] ), literal[int] )
identifier[t_timestamps] = identifier[self] . identifier[timestamps] [ identifier[block_start] :]
identifier[t_timestamps] = identifier[t_timestamps] . identifier[reshape] ( identifier[len] ( identifier[t_timestamps] ), literal[int] )
identifier[data_blocks] . identifier[append] ( identifier[np] . identifier[append] ( identifier[t_timestamps] , identifier[t_pitch] , identifier[axis] = literal[int] ))
identifier[label_start_offset] =( identifier[window] - identifier[hop] )/ literal[int]
identifier[label_end_offset] = identifier[label_start_offset] + identifier[hop]
identifier[data_new] = identifier[np] . identifier[array] ([[ literal[int] , literal[int] ]])
keyword[for] identifier[data] keyword[in] identifier[data_blocks] :
identifier[start_index] = literal[int]
keyword[while] identifier[start_index] < identifier[len] ( identifier[data] )- literal[int] :
identifier[end_index] = identifier[utils] . identifier[find_nearest_index] ( identifier[data] [:, literal[int] ], identifier[data] [ identifier[start_index] ][ literal[int] ]+ identifier[window] )
identifier[segment] = identifier[data] [ identifier[start_index] : identifier[end_index] ]
keyword[if] identifier[len] ( identifier[segment] )== literal[int] :
identifier[start_index] = identifier[utils] . identifier[find_nearest_index] ( identifier[data] [:, literal[int] ], identifier[data] [ identifier[start_index] , literal[int] ]+ identifier[hop] )
keyword[continue]
identifier[segment_clean] = identifier[np] . identifier[delete] ( identifier[segment] , identifier[np] . identifier[where] ( identifier[segment] [:, literal[int] ]==- literal[int] ), identifier[axis] = literal[int] )
keyword[if] identifier[len] ( identifier[segment_clean] )== literal[int] :
identifier[start_index] = identifier[utils] . identifier[find_nearest_index] ( identifier[data] [:, literal[int] ], identifier[data] [ identifier[start_index] , literal[int] ]+ identifier[hop] )
keyword[continue]
identifier[n_clean] = identifier[len] ( identifier[segment_clean] )
identifier[x_clean] = identifier[np] . identifier[matrix] ( identifier[segment_clean] [:, literal[int] ]). identifier[reshape] ( identifier[n_clean] , literal[int] )
identifier[y_clean] = identifier[np] . identifier[matrix] ( identifier[segment_clean] [:, literal[int] ]). identifier[reshape] ( identifier[n_clean] , literal[int] )
identifier[theta] = identifier[utils] . identifier[normal_equation] ( identifier[x_clean] , identifier[y_clean] )
identifier[label_start_index] = identifier[utils] . identifier[find_nearest_index] ( identifier[x_clean] , identifier[data] [ identifier[start_index] , literal[int] ]+ identifier[label_start_offset] )
identifier[label_end_index] = identifier[utils] . identifier[find_nearest_index] ( identifier[x_clean] , identifier[data] [ identifier[start_index] , literal[int] ]+ identifier[label_end_offset] )
identifier[x_clean] = identifier[x_clean] [ identifier[label_start_index] : identifier[label_end_index] ]
identifier[x_clean] = identifier[np] . identifier[insert] ( identifier[x_clean] , literal[int] , identifier[np] . identifier[ones] ( identifier[len] ( identifier[x_clean] )), identifier[axis] = literal[int] )
identifier[newy] = identifier[x_clean] * identifier[theta]
identifier[result] = identifier[np] . identifier[append] ( identifier[x_clean] [:, literal[int] ], identifier[newy] , identifier[axis] = literal[int] )
identifier[data_new] = identifier[np] . identifier[append] ( identifier[data_new] , identifier[result] , identifier[axis] = literal[int] )
identifier[start_index] = identifier[utils] . identifier[find_nearest_index] ( identifier[data] [:, literal[int] ], identifier[data] [ identifier[start_index] , literal[int] ]+ identifier[hop] )
keyword[return] [ identifier[data_new] [:, literal[int] ], identifier[data_new] [:, literal[int] ]] | def fit_lines(self, window=1500, break_thresh=1500):
"""
Fits lines to pitch contours.
:param window: size of each chunk to which linear equation is to be fit (in milliseconds).
To keep it simple, hop is chosen to be one third of the window.
:param break_thresh: If there is silence beyond this limit (in milliseconds),
the contour will be broken there into two so that we don't fit a line over and
including the silent region.
"""
window /= 1000
hop = window / 3
break_thresh /= 1000
#cut the whole song into pieces if there are gaps more than break_thresh seconds
i = 0
break_indices = []
count = 0
while i < len(self.pitch):
if self.pitch[i] == -10000:
count = 1
start_index = i
while i < len(self.pitch) and self.pitch[i] == -10000:
count += 1
i += 1 # depends on [control=['while'], data=[]]
end_index = i - 1
if self.timestamps[end_index] - self.timestamps[start_index] >= break_thresh:
break_indices.append([start_index, end_index]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
i += 1 # depends on [control=['while'], data=['i']]
break_indices = np.array(break_indices)
#In creating the data blocks which are not silences, note that we
# take complimentary break indices. i.e., if [[s1, e1], [s2, e2] ...]
# is break_indices, we take e1-s2, e2-s3 chunks and build data blocks
data_blocks = []
if len(break_indices) == 0:
t_pitch = self.pitch.reshape(len(self.pitch), 1)
t_timestamps = self.timestamps.reshape(len(self.timestamps), 1)
data_blocks = [np.append(t_timestamps, t_pitch, axis=1)] # depends on [control=['if'], data=[]]
else:
if break_indices[0, 0] != 0:
t_pitch = self.pitch[:break_indices[0, 0]]
t_pitch = t_pitch.reshape(len(t_pitch), 1)
t_timestamps = self.timestamps[:break_indices[0, 0]]
t_timestamps = t_timestamps.reshape(len(t_timestamps), 1)
data_blocks.append(np.append(t_timestamps, t_pitch, axis=1)) # depends on [control=['if'], data=[]]
block_start = break_indices[0, 1]
for i in xrange(1, len(break_indices)):
block_end = break_indices[i, 0]
t_pitch = self.pitch[block_start:block_end]
t_pitch = t_pitch.reshape(len(t_pitch), 1)
t_timestamps = self.timestamps[block_start:block_end]
t_timestamps = t_timestamps.reshape(len(t_timestamps), 1)
data_blocks.append(np.append(t_timestamps, t_pitch, axis=1))
block_start = break_indices[i, 1] # depends on [control=['for'], data=['i']]
if block_start != len(self.pitch) - 1:
t_pitch = self.pitch[block_start:]
t_pitch = t_pitch.reshape(len(t_pitch), 1)
t_timestamps = self.timestamps[block_start:]
t_timestamps = t_timestamps.reshape(len(t_timestamps), 1)
data_blocks.append(np.append(t_timestamps, t_pitch, axis=1)) # depends on [control=['if'], data=['block_start']]
label_start_offset = (window - hop) / 2
label_end_offset = label_start_offset + hop
#dataNew = np.zeros_like(data)
#dataNew[:, 0] = data[:, 0]
data_new = np.array([[0, 0]])
for data in data_blocks:
start_index = 0
while start_index < len(data) - 1:
end_index = utils.find_nearest_index(data[:, 0], data[start_index][0] + window)
segment = data[start_index:end_index]
if len(segment) == 0:
start_index = utils.find_nearest_index(data[:, 0], data[start_index, 0] + hop)
continue # depends on [control=['if'], data=[]]
segment_clean = np.delete(segment, np.where(segment[:, 1] == -10000), axis=0)
if len(segment_clean) == 0:
#After splitting into blocks, this loop better not come into play
#raise ValueError("This part of the block is absolute silence! Make sure block_thresh >= window!")
start_index = utils.find_nearest_index(data[:, 0], data[start_index, 0] + hop)
continue # depends on [control=['if'], data=[]]
n_clean = len(segment_clean)
x_clean = np.matrix(segment_clean[:, 0]).reshape(n_clean, 1)
y_clean = np.matrix(segment_clean[:, 1]).reshape(n_clean, 1)
#return [x_clean, y_clean]
theta = utils.normal_equation(x_clean, y_clean)
#determine the start and end of the segment to be labelled
label_start_index = utils.find_nearest_index(x_clean, data[start_index, 0] + label_start_offset)
label_end_index = utils.find_nearest_index(x_clean, data[start_index, 0] + label_end_offset)
x_clean = x_clean[label_start_index:label_end_index]
#return x_clean
x_clean = np.insert(x_clean, 0, np.ones(len(x_clean)), axis=1)
newy = x_clean * theta
result = np.append(x_clean[:, 1], newy, axis=1)
data_new = np.append(data_new, result, axis=0)
start_index = utils.find_nearest_index(data[:, 0], data[start_index, 0] + hop) # depends on [control=['while'], data=['start_index']] # depends on [control=['for'], data=['data']]
return [data_new[:, 0], data_new[:, 1]] |
def _characterize_header(self, header, hgroups):
"""Characterize header groups into different data types.
"""
out = []
for h in [header[g[0]] for g in hgroups]:
this_ctype = None
for ctype, names in self._col_types.items():
if h.startswith(names):
this_ctype = ctype
break
out.append(this_ctype)
return out | def function[_characterize_header, parameter[self, header, hgroups]]:
constant[Characterize header groups into different data types.
]
variable[out] assign[=] list[[]]
for taget[name[h]] in starred[<ast.ListComp object at 0x7da2043466e0>] begin[:]
variable[this_ctype] assign[=] constant[None]
for taget[tuple[[<ast.Name object at 0x7da204346a70>, <ast.Name object at 0x7da204344700>]]] in starred[call[name[self]._col_types.items, parameter[]]] begin[:]
if call[name[h].startswith, parameter[name[names]]] begin[:]
variable[this_ctype] assign[=] name[ctype]
break
call[name[out].append, parameter[name[this_ctype]]]
return[name[out]] | keyword[def] identifier[_characterize_header] ( identifier[self] , identifier[header] , identifier[hgroups] ):
literal[string]
identifier[out] =[]
keyword[for] identifier[h] keyword[in] [ identifier[header] [ identifier[g] [ literal[int] ]] keyword[for] identifier[g] keyword[in] identifier[hgroups] ]:
identifier[this_ctype] = keyword[None]
keyword[for] identifier[ctype] , identifier[names] keyword[in] identifier[self] . identifier[_col_types] . identifier[items] ():
keyword[if] identifier[h] . identifier[startswith] ( identifier[names] ):
identifier[this_ctype] = identifier[ctype]
keyword[break]
identifier[out] . identifier[append] ( identifier[this_ctype] )
keyword[return] identifier[out] | def _characterize_header(self, header, hgroups):
"""Characterize header groups into different data types.
"""
out = []
for h in [header[g[0]] for g in hgroups]:
this_ctype = None
for (ctype, names) in self._col_types.items():
if h.startswith(names):
this_ctype = ctype
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
out.append(this_ctype) # depends on [control=['for'], data=['h']]
return out |
def prevention():
"""The |Transition| for the prevention example from Actual Causation
Figure 5D.
"""
tpm = np.array([
[0.5, 0.5, 1],
[0.5, 0.5, 0],
[0.5, 0.5, 1],
[0.5, 0.5, 1],
[0.5, 0.5, 1],
[0.5, 0.5, 0],
[0.5, 0.5, 1],
[0.5, 0.5, 1]
])
cm = np.array([
[0, 0, 1],
[0, 0, 1],
[0, 0, 0]
])
network = Network(tpm, cm, node_labels=['A', 'B', 'F'])
x_state = (1, 1, 1)
y_state = (1, 1, 1)
return Transition(network, x_state, y_state, (0, 1), (2,)) | def function[prevention, parameter[]]:
constant[The |Transition| for the prevention example from Actual Causation
Figure 5D.
]
variable[tpm] assign[=] call[name[np].array, parameter[list[[<ast.List object at 0x7da18c4cfd30>, <ast.List object at 0x7da18c4cdc90>, <ast.List object at 0x7da18c4cc6d0>, <ast.List object at 0x7da18c4cccd0>, <ast.List object at 0x7da18c4cd540>, <ast.List object at 0x7da18ede7ca0>, <ast.List object at 0x7da18ede5120>, <ast.List object at 0x7da18ede5960>]]]]
variable[cm] assign[=] call[name[np].array, parameter[list[[<ast.List object at 0x7da18ede4580>, <ast.List object at 0x7da18ede4850>, <ast.List object at 0x7da18ede5600>]]]]
variable[network] assign[=] call[name[Network], parameter[name[tpm], name[cm]]]
variable[x_state] assign[=] tuple[[<ast.Constant object at 0x7da18ede4d60>, <ast.Constant object at 0x7da18ede5ba0>, <ast.Constant object at 0x7da18ede53f0>]]
variable[y_state] assign[=] tuple[[<ast.Constant object at 0x7da18ede5660>, <ast.Constant object at 0x7da18ede49d0>, <ast.Constant object at 0x7da18ede70a0>]]
return[call[name[Transition], parameter[name[network], name[x_state], name[y_state], tuple[[<ast.Constant object at 0x7da18ede4700>, <ast.Constant object at 0x7da18ede6710>]], tuple[[<ast.Constant object at 0x7da18ede75e0>]]]]] | keyword[def] identifier[prevention] ():
literal[string]
identifier[tpm] = identifier[np] . identifier[array] ([
[ literal[int] , literal[int] , literal[int] ],
[ literal[int] , literal[int] , literal[int] ],
[ literal[int] , literal[int] , literal[int] ],
[ literal[int] , literal[int] , literal[int] ],
[ literal[int] , literal[int] , literal[int] ],
[ literal[int] , literal[int] , literal[int] ],
[ literal[int] , literal[int] , literal[int] ],
[ literal[int] , literal[int] , literal[int] ]
])
identifier[cm] = identifier[np] . identifier[array] ([
[ literal[int] , literal[int] , literal[int] ],
[ literal[int] , literal[int] , literal[int] ],
[ literal[int] , literal[int] , literal[int] ]
])
identifier[network] = identifier[Network] ( identifier[tpm] , identifier[cm] , identifier[node_labels] =[ literal[string] , literal[string] , literal[string] ])
identifier[x_state] =( literal[int] , literal[int] , literal[int] )
identifier[y_state] =( literal[int] , literal[int] , literal[int] )
keyword[return] identifier[Transition] ( identifier[network] , identifier[x_state] , identifier[y_state] ,( literal[int] , literal[int] ),( literal[int] ,)) | def prevention():
"""The |Transition| for the prevention example from Actual Causation
Figure 5D.
"""
tpm = np.array([[0.5, 0.5, 1], [0.5, 0.5, 0], [0.5, 0.5, 1], [0.5, 0.5, 1], [0.5, 0.5, 1], [0.5, 0.5, 0], [0.5, 0.5, 1], [0.5, 0.5, 1]])
cm = np.array([[0, 0, 1], [0, 0, 1], [0, 0, 0]])
network = Network(tpm, cm, node_labels=['A', 'B', 'F'])
x_state = (1, 1, 1)
y_state = (1, 1, 1)
return Transition(network, x_state, y_state, (0, 1), (2,)) |
def case_to_clinVars(self, case_id):
"""Get all variants included in clinvar submissions for a case
Args:
case_id(str): a case _id
Returns:
submission_variants(dict): keys are variant ids and values are variant submission objects
"""
query = dict(case_id=case_id, csv_type='variant')
clinvar_objs = list(self.clinvar_collection.find(query))
submitted_vars = {}
for clinvar in clinvar_objs:
submitted_vars[clinvar.get('local_id')] = clinvar
return submitted_vars | def function[case_to_clinVars, parameter[self, case_id]]:
constant[Get all variants included in clinvar submissions for a case
Args:
case_id(str): a case _id
Returns:
submission_variants(dict): keys are variant ids and values are variant submission objects
]
variable[query] assign[=] call[name[dict], parameter[]]
variable[clinvar_objs] assign[=] call[name[list], parameter[call[name[self].clinvar_collection.find, parameter[name[query]]]]]
variable[submitted_vars] assign[=] dictionary[[], []]
for taget[name[clinvar]] in starred[name[clinvar_objs]] begin[:]
call[name[submitted_vars]][call[name[clinvar].get, parameter[constant[local_id]]]] assign[=] name[clinvar]
return[name[submitted_vars]] | keyword[def] identifier[case_to_clinVars] ( identifier[self] , identifier[case_id] ):
literal[string]
identifier[query] = identifier[dict] ( identifier[case_id] = identifier[case_id] , identifier[csv_type] = literal[string] )
identifier[clinvar_objs] = identifier[list] ( identifier[self] . identifier[clinvar_collection] . identifier[find] ( identifier[query] ))
identifier[submitted_vars] ={}
keyword[for] identifier[clinvar] keyword[in] identifier[clinvar_objs] :
identifier[submitted_vars] [ identifier[clinvar] . identifier[get] ( literal[string] )]= identifier[clinvar]
keyword[return] identifier[submitted_vars] | def case_to_clinVars(self, case_id):
"""Get all variants included in clinvar submissions for a case
Args:
case_id(str): a case _id
Returns:
submission_variants(dict): keys are variant ids and values are variant submission objects
"""
query = dict(case_id=case_id, csv_type='variant')
clinvar_objs = list(self.clinvar_collection.find(query))
submitted_vars = {}
for clinvar in clinvar_objs:
submitted_vars[clinvar.get('local_id')] = clinvar # depends on [control=['for'], data=['clinvar']]
return submitted_vars |
def add_triangle(self, neighbors, color, center=None, opacity=0.4,
draw_edges=False, edges_color=[0.0, 0.0, 0.0],
edges_linewidth=2):
"""
Adds a triangular surface between three atoms.
Args:
atoms: Atoms between which a triangle will be drawn.
color: Color for triangle as RGB.
center: The "central atom" of the triangle
opacity: opacity of the triangle
draw_edges: If set to True, the a line will be drawn at each edge
edges_color: Color of the line for the edges
edges_linewidth: Width of the line drawn for the edges
"""
points = vtk.vtkPoints()
triangle = vtk.vtkTriangle()
for ii in range(3):
points.InsertNextPoint(neighbors[ii].x, neighbors[ii].y,
neighbors[ii].z)
triangle.GetPointIds().SetId(ii, ii)
triangles = vtk.vtkCellArray()
triangles.InsertNextCell(triangle)
# polydata object
trianglePolyData = vtk.vtkPolyData()
trianglePolyData.SetPoints( points )
trianglePolyData.SetPolys( triangles )
# mapper
mapper = vtk.vtkPolyDataMapper()
mapper.SetInput(trianglePolyData)
ac = vtk.vtkActor()
ac.SetMapper(mapper)
ac.GetProperty().SetOpacity(opacity)
if color == 'element':
if center is None:
raise ValueError(
'Color should be chosen according to the central atom, '
'and central atom is not provided')
# If partial occupations are involved, the color of the specie with
# the highest occupation is used
myoccu = 0.0
for specie, occu in center.species.items():
if occu > myoccu:
myspecie = specie
myoccu = occu
color = [i / 255 for i in self.el_color_mapping[myspecie.symbol]]
ac.GetProperty().SetColor(color)
else:
ac.GetProperty().SetColor(color)
if draw_edges:
ac.GetProperty().SetEdgeColor(edges_color)
ac.GetProperty().SetLineWidth(edges_linewidth)
ac.GetProperty().EdgeVisibilityOn()
self.ren.AddActor(ac) | def function[add_triangle, parameter[self, neighbors, color, center, opacity, draw_edges, edges_color, edges_linewidth]]:
constant[
Adds a triangular surface between three atoms.
Args:
atoms: Atoms between which a triangle will be drawn.
color: Color for triangle as RGB.
center: The "central atom" of the triangle
opacity: opacity of the triangle
draw_edges: If set to True, the a line will be drawn at each edge
edges_color: Color of the line for the edges
edges_linewidth: Width of the line drawn for the edges
]
variable[points] assign[=] call[name[vtk].vtkPoints, parameter[]]
variable[triangle] assign[=] call[name[vtk].vtkTriangle, parameter[]]
for taget[name[ii]] in starred[call[name[range], parameter[constant[3]]]] begin[:]
call[name[points].InsertNextPoint, parameter[call[name[neighbors]][name[ii]].x, call[name[neighbors]][name[ii]].y, call[name[neighbors]][name[ii]].z]]
call[call[name[triangle].GetPointIds, parameter[]].SetId, parameter[name[ii], name[ii]]]
variable[triangles] assign[=] call[name[vtk].vtkCellArray, parameter[]]
call[name[triangles].InsertNextCell, parameter[name[triangle]]]
variable[trianglePolyData] assign[=] call[name[vtk].vtkPolyData, parameter[]]
call[name[trianglePolyData].SetPoints, parameter[name[points]]]
call[name[trianglePolyData].SetPolys, parameter[name[triangles]]]
variable[mapper] assign[=] call[name[vtk].vtkPolyDataMapper, parameter[]]
call[name[mapper].SetInput, parameter[name[trianglePolyData]]]
variable[ac] assign[=] call[name[vtk].vtkActor, parameter[]]
call[name[ac].SetMapper, parameter[name[mapper]]]
call[call[name[ac].GetProperty, parameter[]].SetOpacity, parameter[name[opacity]]]
if compare[name[color] equal[==] constant[element]] begin[:]
if compare[name[center] is constant[None]] begin[:]
<ast.Raise object at 0x7da18f813d90>
variable[myoccu] assign[=] constant[0.0]
for taget[tuple[[<ast.Name object at 0x7da18f813400>, <ast.Name object at 0x7da18f8107f0>]]] in starred[call[name[center].species.items, parameter[]]] begin[:]
if compare[name[occu] greater[>] name[myoccu]] begin[:]
variable[myspecie] assign[=] name[specie]
variable[myoccu] assign[=] name[occu]
variable[color] assign[=] <ast.ListComp object at 0x7da18f8123e0>
call[call[name[ac].GetProperty, parameter[]].SetColor, parameter[name[color]]]
if name[draw_edges] begin[:]
call[call[name[ac].GetProperty, parameter[]].SetEdgeColor, parameter[name[edges_color]]]
call[call[name[ac].GetProperty, parameter[]].SetLineWidth, parameter[name[edges_linewidth]]]
call[call[name[ac].GetProperty, parameter[]].EdgeVisibilityOn, parameter[]]
call[name[self].ren.AddActor, parameter[name[ac]]] | keyword[def] identifier[add_triangle] ( identifier[self] , identifier[neighbors] , identifier[color] , identifier[center] = keyword[None] , identifier[opacity] = literal[int] ,
identifier[draw_edges] = keyword[False] , identifier[edges_color] =[ literal[int] , literal[int] , literal[int] ],
identifier[edges_linewidth] = literal[int] ):
literal[string]
identifier[points] = identifier[vtk] . identifier[vtkPoints] ()
identifier[triangle] = identifier[vtk] . identifier[vtkTriangle] ()
keyword[for] identifier[ii] keyword[in] identifier[range] ( literal[int] ):
identifier[points] . identifier[InsertNextPoint] ( identifier[neighbors] [ identifier[ii] ]. identifier[x] , identifier[neighbors] [ identifier[ii] ]. identifier[y] ,
identifier[neighbors] [ identifier[ii] ]. identifier[z] )
identifier[triangle] . identifier[GetPointIds] (). identifier[SetId] ( identifier[ii] , identifier[ii] )
identifier[triangles] = identifier[vtk] . identifier[vtkCellArray] ()
identifier[triangles] . identifier[InsertNextCell] ( identifier[triangle] )
identifier[trianglePolyData] = identifier[vtk] . identifier[vtkPolyData] ()
identifier[trianglePolyData] . identifier[SetPoints] ( identifier[points] )
identifier[trianglePolyData] . identifier[SetPolys] ( identifier[triangles] )
identifier[mapper] = identifier[vtk] . identifier[vtkPolyDataMapper] ()
identifier[mapper] . identifier[SetInput] ( identifier[trianglePolyData] )
identifier[ac] = identifier[vtk] . identifier[vtkActor] ()
identifier[ac] . identifier[SetMapper] ( identifier[mapper] )
identifier[ac] . identifier[GetProperty] (). identifier[SetOpacity] ( identifier[opacity] )
keyword[if] identifier[color] == literal[string] :
keyword[if] identifier[center] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] )
identifier[myoccu] = literal[int]
keyword[for] identifier[specie] , identifier[occu] keyword[in] identifier[center] . identifier[species] . identifier[items] ():
keyword[if] identifier[occu] > identifier[myoccu] :
identifier[myspecie] = identifier[specie]
identifier[myoccu] = identifier[occu]
identifier[color] =[ identifier[i] / literal[int] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[el_color_mapping] [ identifier[myspecie] . identifier[symbol] ]]
identifier[ac] . identifier[GetProperty] (). identifier[SetColor] ( identifier[color] )
keyword[else] :
identifier[ac] . identifier[GetProperty] (). identifier[SetColor] ( identifier[color] )
keyword[if] identifier[draw_edges] :
identifier[ac] . identifier[GetProperty] (). identifier[SetEdgeColor] ( identifier[edges_color] )
identifier[ac] . identifier[GetProperty] (). identifier[SetLineWidth] ( identifier[edges_linewidth] )
identifier[ac] . identifier[GetProperty] (). identifier[EdgeVisibilityOn] ()
identifier[self] . identifier[ren] . identifier[AddActor] ( identifier[ac] ) | def add_triangle(self, neighbors, color, center=None, opacity=0.4, draw_edges=False, edges_color=[0.0, 0.0, 0.0], edges_linewidth=2):
"""
Adds a triangular surface between three atoms.
Args:
atoms: Atoms between which a triangle will be drawn.
color: Color for triangle as RGB.
center: The "central atom" of the triangle
opacity: opacity of the triangle
draw_edges: If set to True, the a line will be drawn at each edge
edges_color: Color of the line for the edges
edges_linewidth: Width of the line drawn for the edges
"""
points = vtk.vtkPoints()
triangle = vtk.vtkTriangle()
for ii in range(3):
points.InsertNextPoint(neighbors[ii].x, neighbors[ii].y, neighbors[ii].z)
triangle.GetPointIds().SetId(ii, ii) # depends on [control=['for'], data=['ii']]
triangles = vtk.vtkCellArray()
triangles.InsertNextCell(triangle)
# polydata object
trianglePolyData = vtk.vtkPolyData()
trianglePolyData.SetPoints(points)
trianglePolyData.SetPolys(triangles)
# mapper
mapper = vtk.vtkPolyDataMapper()
mapper.SetInput(trianglePolyData)
ac = vtk.vtkActor()
ac.SetMapper(mapper)
ac.GetProperty().SetOpacity(opacity)
if color == 'element':
if center is None:
raise ValueError('Color should be chosen according to the central atom, and central atom is not provided') # depends on [control=['if'], data=[]]
# If partial occupations are involved, the color of the specie with
# the highest occupation is used
myoccu = 0.0
for (specie, occu) in center.species.items():
if occu > myoccu:
myspecie = specie
myoccu = occu # depends on [control=['if'], data=['occu', 'myoccu']] # depends on [control=['for'], data=[]]
color = [i / 255 for i in self.el_color_mapping[myspecie.symbol]]
ac.GetProperty().SetColor(color) # depends on [control=['if'], data=['color']]
else:
ac.GetProperty().SetColor(color)
if draw_edges:
ac.GetProperty().SetEdgeColor(edges_color)
ac.GetProperty().SetLineWidth(edges_linewidth)
ac.GetProperty().EdgeVisibilityOn() # depends on [control=['if'], data=[]]
self.ren.AddActor(ac) |
def get_role(role_id,**kwargs):
"""
Get a role by its ID.
"""
try:
role = db.DBSession.query(Role).filter(Role.id==role_id).one()
return role
except NoResultFound:
raise HydraError("Role not found (role_id={})".format(role_id)) | def function[get_role, parameter[role_id]]:
constant[
Get a role by its ID.
]
<ast.Try object at 0x7da204346f50> | keyword[def] identifier[get_role] ( identifier[role_id] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[role] = identifier[db] . identifier[DBSession] . identifier[query] ( identifier[Role] ). identifier[filter] ( identifier[Role] . identifier[id] == identifier[role_id] ). identifier[one] ()
keyword[return] identifier[role]
keyword[except] identifier[NoResultFound] :
keyword[raise] identifier[HydraError] ( literal[string] . identifier[format] ( identifier[role_id] )) | def get_role(role_id, **kwargs):
"""
Get a role by its ID.
"""
try:
role = db.DBSession.query(Role).filter(Role.id == role_id).one()
return role # depends on [control=['try'], data=[]]
except NoResultFound:
raise HydraError('Role not found (role_id={})'.format(role_id)) # depends on [control=['except'], data=[]] |
def get(self, jid=None): # pylint: disable=W0221
'''
A convenience URL for getting lists of previously run jobs or getting
the return from a single job
.. http:get:: /jobs/(jid)
List jobs or show a single job from the job cache.
:status 200: |200|
:status 401: |401|
:status 406: |406|
**Example request:**
.. code-block:: bash
curl -i localhost:8000/jobs
.. code-block:: text
GET /jobs HTTP/1.1
Host: localhost:8000
Accept: application/x-yaml
**Example response:**
.. code-block:: text
HTTP/1.1 200 OK
Content-Length: 165
Content-Type: application/x-yaml
return:
- '20121130104633606931':
Arguments:
- '3'
Function: test.fib
Start Time: 2012, Nov 30 10:46:33.606931
Target: jerry
Target-type: glob
**Example request:**
.. code-block:: bash
curl -i localhost:8000/jobs/20121130104633606931
.. code-block:: text
GET /jobs/20121130104633606931 HTTP/1.1
Host: localhost:8000
Accept: application/x-yaml
**Example response:**
.. code-block:: text
HTTP/1.1 200 OK
Content-Length: 73
Content-Type: application/x-yaml
info:
- Arguments:
- '3'
Function: test.fib
Minions:
- jerry
Start Time: 2012, Nov 30 10:46:33.606931
Target: '*'
Target-type: glob
User: saltdev
jid: '20121130104633606931'
return:
- jerry:
- - 0
- 1
- 1
- 2
- 6.9141387939453125e-06
'''
# if you aren't authenticated, redirect to login
if not self._verify_auth():
self.redirect('/login')
return
if jid:
self.lowstate = [{
'fun': 'jobs.list_job',
'jid': jid,
'client': 'runner',
}]
else:
self.lowstate = [{
'fun': 'jobs.list_jobs',
'client': 'runner',
}]
self.disbatch() | def function[get, parameter[self, jid]]:
constant[
A convenience URL for getting lists of previously run jobs or getting
the return from a single job
.. http:get:: /jobs/(jid)
List jobs or show a single job from the job cache.
:status 200: |200|
:status 401: |401|
:status 406: |406|
**Example request:**
.. code-block:: bash
curl -i localhost:8000/jobs
.. code-block:: text
GET /jobs HTTP/1.1
Host: localhost:8000
Accept: application/x-yaml
**Example response:**
.. code-block:: text
HTTP/1.1 200 OK
Content-Length: 165
Content-Type: application/x-yaml
return:
- '20121130104633606931':
Arguments:
- '3'
Function: test.fib
Start Time: 2012, Nov 30 10:46:33.606931
Target: jerry
Target-type: glob
**Example request:**
.. code-block:: bash
curl -i localhost:8000/jobs/20121130104633606931
.. code-block:: text
GET /jobs/20121130104633606931 HTTP/1.1
Host: localhost:8000
Accept: application/x-yaml
**Example response:**
.. code-block:: text
HTTP/1.1 200 OK
Content-Length: 73
Content-Type: application/x-yaml
info:
- Arguments:
- '3'
Function: test.fib
Minions:
- jerry
Start Time: 2012, Nov 30 10:46:33.606931
Target: '*'
Target-type: glob
User: saltdev
jid: '20121130104633606931'
return:
- jerry:
- - 0
- 1
- 1
- 2
- 6.9141387939453125e-06
]
if <ast.UnaryOp object at 0x7da2043445b0> begin[:]
call[name[self].redirect, parameter[constant[/login]]]
return[None]
if name[jid] begin[:]
name[self].lowstate assign[=] list[[<ast.Dict object at 0x7da2046235e0>]]
call[name[self].disbatch, parameter[]] | keyword[def] identifier[get] ( identifier[self] , identifier[jid] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_verify_auth] ():
identifier[self] . identifier[redirect] ( literal[string] )
keyword[return]
keyword[if] identifier[jid] :
identifier[self] . identifier[lowstate] =[{
literal[string] : literal[string] ,
literal[string] : identifier[jid] ,
literal[string] : literal[string] ,
}]
keyword[else] :
identifier[self] . identifier[lowstate] =[{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}]
identifier[self] . identifier[disbatch] () | def get(self, jid=None): # pylint: disable=W0221
"\n A convenience URL for getting lists of previously run jobs or getting\n the return from a single job\n\n .. http:get:: /jobs/(jid)\n\n List jobs or show a single job from the job cache.\n\n :status 200: |200|\n :status 401: |401|\n :status 406: |406|\n\n **Example request:**\n\n .. code-block:: bash\n\n curl -i localhost:8000/jobs\n\n .. code-block:: text\n\n GET /jobs HTTP/1.1\n Host: localhost:8000\n Accept: application/x-yaml\n\n **Example response:**\n\n .. code-block:: text\n\n HTTP/1.1 200 OK\n Content-Length: 165\n Content-Type: application/x-yaml\n\n return:\n - '20121130104633606931':\n Arguments:\n - '3'\n Function: test.fib\n Start Time: 2012, Nov 30 10:46:33.606931\n Target: jerry\n Target-type: glob\n\n **Example request:**\n\n .. code-block:: bash\n\n curl -i localhost:8000/jobs/20121130104633606931\n\n .. code-block:: text\n\n GET /jobs/20121130104633606931 HTTP/1.1\n Host: localhost:8000\n Accept: application/x-yaml\n\n **Example response:**\n\n .. code-block:: text\n\n HTTP/1.1 200 OK\n Content-Length: 73\n Content-Type: application/x-yaml\n\n info:\n - Arguments:\n - '3'\n Function: test.fib\n Minions:\n - jerry\n Start Time: 2012, Nov 30 10:46:33.606931\n Target: '*'\n Target-type: glob\n User: saltdev\n jid: '20121130104633606931'\n return:\n - jerry:\n - - 0\n - 1\n - 1\n - 2\n - 6.9141387939453125e-06\n "
# if you aren't authenticated, redirect to login
if not self._verify_auth():
self.redirect('/login')
return # depends on [control=['if'], data=[]]
if jid:
self.lowstate = [{'fun': 'jobs.list_job', 'jid': jid, 'client': 'runner'}] # depends on [control=['if'], data=[]]
else:
self.lowstate = [{'fun': 'jobs.list_jobs', 'client': 'runner'}]
self.disbatch() |
def can_create_log_entry_with_record_types(self, log_entry_record_types):
"""Tests if this user can create a single ``LogEntry`` using the desired record types.
While ``LoggingManager.getLogEntryRecordTypes()`` can be used to
examine which records are supported, this method tests which
record(s) are required for creating a specific ``LogEntry``.
Providing an empty array tests if a ``LogEntry`` can be created
with no records.
arg: log_entry_record_types (osid.type.Type[]): array of log
entry record types
return: (boolean) - ``true`` if ``LogEntry`` creation using the
specified record ``Types`` is supported, ``false``
otherwise
raise: NullArgument - ``log_entry_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.can_create_bin_with_record_types
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if self._catalog_session is not None:
return self._catalog_session.can_create_catalog_with_record_types(catalog_record_types=log_entry_record_types)
return True | def function[can_create_log_entry_with_record_types, parameter[self, log_entry_record_types]]:
constant[Tests if this user can create a single ``LogEntry`` using the desired record types.
While ``LoggingManager.getLogEntryRecordTypes()`` can be used to
examine which records are supported, this method tests which
record(s) are required for creating a specific ``LogEntry``.
Providing an empty array tests if a ``LogEntry`` can be created
with no records.
arg: log_entry_record_types (osid.type.Type[]): array of log
entry record types
return: (boolean) - ``true`` if ``LogEntry`` creation using the
specified record ``Types`` is supported, ``false``
otherwise
raise: NullArgument - ``log_entry_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
]
if compare[name[self]._catalog_session is_not constant[None]] begin[:]
return[call[name[self]._catalog_session.can_create_catalog_with_record_types, parameter[]]]
return[constant[True]] | keyword[def] identifier[can_create_log_entry_with_record_types] ( identifier[self] , identifier[log_entry_record_types] ):
literal[string]
keyword[if] identifier[self] . identifier[_catalog_session] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_catalog_session] . identifier[can_create_catalog_with_record_types] ( identifier[catalog_record_types] = identifier[log_entry_record_types] )
keyword[return] keyword[True] | def can_create_log_entry_with_record_types(self, log_entry_record_types):
"""Tests if this user can create a single ``LogEntry`` using the desired record types.
While ``LoggingManager.getLogEntryRecordTypes()`` can be used to
examine which records are supported, this method tests which
record(s) are required for creating a specific ``LogEntry``.
Providing an empty array tests if a ``LogEntry`` can be created
with no records.
arg: log_entry_record_types (osid.type.Type[]): array of log
entry record types
return: (boolean) - ``true`` if ``LogEntry`` creation using the
specified record ``Types`` is supported, ``false``
otherwise
raise: NullArgument - ``log_entry_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.can_create_bin_with_record_types
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if self._catalog_session is not None:
return self._catalog_session.can_create_catalog_with_record_types(catalog_record_types=log_entry_record_types) # depends on [control=['if'], data=[]]
return True |
def validateEmail(value, blank=False, strip=None, allowlistRegexes=None, blocklistRegexes=None, excMsg=None):
"""Raises ValidationException if value is not an email address.
Returns the value argument.
* value (str): The value being validated as an email address.
* blank (bool): If True, a blank string will be accepted. Defaults to False.
* strip (bool, str, None): If None, whitespace is stripped from value. If a str, the characters in it are stripped from value. If False, nothing is stripped.
* allowlistRegexes (Sequence, None): A sequence of regex str that will explicitly pass validation, even if they aren't numbers.
* blocklistRegexes (Sequence, None): A sequence of regex str or (regex_str, response_str) tuples that, if matched, will explicitly fail validation.
* excMsg (str): A custom message to use in the raised ValidationException.
>>> import pysimplevalidate as pysv
>>> pysv.validateEmail('al@inventwithpython.com')
'al@inventwithpython.com'
>>> pysv.validateEmail('alinventwithpython.com')
Traceback (most recent call last):
...
pysimplevalidate.ValidationException: 'alinventwithpython.com' is not a valid email address.
"""
# Reuse the logic in validateRegex()
try:
result = validateRegex(value=value, regex=EMAIL_REGEX, blank=blank, strip=strip, allowlistRegexes=allowlistRegexes, blocklistRegexes=blocklistRegexes)
if result is not None:
return result
except ValidationException:
_raiseValidationException(_('%r is not a valid email address.') % (value), excMsg) | def function[validateEmail, parameter[value, blank, strip, allowlistRegexes, blocklistRegexes, excMsg]]:
constant[Raises ValidationException if value is not an email address.
Returns the value argument.
* value (str): The value being validated as an email address.
* blank (bool): If True, a blank string will be accepted. Defaults to False.
* strip (bool, str, None): If None, whitespace is stripped from value. If a str, the characters in it are stripped from value. If False, nothing is stripped.
* allowlistRegexes (Sequence, None): A sequence of regex str that will explicitly pass validation, even if they aren't numbers.
* blocklistRegexes (Sequence, None): A sequence of regex str or (regex_str, response_str) tuples that, if matched, will explicitly fail validation.
* excMsg (str): A custom message to use in the raised ValidationException.
>>> import pysimplevalidate as pysv
>>> pysv.validateEmail('al@inventwithpython.com')
'al@inventwithpython.com'
>>> pysv.validateEmail('alinventwithpython.com')
Traceback (most recent call last):
...
pysimplevalidate.ValidationException: 'alinventwithpython.com' is not a valid email address.
]
<ast.Try object at 0x7da18fe91210> | keyword[def] identifier[validateEmail] ( identifier[value] , identifier[blank] = keyword[False] , identifier[strip] = keyword[None] , identifier[allowlistRegexes] = keyword[None] , identifier[blocklistRegexes] = keyword[None] , identifier[excMsg] = keyword[None] ):
literal[string]
keyword[try] :
identifier[result] = identifier[validateRegex] ( identifier[value] = identifier[value] , identifier[regex] = identifier[EMAIL_REGEX] , identifier[blank] = identifier[blank] , identifier[strip] = identifier[strip] , identifier[allowlistRegexes] = identifier[allowlistRegexes] , identifier[blocklistRegexes] = identifier[blocklistRegexes] )
keyword[if] identifier[result] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[result]
keyword[except] identifier[ValidationException] :
identifier[_raiseValidationException] ( identifier[_] ( literal[string] )%( identifier[value] ), identifier[excMsg] ) | def validateEmail(value, blank=False, strip=None, allowlistRegexes=None, blocklistRegexes=None, excMsg=None):
"""Raises ValidationException if value is not an email address.
Returns the value argument.
* value (str): The value being validated as an email address.
* blank (bool): If True, a blank string will be accepted. Defaults to False.
* strip (bool, str, None): If None, whitespace is stripped from value. If a str, the characters in it are stripped from value. If False, nothing is stripped.
* allowlistRegexes (Sequence, None): A sequence of regex str that will explicitly pass validation, even if they aren't numbers.
* blocklistRegexes (Sequence, None): A sequence of regex str or (regex_str, response_str) tuples that, if matched, will explicitly fail validation.
* excMsg (str): A custom message to use in the raised ValidationException.
>>> import pysimplevalidate as pysv
>>> pysv.validateEmail('al@inventwithpython.com')
'al@inventwithpython.com'
>>> pysv.validateEmail('alinventwithpython.com')
Traceback (most recent call last):
...
pysimplevalidate.ValidationException: 'alinventwithpython.com' is not a valid email address.
"""
# Reuse the logic in validateRegex()
try:
result = validateRegex(value=value, regex=EMAIL_REGEX, blank=blank, strip=strip, allowlistRegexes=allowlistRegexes, blocklistRegexes=blocklistRegexes)
if result is not None:
return result # depends on [control=['if'], data=['result']] # depends on [control=['try'], data=[]]
except ValidationException:
_raiseValidationException(_('%r is not a valid email address.') % value, excMsg) # depends on [control=['except'], data=[]] |
def covars(X, Y, remove_mean=False, modify_data=False, symmetrize=False, weights=None, sparse_mode='auto',
sparse_tol=0.0):
""" Computes the covariance and cross-covariance matrix of X and Y
If symmetrize is False, computes
.. math:
C_XX &=& X^\top X
C_XY &=& X^\top Y
If symmetrize is True, computes
.. math:
C_XX &=& \frac{1}{2} (X^\top X + Y^\top Y)
C_XY &=& \frac{1}{2} (X^\top Y + Y^\top X)
while exploiting zero or constant columns in the data matrix.
WARNING: Directly use moments_XXXY if you can. This function does an additional
constant-matrix multiplication and does not return the mean.
Parameters
----------
X : ndarray (T, M)
Data matrix
Y : ndarray (T, N)
Second data matrix
remove_mean : bool
True: remove column mean from the data, False: don't remove mean.
modify_data : bool
If remove_mean=True, the mean will be removed in the data matrix X,
without creating an independent copy. This option is faster but might
lead to surprises because your input array is changed.
symmetrize : bool
Computes symmetrized means and moments (see above)
weights : None or ndarray(T, )
weights assigned to each trajectory point of X. If None, all data points have weight one.
If ndarray, each data point is assigned a separate weight.
sparse_mode : str
one of:
* 'dense' : always use dense mode
* 'sparse' : always use sparse mode if possible
* 'auto' : automatic
sparse_tol: float
Threshold for considering column to be zero in order to save computing
effort when the data is sparse or almost sparse.
If max(abs(X[:, i])) < sparse_tol, then row i (and also column i if Y
is not given) of the covariance matrix will be set to zero. If Y is
given and max(abs(Y[:, i])) < sparse_tol, then column i of the
covariance matrix will be set to zero.
Returns
-------
C_XX : ndarray (M, M)
Covariance matrix of X
C_XY : ndarray (M, N)
Covariance matrix of XY
See also
--------
moments_XXXY
"""
w, sx, sy, Mxx, Mxy = moments_XXXY(X, Y, remove_mean=remove_mean, modify_data=modify_data, weights=weights,
symmetrize=symmetrize, sparse_mode=sparse_mode, sparse_tol=sparse_tol)
return Mxx / float(w), Mxy / float(w) | def function[covars, parameter[X, Y, remove_mean, modify_data, symmetrize, weights, sparse_mode, sparse_tol]]:
constant[ Computes the covariance and cross-covariance matrix of X and Y
If symmetrize is False, computes
.. math:
C_XX &=& X^ op X
C_XY &=& X^ op Y
If symmetrize is True, computes
.. math:
C_XX &=& rac{1}{2} (X^ op X + Y^ op Y)
C_XY &=& rac{1}{2} (X^ op Y + Y^ op X)
while exploiting zero or constant columns in the data matrix.
WARNING: Directly use moments_XXXY if you can. This function does an additional
constant-matrix multiplication and does not return the mean.
Parameters
----------
X : ndarray (T, M)
Data matrix
Y : ndarray (T, N)
Second data matrix
remove_mean : bool
True: remove column mean from the data, False: don't remove mean.
modify_data : bool
If remove_mean=True, the mean will be removed in the data matrix X,
without creating an independent copy. This option is faster but might
lead to surprises because your input array is changed.
symmetrize : bool
Computes symmetrized means and moments (see above)
weights : None or ndarray(T, )
weights assigned to each trajectory point of X. If None, all data points have weight one.
If ndarray, each data point is assigned a separate weight.
sparse_mode : str
one of:
* 'dense' : always use dense mode
* 'sparse' : always use sparse mode if possible
* 'auto' : automatic
sparse_tol: float
Threshold for considering column to be zero in order to save computing
effort when the data is sparse or almost sparse.
If max(abs(X[:, i])) < sparse_tol, then row i (and also column i if Y
is not given) of the covariance matrix will be set to zero. If Y is
given and max(abs(Y[:, i])) < sparse_tol, then column i of the
covariance matrix will be set to zero.
Returns
-------
C_XX : ndarray (M, M)
Covariance matrix of X
C_XY : ndarray (M, N)
Covariance matrix of XY
See also
--------
moments_XXXY
]
<ast.Tuple object at 0x7da1b07e09d0> assign[=] call[name[moments_XXXY], parameter[name[X], name[Y]]]
return[tuple[[<ast.BinOp object at 0x7da1b0772290>, <ast.BinOp object at 0x7da1b0772440>]]] | keyword[def] identifier[covars] ( identifier[X] , identifier[Y] , identifier[remove_mean] = keyword[False] , identifier[modify_data] = keyword[False] , identifier[symmetrize] = keyword[False] , identifier[weights] = keyword[None] , identifier[sparse_mode] = literal[string] ,
identifier[sparse_tol] = literal[int] ):
literal[string]
identifier[w] , identifier[sx] , identifier[sy] , identifier[Mxx] , identifier[Mxy] = identifier[moments_XXXY] ( identifier[X] , identifier[Y] , identifier[remove_mean] = identifier[remove_mean] , identifier[modify_data] = identifier[modify_data] , identifier[weights] = identifier[weights] ,
identifier[symmetrize] = identifier[symmetrize] , identifier[sparse_mode] = identifier[sparse_mode] , identifier[sparse_tol] = identifier[sparse_tol] )
keyword[return] identifier[Mxx] / identifier[float] ( identifier[w] ), identifier[Mxy] / identifier[float] ( identifier[w] ) | def covars(X, Y, remove_mean=False, modify_data=False, symmetrize=False, weights=None, sparse_mode='auto', sparse_tol=0.0):
""" Computes the covariance and cross-covariance matrix of X and Y
If symmetrize is False, computes
.. math:
C_XX &=& X^ op X
C_XY &=& X^ op Y
If symmetrize is True, computes
.. math:
C_XX &=& \x0crac{1}{2} (X^ op X + Y^ op Y)
C_XY &=& \x0crac{1}{2} (X^ op Y + Y^ op X)
while exploiting zero or constant columns in the data matrix.
WARNING: Directly use moments_XXXY if you can. This function does an additional
constant-matrix multiplication and does not return the mean.
Parameters
----------
X : ndarray (T, M)
Data matrix
Y : ndarray (T, N)
Second data matrix
remove_mean : bool
True: remove column mean from the data, False: don't remove mean.
modify_data : bool
If remove_mean=True, the mean will be removed in the data matrix X,
without creating an independent copy. This option is faster but might
lead to surprises because your input array is changed.
symmetrize : bool
Computes symmetrized means and moments (see above)
weights : None or ndarray(T, )
weights assigned to each trajectory point of X. If None, all data points have weight one.
If ndarray, each data point is assigned a separate weight.
sparse_mode : str
one of:
* 'dense' : always use dense mode
* 'sparse' : always use sparse mode if possible
* 'auto' : automatic
sparse_tol: float
Threshold for considering column to be zero in order to save computing
effort when the data is sparse or almost sparse.
If max(abs(X[:, i])) < sparse_tol, then row i (and also column i if Y
is not given) of the covariance matrix will be set to zero. If Y is
given and max(abs(Y[:, i])) < sparse_tol, then column i of the
covariance matrix will be set to zero.
Returns
-------
C_XX : ndarray (M, M)
Covariance matrix of X
C_XY : ndarray (M, N)
Covariance matrix of XY
See also
--------
moments_XXXY
"""
(w, sx, sy, Mxx, Mxy) = moments_XXXY(X, Y, remove_mean=remove_mean, modify_data=modify_data, weights=weights, symmetrize=symmetrize, sparse_mode=sparse_mode, sparse_tol=sparse_tol)
return (Mxx / float(w), Mxy / float(w)) |
def show(
self,
filename: Optional[str] = None,
show_link: bool = True,
auto_open: bool = True,
detect_notebook: bool = True,
) -> None:
"""Display the chart.
Parameters
----------
filename : str, optional
Save plot to this filename, otherwise it's saved to a temporary file.
show_link : bool, optional
Show link to plotly.
auto_open : bool, optional
Automatically open the plot (in the browser).
detect_notebook : bool, optional
Try to detect if we're running in a notebook.
"""
kargs = {}
if detect_notebook and _detect_notebook():
py.init_notebook_mode()
plot = py.iplot
else:
plot = py.plot
if filename is None:
filename = NamedTemporaryFile(prefix='plotly', suffix='.html', delete=False).name
kargs['filename'] = filename
kargs['auto_open'] = auto_open
plot(self, show_link=show_link, **kargs) | def function[show, parameter[self, filename, show_link, auto_open, detect_notebook]]:
constant[Display the chart.
Parameters
----------
filename : str, optional
Save plot to this filename, otherwise it's saved to a temporary file.
show_link : bool, optional
Show link to plotly.
auto_open : bool, optional
Automatically open the plot (in the browser).
detect_notebook : bool, optional
Try to detect if we're running in a notebook.
]
variable[kargs] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da207f01090> begin[:]
call[name[py].init_notebook_mode, parameter[]]
variable[plot] assign[=] name[py].iplot
call[name[plot], parameter[name[self]]] | keyword[def] identifier[show] (
identifier[self] ,
identifier[filename] : identifier[Optional] [ identifier[str] ]= keyword[None] ,
identifier[show_link] : identifier[bool] = keyword[True] ,
identifier[auto_open] : identifier[bool] = keyword[True] ,
identifier[detect_notebook] : identifier[bool] = keyword[True] ,
)-> keyword[None] :
literal[string]
identifier[kargs] ={}
keyword[if] identifier[detect_notebook] keyword[and] identifier[_detect_notebook] ():
identifier[py] . identifier[init_notebook_mode] ()
identifier[plot] = identifier[py] . identifier[iplot]
keyword[else] :
identifier[plot] = identifier[py] . identifier[plot]
keyword[if] identifier[filename] keyword[is] keyword[None] :
identifier[filename] = identifier[NamedTemporaryFile] ( identifier[prefix] = literal[string] , identifier[suffix] = literal[string] , identifier[delete] = keyword[False] ). identifier[name]
identifier[kargs] [ literal[string] ]= identifier[filename]
identifier[kargs] [ literal[string] ]= identifier[auto_open]
identifier[plot] ( identifier[self] , identifier[show_link] = identifier[show_link] ,** identifier[kargs] ) | def show(self, filename: Optional[str]=None, show_link: bool=True, auto_open: bool=True, detect_notebook: bool=True) -> None:
"""Display the chart.
Parameters
----------
filename : str, optional
Save plot to this filename, otherwise it's saved to a temporary file.
show_link : bool, optional
Show link to plotly.
auto_open : bool, optional
Automatically open the plot (in the browser).
detect_notebook : bool, optional
Try to detect if we're running in a notebook.
"""
kargs = {}
if detect_notebook and _detect_notebook():
py.init_notebook_mode()
plot = py.iplot # depends on [control=['if'], data=[]]
else:
plot = py.plot
if filename is None:
filename = NamedTemporaryFile(prefix='plotly', suffix='.html', delete=False).name # depends on [control=['if'], data=['filename']]
kargs['filename'] = filename
kargs['auto_open'] = auto_open
plot(self, show_link=show_link, **kargs) |
def xpubsubSockets(self, hostSub, portSub, hostPub, portPub):
'''
Creates frontend and backend for a XPUB/XSUB forwarding device
'''
frontend_addr = self.tcpAddress(hostSub, portSub)
backend_addr = self.tcpAddress(hostPub, portPub)
frontendSocket = self._context.socket(zmq.SUB)
frontendSocket.bind(frontend_addr)
frontendSocket.setsockopt(zmq.SUBSCRIBE, b'')
backendSocket = self._context.socket(zmq.PUB)
backendSocket.bind(backend_addr)
return frontendSocket, backendSocket | def function[xpubsubSockets, parameter[self, hostSub, portSub, hostPub, portPub]]:
constant[
Creates frontend and backend for a XPUB/XSUB forwarding device
]
variable[frontend_addr] assign[=] call[name[self].tcpAddress, parameter[name[hostSub], name[portSub]]]
variable[backend_addr] assign[=] call[name[self].tcpAddress, parameter[name[hostPub], name[portPub]]]
variable[frontendSocket] assign[=] call[name[self]._context.socket, parameter[name[zmq].SUB]]
call[name[frontendSocket].bind, parameter[name[frontend_addr]]]
call[name[frontendSocket].setsockopt, parameter[name[zmq].SUBSCRIBE, constant[b'']]]
variable[backendSocket] assign[=] call[name[self]._context.socket, parameter[name[zmq].PUB]]
call[name[backendSocket].bind, parameter[name[backend_addr]]]
return[tuple[[<ast.Name object at 0x7da18bc70af0>, <ast.Name object at 0x7da18bc70e50>]]] | keyword[def] identifier[xpubsubSockets] ( identifier[self] , identifier[hostSub] , identifier[portSub] , identifier[hostPub] , identifier[portPub] ):
literal[string]
identifier[frontend_addr] = identifier[self] . identifier[tcpAddress] ( identifier[hostSub] , identifier[portSub] )
identifier[backend_addr] = identifier[self] . identifier[tcpAddress] ( identifier[hostPub] , identifier[portPub] )
identifier[frontendSocket] = identifier[self] . identifier[_context] . identifier[socket] ( identifier[zmq] . identifier[SUB] )
identifier[frontendSocket] . identifier[bind] ( identifier[frontend_addr] )
identifier[frontendSocket] . identifier[setsockopt] ( identifier[zmq] . identifier[SUBSCRIBE] , literal[string] )
identifier[backendSocket] = identifier[self] . identifier[_context] . identifier[socket] ( identifier[zmq] . identifier[PUB] )
identifier[backendSocket] . identifier[bind] ( identifier[backend_addr] )
keyword[return] identifier[frontendSocket] , identifier[backendSocket] | def xpubsubSockets(self, hostSub, portSub, hostPub, portPub):
"""
Creates frontend and backend for a XPUB/XSUB forwarding device
"""
frontend_addr = self.tcpAddress(hostSub, portSub)
backend_addr = self.tcpAddress(hostPub, portPub)
frontendSocket = self._context.socket(zmq.SUB)
frontendSocket.bind(frontend_addr)
frontendSocket.setsockopt(zmq.SUBSCRIBE, b'')
backendSocket = self._context.socket(zmq.PUB)
backendSocket.bind(backend_addr)
return (frontendSocket, backendSocket) |
def init_bn_weight(layer):
'''initilize batch norm layer weight.
'''
n_filters = layer.num_features
new_weights = [
add_noise(np.ones(n_filters, dtype=np.float32), np.array([0, 1])),
add_noise(np.zeros(n_filters, dtype=np.float32), np.array([0, 1])),
add_noise(np.zeros(n_filters, dtype=np.float32), np.array([0, 1])),
add_noise(np.ones(n_filters, dtype=np.float32), np.array([0, 1])),
]
layer.set_weights(new_weights) | def function[init_bn_weight, parameter[layer]]:
constant[initilize batch norm layer weight.
]
variable[n_filters] assign[=] name[layer].num_features
variable[new_weights] assign[=] list[[<ast.Call object at 0x7da20e74a800>, <ast.Call object at 0x7da20e74b850>, <ast.Call object at 0x7da20e7497b0>, <ast.Call object at 0x7da20e74a8f0>]]
call[name[layer].set_weights, parameter[name[new_weights]]] | keyword[def] identifier[init_bn_weight] ( identifier[layer] ):
literal[string]
identifier[n_filters] = identifier[layer] . identifier[num_features]
identifier[new_weights] =[
identifier[add_noise] ( identifier[np] . identifier[ones] ( identifier[n_filters] , identifier[dtype] = identifier[np] . identifier[float32] ), identifier[np] . identifier[array] ([ literal[int] , literal[int] ])),
identifier[add_noise] ( identifier[np] . identifier[zeros] ( identifier[n_filters] , identifier[dtype] = identifier[np] . identifier[float32] ), identifier[np] . identifier[array] ([ literal[int] , literal[int] ])),
identifier[add_noise] ( identifier[np] . identifier[zeros] ( identifier[n_filters] , identifier[dtype] = identifier[np] . identifier[float32] ), identifier[np] . identifier[array] ([ literal[int] , literal[int] ])),
identifier[add_noise] ( identifier[np] . identifier[ones] ( identifier[n_filters] , identifier[dtype] = identifier[np] . identifier[float32] ), identifier[np] . identifier[array] ([ literal[int] , literal[int] ])),
]
identifier[layer] . identifier[set_weights] ( identifier[new_weights] ) | def init_bn_weight(layer):
"""initilize batch norm layer weight.
"""
n_filters = layer.num_features
new_weights = [add_noise(np.ones(n_filters, dtype=np.float32), np.array([0, 1])), add_noise(np.zeros(n_filters, dtype=np.float32), np.array([0, 1])), add_noise(np.zeros(n_filters, dtype=np.float32), np.array([0, 1])), add_noise(np.ones(n_filters, dtype=np.float32), np.array([0, 1]))]
layer.set_weights(new_weights) |
def TP2(dv, u, jac=False):
'''Demo problem 2 for horsetail matching, takes two input vectors of size 2
and returns just the qoi if jac is False or the qoi and its gradient if jac
is True'''
y = dv[0]/2.
z = dv[1]/2. + 12
q = 0.25*((y**2 + z**2)/10 + 5*u[0]*u[1] - z*u[1]**2) + 0.2*z*u[1]**3 + 7
if not jac:
return q
else:
dqdx1 = (1./8.)*( 2*y/10. )
dqdx2 = (1./8.)*( 2*z/10. - u[1]**2) + 0.1*u[1]**3
return q, [dqdx1, dqdx2] | def function[TP2, parameter[dv, u, jac]]:
constant[Demo problem 2 for horsetail matching, takes two input vectors of size 2
and returns just the qoi if jac is False or the qoi and its gradient if jac
is True]
variable[y] assign[=] binary_operation[call[name[dv]][constant[0]] / constant[2.0]]
variable[z] assign[=] binary_operation[binary_operation[call[name[dv]][constant[1]] / constant[2.0]] + constant[12]]
variable[q] assign[=] binary_operation[binary_operation[binary_operation[constant[0.25] * binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[y] ** constant[2]] + binary_operation[name[z] ** constant[2]]] / constant[10]] + binary_operation[binary_operation[constant[5] * call[name[u]][constant[0]]] * call[name[u]][constant[1]]]] - binary_operation[name[z] * binary_operation[call[name[u]][constant[1]] ** constant[2]]]]] + binary_operation[binary_operation[constant[0.2] * name[z]] * binary_operation[call[name[u]][constant[1]] ** constant[3]]]] + constant[7]]
if <ast.UnaryOp object at 0x7da204564bb0> begin[:]
return[name[q]] | keyword[def] identifier[TP2] ( identifier[dv] , identifier[u] , identifier[jac] = keyword[False] ):
literal[string]
identifier[y] = identifier[dv] [ literal[int] ]/ literal[int]
identifier[z] = identifier[dv] [ literal[int] ]/ literal[int] + literal[int]
identifier[q] = literal[int] *(( identifier[y] ** literal[int] + identifier[z] ** literal[int] )/ literal[int] + literal[int] * identifier[u] [ literal[int] ]* identifier[u] [ literal[int] ]- identifier[z] * identifier[u] [ literal[int] ]** literal[int] )+ literal[int] * identifier[z] * identifier[u] [ literal[int] ]** literal[int] + literal[int]
keyword[if] keyword[not] identifier[jac] :
keyword[return] identifier[q]
keyword[else] :
identifier[dqdx1] =( literal[int] / literal[int] )*( literal[int] * identifier[y] / literal[int] )
identifier[dqdx2] =( literal[int] / literal[int] )*( literal[int] * identifier[z] / literal[int] - identifier[u] [ literal[int] ]** literal[int] )+ literal[int] * identifier[u] [ literal[int] ]** literal[int]
keyword[return] identifier[q] ,[ identifier[dqdx1] , identifier[dqdx2] ] | def TP2(dv, u, jac=False):
"""Demo problem 2 for horsetail matching, takes two input vectors of size 2
and returns just the qoi if jac is False or the qoi and its gradient if jac
is True"""
y = dv[0] / 2.0
z = dv[1] / 2.0 + 12
q = 0.25 * ((y ** 2 + z ** 2) / 10 + 5 * u[0] * u[1] - z * u[1] ** 2) + 0.2 * z * u[1] ** 3 + 7
if not jac:
return q # depends on [control=['if'], data=[]]
else:
dqdx1 = 1.0 / 8.0 * (2 * y / 10.0)
dqdx2 = 1.0 / 8.0 * (2 * z / 10.0 - u[1] ** 2) + 0.1 * u[1] ** 3
return (q, [dqdx1, dqdx2]) |
def find_node_name(each_line,temp_func_list):
"""
Find the slave machine where a Jenkins job was executed on. It will save this
information in g_failed_test_info_dict. In addition, it will
delete this particular function handle off the temp_func_list as we do not need
to perform this action again.
Parameters
----------
each_line : str
contains a line read in from jenkins console
temp_func_list : list of Python function handles
contains a list of functions that we want to invoke to extract information from
the Jenkins console text.
:return: bool to determine if text mining should continue on the jenkins console text
"""
global g_node_name
global g_failed_test_info_dict
if g_node_name in each_line:
temp_strings = each_line.split()
[start,found,endstr] = each_line.partition(g_node_name)
if found:
temp_strings = endstr.split()
g_failed_test_info_dict["6.node_name"] = extract_true_string(temp_strings[1])
temp_func_list.remove(find_node_name)
return True | def function[find_node_name, parameter[each_line, temp_func_list]]:
constant[
Find the slave machine where a Jenkins job was executed on. It will save this
information in g_failed_test_info_dict. In addition, it will
delete this particular function handle off the temp_func_list as we do not need
to perform this action again.
Parameters
----------
each_line : str
contains a line read in from jenkins console
temp_func_list : list of Python function handles
contains a list of functions that we want to invoke to extract information from
the Jenkins console text.
:return: bool to determine if text mining should continue on the jenkins console text
]
<ast.Global object at 0x7da20cabf6a0>
<ast.Global object at 0x7da20cabfb20>
if compare[name[g_node_name] in name[each_line]] begin[:]
variable[temp_strings] assign[=] call[name[each_line].split, parameter[]]
<ast.List object at 0x7da20cabe6e0> assign[=] call[name[each_line].partition, parameter[name[g_node_name]]]
if name[found] begin[:]
variable[temp_strings] assign[=] call[name[endstr].split, parameter[]]
call[name[g_failed_test_info_dict]][constant[6.node_name]] assign[=] call[name[extract_true_string], parameter[call[name[temp_strings]][constant[1]]]]
call[name[temp_func_list].remove, parameter[name[find_node_name]]]
return[constant[True]] | keyword[def] identifier[find_node_name] ( identifier[each_line] , identifier[temp_func_list] ):
literal[string]
keyword[global] identifier[g_node_name]
keyword[global] identifier[g_failed_test_info_dict]
keyword[if] identifier[g_node_name] keyword[in] identifier[each_line] :
identifier[temp_strings] = identifier[each_line] . identifier[split] ()
[ identifier[start] , identifier[found] , identifier[endstr] ]= identifier[each_line] . identifier[partition] ( identifier[g_node_name] )
keyword[if] identifier[found] :
identifier[temp_strings] = identifier[endstr] . identifier[split] ()
identifier[g_failed_test_info_dict] [ literal[string] ]= identifier[extract_true_string] ( identifier[temp_strings] [ literal[int] ])
identifier[temp_func_list] . identifier[remove] ( identifier[find_node_name] )
keyword[return] keyword[True] | def find_node_name(each_line, temp_func_list):
"""
Find the slave machine where a Jenkins job was executed on. It will save this
information in g_failed_test_info_dict. In addition, it will
delete this particular function handle off the temp_func_list as we do not need
to perform this action again.
Parameters
----------
each_line : str
contains a line read in from jenkins console
temp_func_list : list of Python function handles
contains a list of functions that we want to invoke to extract information from
the Jenkins console text.
:return: bool to determine if text mining should continue on the jenkins console text
"""
global g_node_name
global g_failed_test_info_dict
if g_node_name in each_line:
temp_strings = each_line.split()
[start, found, endstr] = each_line.partition(g_node_name)
if found:
temp_strings = endstr.split()
g_failed_test_info_dict['6.node_name'] = extract_true_string(temp_strings[1])
temp_func_list.remove(find_node_name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['g_node_name', 'each_line']]
return True |
def create_temp_user_avatar(self,
user,
filename,
size,
avatar_img,
contentType=None,
auto_confirm=False,
):
"""Register an image file as a user avatar.
The avatar created is temporary and must be confirmed before it can
be used.
Avatar images are specified by a filename, size, and file object. By default, the client will attempt to
autodetect the picture's content type: this mechanism relies on ``libmagic`` and will not work out of the box
on Windows systems (see http://filemagic.readthedocs.org/en/latest/guide.html for details on how to install
support). The ``contentType`` argument can be used to explicitly set the value (note that JIRA will reject any
type other than the well-known ones for images, e.g. ``image/jpg``, ``image/png``, etc.)
This method returns a dict of properties that can be used to crop a subarea of a larger image for use. This
dict should be saved and passed to :py:meth:`confirm_user_avatar` to finish the avatar creation process. If you
want to cut out the middleman and confirm the avatar with JIRA's default cropping, pass the ``auto_confirm``
argument with a truthy value and :py:meth:`confirm_user_avatar` will be called for you before this method
returns.
:param user: User to register the avatar for
:type user: str
:param filename: name of the avatar file
:type filename: str
:param size: size of the avatar file
:type size: int
:param avatar_img: file-like object containing the avatar
:type avatar_img: bytes
:param contentType: explicit specification for the avatar image's content-type
:type contentType: Optional[Any]
:param auto_confirm: whether to automatically confirm the temporary avatar by calling
:py:meth:`confirm_user_avatar` with the return value of this method. (Default: False)
:type auto_confirm: bool
:rtype: NoReturn
"""
size_from_file = os.path.getsize(filename)
if size != size_from_file:
size = size_from_file
# remove path from filename
filename = os.path.split(filename)[1]
params = {
'username': user,
'filename': filename,
'size': size}
headers = {'X-Atlassian-Token': 'no-check'}
if contentType is not None:
headers['content-type'] = contentType
else:
# try to detect content-type, this may return None
headers['content-type'] = self._get_mime_type(avatar_img)
url = self._get_url('user/avatar/temporary')
r = self._session.post(
url, params=params, headers=headers, data=avatar_img)
cropping_properties = json_loads(r)
if auto_confirm:
return self.confirm_user_avatar(user, cropping_properties)
else:
return cropping_properties | def function[create_temp_user_avatar, parameter[self, user, filename, size, avatar_img, contentType, auto_confirm]]:
constant[Register an image file as a user avatar.
The avatar created is temporary and must be confirmed before it can
be used.
Avatar images are specified by a filename, size, and file object. By default, the client will attempt to
autodetect the picture's content type: this mechanism relies on ``libmagic`` and will not work out of the box
on Windows systems (see http://filemagic.readthedocs.org/en/latest/guide.html for details on how to install
support). The ``contentType`` argument can be used to explicitly set the value (note that JIRA will reject any
type other than the well-known ones for images, e.g. ``image/jpg``, ``image/png``, etc.)
This method returns a dict of properties that can be used to crop a subarea of a larger image for use. This
dict should be saved and passed to :py:meth:`confirm_user_avatar` to finish the avatar creation process. If you
want to cut out the middleman and confirm the avatar with JIRA's default cropping, pass the ``auto_confirm``
argument with a truthy value and :py:meth:`confirm_user_avatar` will be called for you before this method
returns.
:param user: User to register the avatar for
:type user: str
:param filename: name of the avatar file
:type filename: str
:param size: size of the avatar file
:type size: int
:param avatar_img: file-like object containing the avatar
:type avatar_img: bytes
:param contentType: explicit specification for the avatar image's content-type
:type contentType: Optional[Any]
:param auto_confirm: whether to automatically confirm the temporary avatar by calling
:py:meth:`confirm_user_avatar` with the return value of this method. (Default: False)
:type auto_confirm: bool
:rtype: NoReturn
]
variable[size_from_file] assign[=] call[name[os].path.getsize, parameter[name[filename]]]
if compare[name[size] not_equal[!=] name[size_from_file]] begin[:]
variable[size] assign[=] name[size_from_file]
variable[filename] assign[=] call[call[name[os].path.split, parameter[name[filename]]]][constant[1]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b216f2b0>, <ast.Constant object at 0x7da1b216c580>, <ast.Constant object at 0x7da1b216d4b0>], [<ast.Name object at 0x7da1b216ca00>, <ast.Name object at 0x7da1b216d510>, <ast.Name object at 0x7da1b216d0c0>]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b216c9d0>], [<ast.Constant object at 0x7da1b216d810>]]
if compare[name[contentType] is_not constant[None]] begin[:]
call[name[headers]][constant[content-type]] assign[=] name[contentType]
variable[url] assign[=] call[name[self]._get_url, parameter[constant[user/avatar/temporary]]]
variable[r] assign[=] call[name[self]._session.post, parameter[name[url]]]
variable[cropping_properties] assign[=] call[name[json_loads], parameter[name[r]]]
if name[auto_confirm] begin[:]
return[call[name[self].confirm_user_avatar, parameter[name[user], name[cropping_properties]]]] | keyword[def] identifier[create_temp_user_avatar] ( identifier[self] ,
identifier[user] ,
identifier[filename] ,
identifier[size] ,
identifier[avatar_img] ,
identifier[contentType] = keyword[None] ,
identifier[auto_confirm] = keyword[False] ,
):
literal[string]
identifier[size_from_file] = identifier[os] . identifier[path] . identifier[getsize] ( identifier[filename] )
keyword[if] identifier[size] != identifier[size_from_file] :
identifier[size] = identifier[size_from_file]
identifier[filename] = identifier[os] . identifier[path] . identifier[split] ( identifier[filename] )[ literal[int] ]
identifier[params] ={
literal[string] : identifier[user] ,
literal[string] : identifier[filename] ,
literal[string] : identifier[size] }
identifier[headers] ={ literal[string] : literal[string] }
keyword[if] identifier[contentType] keyword[is] keyword[not] keyword[None] :
identifier[headers] [ literal[string] ]= identifier[contentType]
keyword[else] :
identifier[headers] [ literal[string] ]= identifier[self] . identifier[_get_mime_type] ( identifier[avatar_img] )
identifier[url] = identifier[self] . identifier[_get_url] ( literal[string] )
identifier[r] = identifier[self] . identifier[_session] . identifier[post] (
identifier[url] , identifier[params] = identifier[params] , identifier[headers] = identifier[headers] , identifier[data] = identifier[avatar_img] )
identifier[cropping_properties] = identifier[json_loads] ( identifier[r] )
keyword[if] identifier[auto_confirm] :
keyword[return] identifier[self] . identifier[confirm_user_avatar] ( identifier[user] , identifier[cropping_properties] )
keyword[else] :
keyword[return] identifier[cropping_properties] | def create_temp_user_avatar(self, user, filename, size, avatar_img, contentType=None, auto_confirm=False):
"""Register an image file as a user avatar.
The avatar created is temporary and must be confirmed before it can
be used.
Avatar images are specified by a filename, size, and file object. By default, the client will attempt to
autodetect the picture's content type: this mechanism relies on ``libmagic`` and will not work out of the box
on Windows systems (see http://filemagic.readthedocs.org/en/latest/guide.html for details on how to install
support). The ``contentType`` argument can be used to explicitly set the value (note that JIRA will reject any
type other than the well-known ones for images, e.g. ``image/jpg``, ``image/png``, etc.)
This method returns a dict of properties that can be used to crop a subarea of a larger image for use. This
dict should be saved and passed to :py:meth:`confirm_user_avatar` to finish the avatar creation process. If you
want to cut out the middleman and confirm the avatar with JIRA's default cropping, pass the ``auto_confirm``
argument with a truthy value and :py:meth:`confirm_user_avatar` will be called for you before this method
returns.
:param user: User to register the avatar for
:type user: str
:param filename: name of the avatar file
:type filename: str
:param size: size of the avatar file
:type size: int
:param avatar_img: file-like object containing the avatar
:type avatar_img: bytes
:param contentType: explicit specification for the avatar image's content-type
:type contentType: Optional[Any]
:param auto_confirm: whether to automatically confirm the temporary avatar by calling
:py:meth:`confirm_user_avatar` with the return value of this method. (Default: False)
:type auto_confirm: bool
:rtype: NoReturn
"""
size_from_file = os.path.getsize(filename)
if size != size_from_file:
size = size_from_file # depends on [control=['if'], data=['size', 'size_from_file']]
# remove path from filename
filename = os.path.split(filename)[1]
params = {'username': user, 'filename': filename, 'size': size}
headers = {'X-Atlassian-Token': 'no-check'}
if contentType is not None:
headers['content-type'] = contentType # depends on [control=['if'], data=['contentType']]
else:
# try to detect content-type, this may return None
headers['content-type'] = self._get_mime_type(avatar_img)
url = self._get_url('user/avatar/temporary')
r = self._session.post(url, params=params, headers=headers, data=avatar_img)
cropping_properties = json_loads(r)
if auto_confirm:
return self.confirm_user_avatar(user, cropping_properties) # depends on [control=['if'], data=[]]
else:
return cropping_properties |
def point_reflect(pt, nv):
""" Reflect a 3-D point through a plane intersecting the origin.
nv defines the normal vector to the plane (needs not be normalized)
.. todo:: Complete point_reflect docstring
Raises
------
ValueError : If pt or nv are not reducible to 3-D vectors
ValueError : If norm of nv is too small
"""
# Imports
import numpy as np
from scipy import linalg as spla
# Ensure pt is reducible to 3-D vector
pt = make_nd_vec(pt, nd=3, t=np.float64, norm=False)
# Transform the point and return
refl_pt = np.dot(mtx_refl(nv, reps=1), pt)
return refl_pt | def function[point_reflect, parameter[pt, nv]]:
constant[ Reflect a 3-D point through a plane intersecting the origin.
nv defines the normal vector to the plane (needs not be normalized)
.. todo:: Complete point_reflect docstring
Raises
------
ValueError : If pt or nv are not reducible to 3-D vectors
ValueError : If norm of nv is too small
]
import module[numpy] as alias[np]
from relative_module[scipy] import module[linalg]
variable[pt] assign[=] call[name[make_nd_vec], parameter[name[pt]]]
variable[refl_pt] assign[=] call[name[np].dot, parameter[call[name[mtx_refl], parameter[name[nv]]], name[pt]]]
return[name[refl_pt]] | keyword[def] identifier[point_reflect] ( identifier[pt] , identifier[nv] ):
literal[string]
keyword[import] identifier[numpy] keyword[as] identifier[np]
keyword[from] identifier[scipy] keyword[import] identifier[linalg] keyword[as] identifier[spla]
identifier[pt] = identifier[make_nd_vec] ( identifier[pt] , identifier[nd] = literal[int] , identifier[t] = identifier[np] . identifier[float64] , identifier[norm] = keyword[False] )
identifier[refl_pt] = identifier[np] . identifier[dot] ( identifier[mtx_refl] ( identifier[nv] , identifier[reps] = literal[int] ), identifier[pt] )
keyword[return] identifier[refl_pt] | def point_reflect(pt, nv):
""" Reflect a 3-D point through a plane intersecting the origin.
nv defines the normal vector to the plane (needs not be normalized)
.. todo:: Complete point_reflect docstring
Raises
------
ValueError : If pt or nv are not reducible to 3-D vectors
ValueError : If norm of nv is too small
"""
# Imports
import numpy as np
from scipy import linalg as spla
# Ensure pt is reducible to 3-D vector
pt = make_nd_vec(pt, nd=3, t=np.float64, norm=False)
# Transform the point and return
refl_pt = np.dot(mtx_refl(nv, reps=1), pt)
return refl_pt |
def gauss_fltr_pyramid(dem, size=None, full=False, origmask=False):
"""Pyaramidal downsampling approach for gaussian smoothing
Avoids the need for large kernels, very fast
Needs testing
"""
dem = malib.checkma(dem)
levels = int(np.floor(np.log2(size)))
#print levels
dim = np.floor(np.array(dem.shape)/float(2**levels) + 1)*(2**levels)
#print dem.shape
#print dim
#Can do something with np.pad here
#np.pad(a_fp.filled(), 1, mode='constant', constant_values=(a_fp.fill_value,))
dem2 = np.full(dim, dem.fill_value)
offset = (dim - np.array(dem.shape))/2.0
#print offset
#dem2[0:dem.shape[0],0:dem.shape[1]] = dem.data
dem2[offset[0]:dem.shape[0]+offset[0],offset[1]:dem.shape[1]+offset[1]] = dem.data
dem2 = np.ma.masked_equal(dem2, dem.fill_value)
#dem2 = dem
for n in range(levels):
print(dem2.shape)
dim = (np.floor(np.array(dem2.shape)/2.0 + 1)*2).astype(int)
#dem2 = gauss_fltr_astropy(dem2, size=5, origmask=origmask)
#dem2 = gauss_fltr_astropy(dem2, size=5)
dem2 = gauss_fltr_astropy(dem2, size=5)
#Note: Should use zoom with same bilinear interpolation here for consistency
#However, this doesn't respect nan
#dem2 = zoom(dem2, 0.5, order=1, prefilter=False, cval=dem.fill_value)
dem2 = dem2[::2,::2]
if full:
print("Resizing to original input dimensions")
from scipy.ndimage import zoom
for n in range(levels):
print(dem2.shape)
#Note: order 1 is bilinear
dem2 = zoom(dem2, 2, order=1, prefilter=False, cval=dem.fill_value)
#dem2 = zoom(dem2, 2**levels, order=1, prefilter=False, cval=dem2.fill_value)
print(dem2.shape)
#This was for power of 2 offset
#offset = (2**levels)/2
#print offset
#dem2 = dem2[offset:dem.shape[0]+offset,offset:dem.shape[1]+offset]
#Use original offset
dem2 = dem2[offset[0]:dem.shape[0]+offset[0],offset[1]:dem.shape[1]+offset[1]]
if origmask:
print("Applying original mask")
#Allow filling of interior holes, but use original outer edge
maskfill = malib.maskfill(dem)
#dem2 = np.ma.array(dem2, mask=np.ma.getmaskarray(dem))
dem2 = np.ma.array(dem2, mask=maskfill, fill_value=dem.fill_value)
return dem2 | def function[gauss_fltr_pyramid, parameter[dem, size, full, origmask]]:
constant[Pyaramidal downsampling approach for gaussian smoothing
Avoids the need for large kernels, very fast
Needs testing
]
variable[dem] assign[=] call[name[malib].checkma, parameter[name[dem]]]
variable[levels] assign[=] call[name[int], parameter[call[name[np].floor, parameter[call[name[np].log2, parameter[name[size]]]]]]]
variable[dim] assign[=] binary_operation[call[name[np].floor, parameter[binary_operation[binary_operation[call[name[np].array, parameter[name[dem].shape]] / call[name[float], parameter[binary_operation[constant[2] ** name[levels]]]]] + constant[1]]]] * binary_operation[constant[2] ** name[levels]]]
variable[dem2] assign[=] call[name[np].full, parameter[name[dim], name[dem].fill_value]]
variable[offset] assign[=] binary_operation[binary_operation[name[dim] - call[name[np].array, parameter[name[dem].shape]]] / constant[2.0]]
call[name[dem2]][tuple[[<ast.Slice object at 0x7da1b0604670>, <ast.Slice object at 0x7da1b0607f10>]]] assign[=] name[dem].data
variable[dem2] assign[=] call[name[np].ma.masked_equal, parameter[name[dem2], name[dem].fill_value]]
for taget[name[n]] in starred[call[name[range], parameter[name[levels]]]] begin[:]
call[name[print], parameter[name[dem2].shape]]
variable[dim] assign[=] call[binary_operation[call[name[np].floor, parameter[binary_operation[binary_operation[call[name[np].array, parameter[name[dem2].shape]] / constant[2.0]] + constant[1]]]] * constant[2]].astype, parameter[name[int]]]
variable[dem2] assign[=] call[name[gauss_fltr_astropy], parameter[name[dem2]]]
variable[dem2] assign[=] call[name[dem2]][tuple[[<ast.Slice object at 0x7da1b06047f0>, <ast.Slice object at 0x7da1b0605510>]]]
if name[full] begin[:]
call[name[print], parameter[constant[Resizing to original input dimensions]]]
from relative_module[scipy.ndimage] import module[zoom]
for taget[name[n]] in starred[call[name[range], parameter[name[levels]]]] begin[:]
call[name[print], parameter[name[dem2].shape]]
variable[dem2] assign[=] call[name[zoom], parameter[name[dem2], constant[2]]]
call[name[print], parameter[name[dem2].shape]]
variable[dem2] assign[=] call[name[dem2]][tuple[[<ast.Slice object at 0x7da1b0606620>, <ast.Slice object at 0x7da1b0606680>]]]
if name[origmask] begin[:]
call[name[print], parameter[constant[Applying original mask]]]
variable[maskfill] assign[=] call[name[malib].maskfill, parameter[name[dem]]]
variable[dem2] assign[=] call[name[np].ma.array, parameter[name[dem2]]]
return[name[dem2]] | keyword[def] identifier[gauss_fltr_pyramid] ( identifier[dem] , identifier[size] = keyword[None] , identifier[full] = keyword[False] , identifier[origmask] = keyword[False] ):
literal[string]
identifier[dem] = identifier[malib] . identifier[checkma] ( identifier[dem] )
identifier[levels] = identifier[int] ( identifier[np] . identifier[floor] ( identifier[np] . identifier[log2] ( identifier[size] )))
identifier[dim] = identifier[np] . identifier[floor] ( identifier[np] . identifier[array] ( identifier[dem] . identifier[shape] )/ identifier[float] ( literal[int] ** identifier[levels] )+ literal[int] )*( literal[int] ** identifier[levels] )
identifier[dem2] = identifier[np] . identifier[full] ( identifier[dim] , identifier[dem] . identifier[fill_value] )
identifier[offset] =( identifier[dim] - identifier[np] . identifier[array] ( identifier[dem] . identifier[shape] ))/ literal[int]
identifier[dem2] [ identifier[offset] [ literal[int] ]: identifier[dem] . identifier[shape] [ literal[int] ]+ identifier[offset] [ literal[int] ], identifier[offset] [ literal[int] ]: identifier[dem] . identifier[shape] [ literal[int] ]+ identifier[offset] [ literal[int] ]]= identifier[dem] . identifier[data]
identifier[dem2] = identifier[np] . identifier[ma] . identifier[masked_equal] ( identifier[dem2] , identifier[dem] . identifier[fill_value] )
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[levels] ):
identifier[print] ( identifier[dem2] . identifier[shape] )
identifier[dim] =( identifier[np] . identifier[floor] ( identifier[np] . identifier[array] ( identifier[dem2] . identifier[shape] )/ literal[int] + literal[int] )* literal[int] ). identifier[astype] ( identifier[int] )
identifier[dem2] = identifier[gauss_fltr_astropy] ( identifier[dem2] , identifier[size] = literal[int] )
identifier[dem2] = identifier[dem2] [:: literal[int] ,:: literal[int] ]
keyword[if] identifier[full] :
identifier[print] ( literal[string] )
keyword[from] identifier[scipy] . identifier[ndimage] keyword[import] identifier[zoom]
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[levels] ):
identifier[print] ( identifier[dem2] . identifier[shape] )
identifier[dem2] = identifier[zoom] ( identifier[dem2] , literal[int] , identifier[order] = literal[int] , identifier[prefilter] = keyword[False] , identifier[cval] = identifier[dem] . identifier[fill_value] )
identifier[print] ( identifier[dem2] . identifier[shape] )
identifier[dem2] = identifier[dem2] [ identifier[offset] [ literal[int] ]: identifier[dem] . identifier[shape] [ literal[int] ]+ identifier[offset] [ literal[int] ], identifier[offset] [ literal[int] ]: identifier[dem] . identifier[shape] [ literal[int] ]+ identifier[offset] [ literal[int] ]]
keyword[if] identifier[origmask] :
identifier[print] ( literal[string] )
identifier[maskfill] = identifier[malib] . identifier[maskfill] ( identifier[dem] )
identifier[dem2] = identifier[np] . identifier[ma] . identifier[array] ( identifier[dem2] , identifier[mask] = identifier[maskfill] , identifier[fill_value] = identifier[dem] . identifier[fill_value] )
keyword[return] identifier[dem2] | def gauss_fltr_pyramid(dem, size=None, full=False, origmask=False):
"""Pyaramidal downsampling approach for gaussian smoothing
Avoids the need for large kernels, very fast
Needs testing
"""
dem = malib.checkma(dem)
levels = int(np.floor(np.log2(size)))
#print levels
dim = np.floor(np.array(dem.shape) / float(2 ** levels) + 1) * 2 ** levels
#print dem.shape
#print dim
#Can do something with np.pad here
#np.pad(a_fp.filled(), 1, mode='constant', constant_values=(a_fp.fill_value,))
dem2 = np.full(dim, dem.fill_value)
offset = (dim - np.array(dem.shape)) / 2.0
#print offset
#dem2[0:dem.shape[0],0:dem.shape[1]] = dem.data
dem2[offset[0]:dem.shape[0] + offset[0], offset[1]:dem.shape[1] + offset[1]] = dem.data
dem2 = np.ma.masked_equal(dem2, dem.fill_value)
#dem2 = dem
for n in range(levels):
print(dem2.shape)
dim = (np.floor(np.array(dem2.shape) / 2.0 + 1) * 2).astype(int)
#dem2 = gauss_fltr_astropy(dem2, size=5, origmask=origmask)
#dem2 = gauss_fltr_astropy(dem2, size=5)
dem2 = gauss_fltr_astropy(dem2, size=5)
#Note: Should use zoom with same bilinear interpolation here for consistency
#However, this doesn't respect nan
#dem2 = zoom(dem2, 0.5, order=1, prefilter=False, cval=dem.fill_value)
dem2 = dem2[::2, ::2] # depends on [control=['for'], data=[]]
if full:
print('Resizing to original input dimensions')
from scipy.ndimage import zoom
for n in range(levels):
print(dem2.shape)
#Note: order 1 is bilinear
dem2 = zoom(dem2, 2, order=1, prefilter=False, cval=dem.fill_value) # depends on [control=['for'], data=[]]
#dem2 = zoom(dem2, 2**levels, order=1, prefilter=False, cval=dem2.fill_value)
print(dem2.shape)
#This was for power of 2 offset
#offset = (2**levels)/2
#print offset
#dem2 = dem2[offset:dem.shape[0]+offset,offset:dem.shape[1]+offset]
#Use original offset
dem2 = dem2[offset[0]:dem.shape[0] + offset[0], offset[1]:dem.shape[1] + offset[1]]
if origmask:
print('Applying original mask')
#Allow filling of interior holes, but use original outer edge
maskfill = malib.maskfill(dem)
#dem2 = np.ma.array(dem2, mask=np.ma.getmaskarray(dem))
dem2 = np.ma.array(dem2, mask=maskfill, fill_value=dem.fill_value) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return dem2 |
def compute_with_trace(*args):
"""Do Dask compute(), but with added Eliot tracing.
Dask is a graph of tasks, but Eliot logs trees. So we need to emulate a
graph using a tree. We do this by making Eliot action for each task, but
having it list the tasks it depends on.
We use the following algorithm:
1. Create a top-level action.
2. For each entry in the dask graph, create a child with
serialize_task_id. Do this in likely order of execution, so that
if B depends on A the task level of B is higher than the task Ievel
of A.
3. Replace each function with a wrapper that uses the corresponding
task ID (with Action.continue_task), and while it's at it also
records which other things this function depends on.
Known issues:
1. Retries will confuse Eliot. Probably need different
distributed-tree mechanism within Eliot to solve that.
"""
# 1. Create top-level Eliot Action:
with start_action(action_type="dask:compute"):
# In order to reduce logging verbosity, add logging to the already
# optimized graph:
optimized = optimize(*args, optimizations=[_add_logging])
return compute(*optimized, optimize_graph=False) | def function[compute_with_trace, parameter[]]:
constant[Do Dask compute(), but with added Eliot tracing.
Dask is a graph of tasks, but Eliot logs trees. So we need to emulate a
graph using a tree. We do this by making Eliot action for each task, but
having it list the tasks it depends on.
We use the following algorithm:
1. Create a top-level action.
2. For each entry in the dask graph, create a child with
serialize_task_id. Do this in likely order of execution, so that
if B depends on A the task level of B is higher than the task Ievel
of A.
3. Replace each function with a wrapper that uses the corresponding
task ID (with Action.continue_task), and while it's at it also
records which other things this function depends on.
Known issues:
1. Retries will confuse Eliot. Probably need different
distributed-tree mechanism within Eliot to solve that.
]
with call[name[start_action], parameter[]] begin[:]
variable[optimized] assign[=] call[name[optimize], parameter[<ast.Starred object at 0x7da18fe920b0>]]
return[call[name[compute], parameter[<ast.Starred object at 0x7da18fe93d30>]]] | keyword[def] identifier[compute_with_trace] (* identifier[args] ):
literal[string]
keyword[with] identifier[start_action] ( identifier[action_type] = literal[string] ):
identifier[optimized] = identifier[optimize] (* identifier[args] , identifier[optimizations] =[ identifier[_add_logging] ])
keyword[return] identifier[compute] (* identifier[optimized] , identifier[optimize_graph] = keyword[False] ) | def compute_with_trace(*args):
"""Do Dask compute(), but with added Eliot tracing.
Dask is a graph of tasks, but Eliot logs trees. So we need to emulate a
graph using a tree. We do this by making Eliot action for each task, but
having it list the tasks it depends on.
We use the following algorithm:
1. Create a top-level action.
2. For each entry in the dask graph, create a child with
serialize_task_id. Do this in likely order of execution, so that
if B depends on A the task level of B is higher than the task Ievel
of A.
3. Replace each function with a wrapper that uses the corresponding
task ID (with Action.continue_task), and while it's at it also
records which other things this function depends on.
Known issues:
1. Retries will confuse Eliot. Probably need different
distributed-tree mechanism within Eliot to solve that.
"""
# 1. Create top-level Eliot Action:
with start_action(action_type='dask:compute'):
# In order to reduce logging verbosity, add logging to the already
# optimized graph:
optimized = optimize(*args, optimizations=[_add_logging])
return compute(*optimized, optimize_graph=False) # depends on [control=['with'], data=[]] |
def predict_image(img, model_func):
"""
Run detection on one image, using the TF callable.
This function should handle the preprocessing internally.
Args:
img: an image
model_func: a callable from the TF model.
It takes image and returns (boxes, probs, labels, [masks])
Returns:
[DetectionResult]
"""
orig_shape = img.shape[:2]
resizer = CustomResize(cfg.PREPROC.TEST_SHORT_EDGE_SIZE, cfg.PREPROC.MAX_SIZE)
resized_img = resizer.augment(img)
scale = np.sqrt(resized_img.shape[0] * 1.0 / img.shape[0] * resized_img.shape[1] / img.shape[1])
boxes, probs, labels, *masks = model_func(resized_img)
boxes = boxes / scale
# boxes are already clipped inside the graph, but after the floating point scaling, this may not be true any more.
boxes = clip_boxes(boxes, orig_shape)
if masks:
# has mask
full_masks = [_paste_mask(box, mask, orig_shape)
for box, mask in zip(boxes, masks[0])]
masks = full_masks
else:
# fill with none
masks = [None] * len(boxes)
results = [DetectionResult(*args) for args in zip(boxes, probs, labels.tolist(), masks)]
return results | def function[predict_image, parameter[img, model_func]]:
constant[
Run detection on one image, using the TF callable.
This function should handle the preprocessing internally.
Args:
img: an image
model_func: a callable from the TF model.
It takes image and returns (boxes, probs, labels, [masks])
Returns:
[DetectionResult]
]
variable[orig_shape] assign[=] call[name[img].shape][<ast.Slice object at 0x7da207f03a30>]
variable[resizer] assign[=] call[name[CustomResize], parameter[name[cfg].PREPROC.TEST_SHORT_EDGE_SIZE, name[cfg].PREPROC.MAX_SIZE]]
variable[resized_img] assign[=] call[name[resizer].augment, parameter[name[img]]]
variable[scale] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[binary_operation[binary_operation[call[name[resized_img].shape][constant[0]] * constant[1.0]] / call[name[img].shape][constant[0]]] * call[name[resized_img].shape][constant[1]]] / call[name[img].shape][constant[1]]]]]
<ast.Tuple object at 0x7da207f026e0> assign[=] call[name[model_func], parameter[name[resized_img]]]
variable[boxes] assign[=] binary_operation[name[boxes] / name[scale]]
variable[boxes] assign[=] call[name[clip_boxes], parameter[name[boxes], name[orig_shape]]]
if name[masks] begin[:]
variable[full_masks] assign[=] <ast.ListComp object at 0x7da207f00d30>
variable[masks] assign[=] name[full_masks]
variable[results] assign[=] <ast.ListComp object at 0x7da207f00c10>
return[name[results]] | keyword[def] identifier[predict_image] ( identifier[img] , identifier[model_func] ):
literal[string]
identifier[orig_shape] = identifier[img] . identifier[shape] [: literal[int] ]
identifier[resizer] = identifier[CustomResize] ( identifier[cfg] . identifier[PREPROC] . identifier[TEST_SHORT_EDGE_SIZE] , identifier[cfg] . identifier[PREPROC] . identifier[MAX_SIZE] )
identifier[resized_img] = identifier[resizer] . identifier[augment] ( identifier[img] )
identifier[scale] = identifier[np] . identifier[sqrt] ( identifier[resized_img] . identifier[shape] [ literal[int] ]* literal[int] / identifier[img] . identifier[shape] [ literal[int] ]* identifier[resized_img] . identifier[shape] [ literal[int] ]/ identifier[img] . identifier[shape] [ literal[int] ])
identifier[boxes] , identifier[probs] , identifier[labels] ,* identifier[masks] = identifier[model_func] ( identifier[resized_img] )
identifier[boxes] = identifier[boxes] / identifier[scale]
identifier[boxes] = identifier[clip_boxes] ( identifier[boxes] , identifier[orig_shape] )
keyword[if] identifier[masks] :
identifier[full_masks] =[ identifier[_paste_mask] ( identifier[box] , identifier[mask] , identifier[orig_shape] )
keyword[for] identifier[box] , identifier[mask] keyword[in] identifier[zip] ( identifier[boxes] , identifier[masks] [ literal[int] ])]
identifier[masks] = identifier[full_masks]
keyword[else] :
identifier[masks] =[ keyword[None] ]* identifier[len] ( identifier[boxes] )
identifier[results] =[ identifier[DetectionResult] (* identifier[args] ) keyword[for] identifier[args] keyword[in] identifier[zip] ( identifier[boxes] , identifier[probs] , identifier[labels] . identifier[tolist] (), identifier[masks] )]
keyword[return] identifier[results] | def predict_image(img, model_func):
"""
Run detection on one image, using the TF callable.
This function should handle the preprocessing internally.
Args:
img: an image
model_func: a callable from the TF model.
It takes image and returns (boxes, probs, labels, [masks])
Returns:
[DetectionResult]
"""
orig_shape = img.shape[:2]
resizer = CustomResize(cfg.PREPROC.TEST_SHORT_EDGE_SIZE, cfg.PREPROC.MAX_SIZE)
resized_img = resizer.augment(img)
scale = np.sqrt(resized_img.shape[0] * 1.0 / img.shape[0] * resized_img.shape[1] / img.shape[1])
(boxes, probs, labels, *masks) = model_func(resized_img)
boxes = boxes / scale
# boxes are already clipped inside the graph, but after the floating point scaling, this may not be true any more.
boxes = clip_boxes(boxes, orig_shape)
if masks:
# has mask
full_masks = [_paste_mask(box, mask, orig_shape) for (box, mask) in zip(boxes, masks[0])]
masks = full_masks # depends on [control=['if'], data=[]]
else:
# fill with none
masks = [None] * len(boxes)
results = [DetectionResult(*args) for args in zip(boxes, probs, labels.tolist(), masks)]
return results |
def getattr(self, obj, attribute):
"""Get an item or attribute of an object but prefer the attribute.
Unlike :meth:`getitem` the attribute *must* be a bytestring.
"""
try:
return getattr(obj, attribute)
except AttributeError:
pass
try:
return obj[attribute]
except (TypeError, LookupError, AttributeError):
return self.undefined(obj=obj, name=attribute) | def function[getattr, parameter[self, obj, attribute]]:
constant[Get an item or attribute of an object but prefer the attribute.
Unlike :meth:`getitem` the attribute *must* be a bytestring.
]
<ast.Try object at 0x7da18ede6d70>
<ast.Try object at 0x7da1b1f82e60> | keyword[def] identifier[getattr] ( identifier[self] , identifier[obj] , identifier[attribute] ):
literal[string]
keyword[try] :
keyword[return] identifier[getattr] ( identifier[obj] , identifier[attribute] )
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[try] :
keyword[return] identifier[obj] [ identifier[attribute] ]
keyword[except] ( identifier[TypeError] , identifier[LookupError] , identifier[AttributeError] ):
keyword[return] identifier[self] . identifier[undefined] ( identifier[obj] = identifier[obj] , identifier[name] = identifier[attribute] ) | def getattr(self, obj, attribute):
"""Get an item or attribute of an object but prefer the attribute.
Unlike :meth:`getitem` the attribute *must* be a bytestring.
"""
try:
return getattr(obj, attribute) # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
try:
return obj[attribute] # depends on [control=['try'], data=[]]
except (TypeError, LookupError, AttributeError):
return self.undefined(obj=obj, name=attribute) # depends on [control=['except'], data=[]] |
def build_extra_args_dict(cl_args):
""" Build extra args map """
# Check parameters
component_parallelism = cl_args['component_parallelism']
runtime_configs = cl_args['runtime_config']
container_number = cl_args['container_number']
# Users need to provide either (component-parallelism || container_number) or runtime-config
if (component_parallelism and runtime_configs) or (container_number and runtime_configs):
raise Exception(
"(component-parallelism or container_num) and runtime-config " +
"can't be updated at the same time")
dict_extra_args = {}
nothing_set = True
if component_parallelism:
dict_extra_args.update({'component_parallelism': component_parallelism})
nothing_set = False
if container_number:
dict_extra_args.update({'container_number': container_number})
nothing_set = False
if runtime_configs:
dict_extra_args.update({'runtime_config': runtime_configs})
nothing_set = False
if nothing_set:
raise Exception(
"Missing arguments --component-parallelism or --runtime-config or --container-number")
if cl_args['dry_run']:
dict_extra_args.update({'dry_run': True})
if 'dry_run_format' in cl_args:
dict_extra_args.update({'dry_run_format': cl_args["dry_run_format"]})
return dict_extra_args | def function[build_extra_args_dict, parameter[cl_args]]:
constant[ Build extra args map ]
variable[component_parallelism] assign[=] call[name[cl_args]][constant[component_parallelism]]
variable[runtime_configs] assign[=] call[name[cl_args]][constant[runtime_config]]
variable[container_number] assign[=] call[name[cl_args]][constant[container_number]]
if <ast.BoolOp object at 0x7da18c4cd300> begin[:]
<ast.Raise object at 0x7da18c4ceda0>
variable[dict_extra_args] assign[=] dictionary[[], []]
variable[nothing_set] assign[=] constant[True]
if name[component_parallelism] begin[:]
call[name[dict_extra_args].update, parameter[dictionary[[<ast.Constant object at 0x7da18c4cc460>], [<ast.Name object at 0x7da18c4cd510>]]]]
variable[nothing_set] assign[=] constant[False]
if name[container_number] begin[:]
call[name[dict_extra_args].update, parameter[dictionary[[<ast.Constant object at 0x7da18c4cd7e0>], [<ast.Name object at 0x7da18c4cd930>]]]]
variable[nothing_set] assign[=] constant[False]
if name[runtime_configs] begin[:]
call[name[dict_extra_args].update, parameter[dictionary[[<ast.Constant object at 0x7da18c4ccb50>], [<ast.Name object at 0x7da18c4cf250>]]]]
variable[nothing_set] assign[=] constant[False]
if name[nothing_set] begin[:]
<ast.Raise object at 0x7da18c4cd0f0>
if call[name[cl_args]][constant[dry_run]] begin[:]
call[name[dict_extra_args].update, parameter[dictionary[[<ast.Constant object at 0x7da2041db190>], [<ast.Constant object at 0x7da2041d8b80>]]]]
if compare[constant[dry_run_format] in name[cl_args]] begin[:]
call[name[dict_extra_args].update, parameter[dictionary[[<ast.Constant object at 0x7da2041dab30>], [<ast.Subscript object at 0x7da2041d8250>]]]]
return[name[dict_extra_args]] | keyword[def] identifier[build_extra_args_dict] ( identifier[cl_args] ):
literal[string]
identifier[component_parallelism] = identifier[cl_args] [ literal[string] ]
identifier[runtime_configs] = identifier[cl_args] [ literal[string] ]
identifier[container_number] = identifier[cl_args] [ literal[string] ]
keyword[if] ( identifier[component_parallelism] keyword[and] identifier[runtime_configs] ) keyword[or] ( identifier[container_number] keyword[and] identifier[runtime_configs] ):
keyword[raise] identifier[Exception] (
literal[string] +
literal[string] )
identifier[dict_extra_args] ={}
identifier[nothing_set] = keyword[True]
keyword[if] identifier[component_parallelism] :
identifier[dict_extra_args] . identifier[update] ({ literal[string] : identifier[component_parallelism] })
identifier[nothing_set] = keyword[False]
keyword[if] identifier[container_number] :
identifier[dict_extra_args] . identifier[update] ({ literal[string] : identifier[container_number] })
identifier[nothing_set] = keyword[False]
keyword[if] identifier[runtime_configs] :
identifier[dict_extra_args] . identifier[update] ({ literal[string] : identifier[runtime_configs] })
identifier[nothing_set] = keyword[False]
keyword[if] identifier[nothing_set] :
keyword[raise] identifier[Exception] (
literal[string] )
keyword[if] identifier[cl_args] [ literal[string] ]:
identifier[dict_extra_args] . identifier[update] ({ literal[string] : keyword[True] })
keyword[if] literal[string] keyword[in] identifier[cl_args] :
identifier[dict_extra_args] . identifier[update] ({ literal[string] : identifier[cl_args] [ literal[string] ]})
keyword[return] identifier[dict_extra_args] | def build_extra_args_dict(cl_args):
""" Build extra args map """
# Check parameters
component_parallelism = cl_args['component_parallelism']
runtime_configs = cl_args['runtime_config']
container_number = cl_args['container_number']
# Users need to provide either (component-parallelism || container_number) or runtime-config
if component_parallelism and runtime_configs or (container_number and runtime_configs):
raise Exception('(component-parallelism or container_num) and runtime-config ' + "can't be updated at the same time") # depends on [control=['if'], data=[]]
dict_extra_args = {}
nothing_set = True
if component_parallelism:
dict_extra_args.update({'component_parallelism': component_parallelism})
nothing_set = False # depends on [control=['if'], data=[]]
if container_number:
dict_extra_args.update({'container_number': container_number})
nothing_set = False # depends on [control=['if'], data=[]]
if runtime_configs:
dict_extra_args.update({'runtime_config': runtime_configs})
nothing_set = False # depends on [control=['if'], data=[]]
if nothing_set:
raise Exception('Missing arguments --component-parallelism or --runtime-config or --container-number') # depends on [control=['if'], data=[]]
if cl_args['dry_run']:
dict_extra_args.update({'dry_run': True})
if 'dry_run_format' in cl_args:
dict_extra_args.update({'dry_run_format': cl_args['dry_run_format']}) # depends on [control=['if'], data=['cl_args']] # depends on [control=['if'], data=[]]
return dict_extra_args |
def create_actor_pts(pts, color, **kwargs):
""" Creates a VTK actor for rendering scatter plots.
:param pts: points
:type pts: vtkFloatArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
"""
# Keyword arguments
array_name = kwargs.get('name', "")
array_index = kwargs.get('index', 0)
point_size = kwargs.get('size', 5)
point_sphere = kwargs.get('point_as_sphere', True)
# Create points
points = vtk.vtkPoints()
points.SetData(pts)
# Create a PolyData object and add points
polydata = vtk.vtkPolyData()
polydata.SetPoints(points)
# Run vertex glyph filter on the points array
vertex_filter = vtk.vtkVertexGlyphFilter()
vertex_filter.SetInputData(polydata)
# Map ploy data to the graphics primitives
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(vertex_filter.GetOutputPort())
mapper.SetArrayName(array_name)
mapper.SetArrayId(array_index)
# Create an actor and set its properties
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(*color)
actor.GetProperty().SetPointSize(point_size)
actor.GetProperty().SetRenderPointsAsSpheres(point_sphere)
# Return the actor
return actor | def function[create_actor_pts, parameter[pts, color]]:
constant[ Creates a VTK actor for rendering scatter plots.
:param pts: points
:type pts: vtkFloatArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
]
variable[array_name] assign[=] call[name[kwargs].get, parameter[constant[name], constant[]]]
variable[array_index] assign[=] call[name[kwargs].get, parameter[constant[index], constant[0]]]
variable[point_size] assign[=] call[name[kwargs].get, parameter[constant[size], constant[5]]]
variable[point_sphere] assign[=] call[name[kwargs].get, parameter[constant[point_as_sphere], constant[True]]]
variable[points] assign[=] call[name[vtk].vtkPoints, parameter[]]
call[name[points].SetData, parameter[name[pts]]]
variable[polydata] assign[=] call[name[vtk].vtkPolyData, parameter[]]
call[name[polydata].SetPoints, parameter[name[points]]]
variable[vertex_filter] assign[=] call[name[vtk].vtkVertexGlyphFilter, parameter[]]
call[name[vertex_filter].SetInputData, parameter[name[polydata]]]
variable[mapper] assign[=] call[name[vtk].vtkPolyDataMapper, parameter[]]
call[name[mapper].SetInputConnection, parameter[call[name[vertex_filter].GetOutputPort, parameter[]]]]
call[name[mapper].SetArrayName, parameter[name[array_name]]]
call[name[mapper].SetArrayId, parameter[name[array_index]]]
variable[actor] assign[=] call[name[vtk].vtkActor, parameter[]]
call[name[actor].SetMapper, parameter[name[mapper]]]
call[call[name[actor].GetProperty, parameter[]].SetColor, parameter[<ast.Starred object at 0x7da1b16320e0>]]
call[call[name[actor].GetProperty, parameter[]].SetPointSize, parameter[name[point_size]]]
call[call[name[actor].GetProperty, parameter[]].SetRenderPointsAsSpheres, parameter[name[point_sphere]]]
return[name[actor]] | keyword[def] identifier[create_actor_pts] ( identifier[pts] , identifier[color] ,** identifier[kwargs] ):
literal[string]
identifier[array_name] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[array_index] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[point_size] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[point_sphere] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] )
identifier[points] = identifier[vtk] . identifier[vtkPoints] ()
identifier[points] . identifier[SetData] ( identifier[pts] )
identifier[polydata] = identifier[vtk] . identifier[vtkPolyData] ()
identifier[polydata] . identifier[SetPoints] ( identifier[points] )
identifier[vertex_filter] = identifier[vtk] . identifier[vtkVertexGlyphFilter] ()
identifier[vertex_filter] . identifier[SetInputData] ( identifier[polydata] )
identifier[mapper] = identifier[vtk] . identifier[vtkPolyDataMapper] ()
identifier[mapper] . identifier[SetInputConnection] ( identifier[vertex_filter] . identifier[GetOutputPort] ())
identifier[mapper] . identifier[SetArrayName] ( identifier[array_name] )
identifier[mapper] . identifier[SetArrayId] ( identifier[array_index] )
identifier[actor] = identifier[vtk] . identifier[vtkActor] ()
identifier[actor] . identifier[SetMapper] ( identifier[mapper] )
identifier[actor] . identifier[GetProperty] (). identifier[SetColor] (* identifier[color] )
identifier[actor] . identifier[GetProperty] (). identifier[SetPointSize] ( identifier[point_size] )
identifier[actor] . identifier[GetProperty] (). identifier[SetRenderPointsAsSpheres] ( identifier[point_sphere] )
keyword[return] identifier[actor] | def create_actor_pts(pts, color, **kwargs):
""" Creates a VTK actor for rendering scatter plots.
:param pts: points
:type pts: vtkFloatArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
"""
# Keyword arguments
array_name = kwargs.get('name', '')
array_index = kwargs.get('index', 0)
point_size = kwargs.get('size', 5)
point_sphere = kwargs.get('point_as_sphere', True)
# Create points
points = vtk.vtkPoints()
points.SetData(pts)
# Create a PolyData object and add points
polydata = vtk.vtkPolyData()
polydata.SetPoints(points)
# Run vertex glyph filter on the points array
vertex_filter = vtk.vtkVertexGlyphFilter()
vertex_filter.SetInputData(polydata)
# Map ploy data to the graphics primitives
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(vertex_filter.GetOutputPort())
mapper.SetArrayName(array_name)
mapper.SetArrayId(array_index)
# Create an actor and set its properties
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(*color)
actor.GetProperty().SetPointSize(point_size)
actor.GetProperty().SetRenderPointsAsSpheres(point_sphere)
# Return the actor
return actor |
def spin_z(particles, index):
"""Generates the spin_z projection operator for a system of
N=particles and for the selected spin index name. where index=0..N-1"""
mat = np.zeros((2**particles, 2**particles))
for i in range(2**particles):
ispin = btest(i, index)
if ispin == 1:
mat[i, i] = 1
else:
mat[i, i] = -1
return 1/2.*mat | def function[spin_z, parameter[particles, index]]:
constant[Generates the spin_z projection operator for a system of
N=particles and for the selected spin index name. where index=0..N-1]
variable[mat] assign[=] call[name[np].zeros, parameter[tuple[[<ast.BinOp object at 0x7da204345630>, <ast.BinOp object at 0x7da204346b00>]]]]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[constant[2] ** name[particles]]]]] begin[:]
variable[ispin] assign[=] call[name[btest], parameter[name[i], name[index]]]
if compare[name[ispin] equal[==] constant[1]] begin[:]
call[name[mat]][tuple[[<ast.Name object at 0x7da204347520>, <ast.Name object at 0x7da2043471f0>]]] assign[=] constant[1]
return[binary_operation[binary_operation[constant[1] / constant[2.0]] * name[mat]]] | keyword[def] identifier[spin_z] ( identifier[particles] , identifier[index] ):
literal[string]
identifier[mat] = identifier[np] . identifier[zeros] (( literal[int] ** identifier[particles] , literal[int] ** identifier[particles] ))
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ** identifier[particles] ):
identifier[ispin] = identifier[btest] ( identifier[i] , identifier[index] )
keyword[if] identifier[ispin] == literal[int] :
identifier[mat] [ identifier[i] , identifier[i] ]= literal[int]
keyword[else] :
identifier[mat] [ identifier[i] , identifier[i] ]=- literal[int]
keyword[return] literal[int] / literal[int] * identifier[mat] | def spin_z(particles, index):
"""Generates the spin_z projection operator for a system of
N=particles and for the selected spin index name. where index=0..N-1"""
mat = np.zeros((2 ** particles, 2 ** particles))
for i in range(2 ** particles):
ispin = btest(i, index)
if ispin == 1:
mat[i, i] = 1 # depends on [control=['if'], data=[]]
else:
mat[i, i] = -1 # depends on [control=['for'], data=['i']]
return 1 / 2.0 * mat |
def get_udim(cls, name):
""" Checks a string for a possible base name of an object (no prefix, no suffix)
:param name: str, string that represents a possible name of an object
:returns: int, the last found match because convention keeps UDIM markers at the end.
"""
match = cls._get_regex_search(name, cls.REGEX_UDIM, match_index=-1)
if match:
match.update({'match_int': int(match['match'])})
return match
return None | def function[get_udim, parameter[cls, name]]:
constant[ Checks a string for a possible base name of an object (no prefix, no suffix)
:param name: str, string that represents a possible name of an object
:returns: int, the last found match because convention keeps UDIM markers at the end.
]
variable[match] assign[=] call[name[cls]._get_regex_search, parameter[name[name], name[cls].REGEX_UDIM]]
if name[match] begin[:]
call[name[match].update, parameter[dictionary[[<ast.Constant object at 0x7da207f02b60>], [<ast.Call object at 0x7da207f015d0>]]]]
return[name[match]]
return[constant[None]] | keyword[def] identifier[get_udim] ( identifier[cls] , identifier[name] ):
literal[string]
identifier[match] = identifier[cls] . identifier[_get_regex_search] ( identifier[name] , identifier[cls] . identifier[REGEX_UDIM] , identifier[match_index] =- literal[int] )
keyword[if] identifier[match] :
identifier[match] . identifier[update] ({ literal[string] : identifier[int] ( identifier[match] [ literal[string] ])})
keyword[return] identifier[match]
keyword[return] keyword[None] | def get_udim(cls, name):
""" Checks a string for a possible base name of an object (no prefix, no suffix)
:param name: str, string that represents a possible name of an object
:returns: int, the last found match because convention keeps UDIM markers at the end.
"""
match = cls._get_regex_search(name, cls.REGEX_UDIM, match_index=-1)
if match:
match.update({'match_int': int(match['match'])})
return match # depends on [control=['if'], data=[]]
return None |
def _process_connect_init(self):
"""
Handles the initial part of the NATS protocol, moving from
the (RE)CONNECTING to CONNECTED states when establishing
a connection with the server.
"""
# INFO {...}
line = yield self.io.read_until(_CRLF_, max_bytes=None)
_, args = line.split(INFO_OP + _SPC_, 1)
self._server_info = tornado.escape.json_decode((args))
if 'max_payload' in self._server_info:
self._max_payload_size = self._server_info["max_payload"]
# Check whether we need to upgrade to TLS first of all
if 'tls_required' in self._server_info and self._server_info['tls_required']:
# Detach and prepare for upgrading the TLS connection.
self._loop.remove_handler(self._socket.fileno())
tls_opts = {}
if "tls" in self.options:
# Allow customizing the TLS version though default
# to one that the server supports at least.
tls_opts = self.options["tls"]
# Rewrap using a TLS connection, can't do handshake on connect
# as the socket is non blocking.
self._socket = ssl.wrap_socket(
self._socket, do_handshake_on_connect=False, **tls_opts)
# Use the TLS stream instead from now
self.io = tornado.iostream.SSLIOStream(self._socket)
self.io.set_close_callback(self._process_op_err)
self.io._do_ssl_handshake()
# Refresh state of the parser upon reconnect.
if self.is_reconnecting:
self._ps.reset()
# CONNECT then send a PING expecting a PONG to make a
# roundtrip to the server and assert that sent commands sent
# this far have been processed already.
cmd = self.connect_command()
yield self.io.write(cmd)
yield self.io.write(PING_PROTO)
# FIXME: Add readline timeout for these.
next_op = yield self.io.read_until(
_CRLF_, max_bytes=MAX_CONTROL_LINE_SIZE)
if self.options["verbose"] and OK_OP in next_op:
next_op = yield self.io.read_until(
_CRLF_, max_bytes=MAX_CONTROL_LINE_SIZE)
if ERR_OP in next_op:
err_line = next_op.decode()
_, err_msg = err_line.split(_SPC_, 1)
# FIXME: Maybe handling could be more special here,
# checking for ErrAuthorization for example.
# yield from self._process_err(err_msg)
raise NatsError("nats: " + err_msg.rstrip('\r\n'))
if PONG_PROTO in next_op:
self._status = Client.CONNECTED
self._loop.spawn_callback(self._read_loop)
self._pongs = []
self._pings_outstanding = 0
self._ping_timer = tornado.ioloop.PeriodicCallback(
self._ping_interval, self.options["ping_interval"] * 1000)
self._ping_timer.start()
# Queue and flusher for coalescing writes to the server.
self._flush_queue = tornado.queues.Queue(maxsize=1024)
self._loop.spawn_callback(self._flusher_loop) | def function[_process_connect_init, parameter[self]]:
constant[
Handles the initial part of the NATS protocol, moving from
the (RE)CONNECTING to CONNECTED states when establishing
a connection with the server.
]
variable[line] assign[=] <ast.Yield object at 0x7da1b040d390>
<ast.Tuple object at 0x7da1b040c8e0> assign[=] call[name[line].split, parameter[binary_operation[name[INFO_OP] + name[_SPC_]], constant[1]]]
name[self]._server_info assign[=] call[name[tornado].escape.json_decode, parameter[name[args]]]
if compare[constant[max_payload] in name[self]._server_info] begin[:]
name[self]._max_payload_size assign[=] call[name[self]._server_info][constant[max_payload]]
if <ast.BoolOp object at 0x7da1b0499e40> begin[:]
call[name[self]._loop.remove_handler, parameter[call[name[self]._socket.fileno, parameter[]]]]
variable[tls_opts] assign[=] dictionary[[], []]
if compare[constant[tls] in name[self].options] begin[:]
variable[tls_opts] assign[=] call[name[self].options][constant[tls]]
name[self]._socket assign[=] call[name[ssl].wrap_socket, parameter[name[self]._socket]]
name[self].io assign[=] call[name[tornado].iostream.SSLIOStream, parameter[name[self]._socket]]
call[name[self].io.set_close_callback, parameter[name[self]._process_op_err]]
call[name[self].io._do_ssl_handshake, parameter[]]
if name[self].is_reconnecting begin[:]
call[name[self]._ps.reset, parameter[]]
variable[cmd] assign[=] call[name[self].connect_command, parameter[]]
<ast.Yield object at 0x7da1b040fd00>
<ast.Yield object at 0x7da1b040e680>
variable[next_op] assign[=] <ast.Yield object at 0x7da1b040d660>
if <ast.BoolOp object at 0x7da1b040e530> begin[:]
variable[next_op] assign[=] <ast.Yield object at 0x7da1b040cd90>
if compare[name[ERR_OP] in name[next_op]] begin[:]
variable[err_line] assign[=] call[name[next_op].decode, parameter[]]
<ast.Tuple object at 0x7da1b040f3a0> assign[=] call[name[err_line].split, parameter[name[_SPC_], constant[1]]]
<ast.Raise object at 0x7da1b040c9a0>
if compare[name[PONG_PROTO] in name[next_op]] begin[:]
name[self]._status assign[=] name[Client].CONNECTED
call[name[self]._loop.spawn_callback, parameter[name[self]._read_loop]]
name[self]._pongs assign[=] list[[]]
name[self]._pings_outstanding assign[=] constant[0]
name[self]._ping_timer assign[=] call[name[tornado].ioloop.PeriodicCallback, parameter[name[self]._ping_interval, binary_operation[call[name[self].options][constant[ping_interval]] * constant[1000]]]]
call[name[self]._ping_timer.start, parameter[]]
name[self]._flush_queue assign[=] call[name[tornado].queues.Queue, parameter[]]
call[name[self]._loop.spawn_callback, parameter[name[self]._flusher_loop]] | keyword[def] identifier[_process_connect_init] ( identifier[self] ):
literal[string]
identifier[line] = keyword[yield] identifier[self] . identifier[io] . identifier[read_until] ( identifier[_CRLF_] , identifier[max_bytes] = keyword[None] )
identifier[_] , identifier[args] = identifier[line] . identifier[split] ( identifier[INFO_OP] + identifier[_SPC_] , literal[int] )
identifier[self] . identifier[_server_info] = identifier[tornado] . identifier[escape] . identifier[json_decode] (( identifier[args] ))
keyword[if] literal[string] keyword[in] identifier[self] . identifier[_server_info] :
identifier[self] . identifier[_max_payload_size] = identifier[self] . identifier[_server_info] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[_server_info] keyword[and] identifier[self] . identifier[_server_info] [ literal[string] ]:
identifier[self] . identifier[_loop] . identifier[remove_handler] ( identifier[self] . identifier[_socket] . identifier[fileno] ())
identifier[tls_opts] ={}
keyword[if] literal[string] keyword[in] identifier[self] . identifier[options] :
identifier[tls_opts] = identifier[self] . identifier[options] [ literal[string] ]
identifier[self] . identifier[_socket] = identifier[ssl] . identifier[wrap_socket] (
identifier[self] . identifier[_socket] , identifier[do_handshake_on_connect] = keyword[False] ,** identifier[tls_opts] )
identifier[self] . identifier[io] = identifier[tornado] . identifier[iostream] . identifier[SSLIOStream] ( identifier[self] . identifier[_socket] )
identifier[self] . identifier[io] . identifier[set_close_callback] ( identifier[self] . identifier[_process_op_err] )
identifier[self] . identifier[io] . identifier[_do_ssl_handshake] ()
keyword[if] identifier[self] . identifier[is_reconnecting] :
identifier[self] . identifier[_ps] . identifier[reset] ()
identifier[cmd] = identifier[self] . identifier[connect_command] ()
keyword[yield] identifier[self] . identifier[io] . identifier[write] ( identifier[cmd] )
keyword[yield] identifier[self] . identifier[io] . identifier[write] ( identifier[PING_PROTO] )
identifier[next_op] = keyword[yield] identifier[self] . identifier[io] . identifier[read_until] (
identifier[_CRLF_] , identifier[max_bytes] = identifier[MAX_CONTROL_LINE_SIZE] )
keyword[if] identifier[self] . identifier[options] [ literal[string] ] keyword[and] identifier[OK_OP] keyword[in] identifier[next_op] :
identifier[next_op] = keyword[yield] identifier[self] . identifier[io] . identifier[read_until] (
identifier[_CRLF_] , identifier[max_bytes] = identifier[MAX_CONTROL_LINE_SIZE] )
keyword[if] identifier[ERR_OP] keyword[in] identifier[next_op] :
identifier[err_line] = identifier[next_op] . identifier[decode] ()
identifier[_] , identifier[err_msg] = identifier[err_line] . identifier[split] ( identifier[_SPC_] , literal[int] )
keyword[raise] identifier[NatsError] ( literal[string] + identifier[err_msg] . identifier[rstrip] ( literal[string] ))
keyword[if] identifier[PONG_PROTO] keyword[in] identifier[next_op] :
identifier[self] . identifier[_status] = identifier[Client] . identifier[CONNECTED]
identifier[self] . identifier[_loop] . identifier[spawn_callback] ( identifier[self] . identifier[_read_loop] )
identifier[self] . identifier[_pongs] =[]
identifier[self] . identifier[_pings_outstanding] = literal[int]
identifier[self] . identifier[_ping_timer] = identifier[tornado] . identifier[ioloop] . identifier[PeriodicCallback] (
identifier[self] . identifier[_ping_interval] , identifier[self] . identifier[options] [ literal[string] ]* literal[int] )
identifier[self] . identifier[_ping_timer] . identifier[start] ()
identifier[self] . identifier[_flush_queue] = identifier[tornado] . identifier[queues] . identifier[Queue] ( identifier[maxsize] = literal[int] )
identifier[self] . identifier[_loop] . identifier[spawn_callback] ( identifier[self] . identifier[_flusher_loop] ) | def _process_connect_init(self):
"""
Handles the initial part of the NATS protocol, moving from
the (RE)CONNECTING to CONNECTED states when establishing
a connection with the server.
"""
# INFO {...}
line = (yield self.io.read_until(_CRLF_, max_bytes=None))
(_, args) = line.split(INFO_OP + _SPC_, 1)
self._server_info = tornado.escape.json_decode(args)
if 'max_payload' in self._server_info:
self._max_payload_size = self._server_info['max_payload'] # depends on [control=['if'], data=[]]
# Check whether we need to upgrade to TLS first of all
if 'tls_required' in self._server_info and self._server_info['tls_required']:
# Detach and prepare for upgrading the TLS connection.
self._loop.remove_handler(self._socket.fileno())
tls_opts = {}
if 'tls' in self.options:
# Allow customizing the TLS version though default
# to one that the server supports at least.
tls_opts = self.options['tls'] # depends on [control=['if'], data=[]]
# Rewrap using a TLS connection, can't do handshake on connect
# as the socket is non blocking.
self._socket = ssl.wrap_socket(self._socket, do_handshake_on_connect=False, **tls_opts)
# Use the TLS stream instead from now
self.io = tornado.iostream.SSLIOStream(self._socket)
self.io.set_close_callback(self._process_op_err)
self.io._do_ssl_handshake() # depends on [control=['if'], data=[]]
# Refresh state of the parser upon reconnect.
if self.is_reconnecting:
self._ps.reset() # depends on [control=['if'], data=[]]
# CONNECT then send a PING expecting a PONG to make a
# roundtrip to the server and assert that sent commands sent
# this far have been processed already.
cmd = self.connect_command()
yield self.io.write(cmd)
yield self.io.write(PING_PROTO)
# FIXME: Add readline timeout for these.
next_op = (yield self.io.read_until(_CRLF_, max_bytes=MAX_CONTROL_LINE_SIZE))
if self.options['verbose'] and OK_OP in next_op:
next_op = (yield self.io.read_until(_CRLF_, max_bytes=MAX_CONTROL_LINE_SIZE)) # depends on [control=['if'], data=[]]
if ERR_OP in next_op:
err_line = next_op.decode()
(_, err_msg) = err_line.split(_SPC_, 1)
# FIXME: Maybe handling could be more special here,
# checking for ErrAuthorization for example.
# yield from self._process_err(err_msg)
raise NatsError('nats: ' + err_msg.rstrip('\r\n')) # depends on [control=['if'], data=['next_op']]
if PONG_PROTO in next_op:
self._status = Client.CONNECTED # depends on [control=['if'], data=[]]
self._loop.spawn_callback(self._read_loop)
self._pongs = []
self._pings_outstanding = 0
self._ping_timer = tornado.ioloop.PeriodicCallback(self._ping_interval, self.options['ping_interval'] * 1000)
self._ping_timer.start()
# Queue and flusher for coalescing writes to the server.
self._flush_queue = tornado.queues.Queue(maxsize=1024)
self._loop.spawn_callback(self._flusher_loop) |
def get_signum(val, err, max_sig=numpy.inf):
"""
Given an error, returns a string for val formated to the appropriate
number of significant figures.
"""
coeff, pwr = ('%e' % err).split('e')
if pwr.startswith('-'):
pwr = int(pwr[1:])
if round(float(coeff)) == 10.:
pwr -= 1
pwr = min(pwr, max_sig)
tmplt = '%.' + str(pwr+1) + 'f'
return tmplt % val
else:
pwr = int(pwr[1:])
if round(float(coeff)) == 10.:
pwr += 1
# if the error is large, we can sometimes get 0;
# adjust the round until we don't get 0 (assuming the actual
# value isn't 0)
return_val = round(val, -pwr+1)
if val != 0.:
loop_count = 0
max_recursion = 100
while return_val == 0.:
pwr -= 1
return_val = round(val, -pwr+1)
loop_count += 1
if loop_count > max_recursion:
raise ValueError("Maximum recursion depth hit! Input " +\
"values are: val = %f, err = %f" %(val, err))
return drop_trailing_zeros(return_val) | def function[get_signum, parameter[val, err, max_sig]]:
constant[
Given an error, returns a string for val formated to the appropriate
number of significant figures.
]
<ast.Tuple object at 0x7da204962c50> assign[=] call[binary_operation[constant[%e] <ast.Mod object at 0x7da2590d6920> name[err]].split, parameter[constant[e]]]
if call[name[pwr].startswith, parameter[constant[-]]] begin[:]
variable[pwr] assign[=] call[name[int], parameter[call[name[pwr]][<ast.Slice object at 0x7da204963130>]]]
if compare[call[name[round], parameter[call[name[float], parameter[name[coeff]]]]] equal[==] constant[10.0]] begin[:]
<ast.AugAssign object at 0x7da204963460>
variable[pwr] assign[=] call[name[min], parameter[name[pwr], name[max_sig]]]
variable[tmplt] assign[=] binary_operation[binary_operation[constant[%.] + call[name[str], parameter[binary_operation[name[pwr] + constant[1]]]]] + constant[f]]
return[binary_operation[name[tmplt] <ast.Mod object at 0x7da2590d6920> name[val]]] | keyword[def] identifier[get_signum] ( identifier[val] , identifier[err] , identifier[max_sig] = identifier[numpy] . identifier[inf] ):
literal[string]
identifier[coeff] , identifier[pwr] =( literal[string] % identifier[err] ). identifier[split] ( literal[string] )
keyword[if] identifier[pwr] . identifier[startswith] ( literal[string] ):
identifier[pwr] = identifier[int] ( identifier[pwr] [ literal[int] :])
keyword[if] identifier[round] ( identifier[float] ( identifier[coeff] ))== literal[int] :
identifier[pwr] -= literal[int]
identifier[pwr] = identifier[min] ( identifier[pwr] , identifier[max_sig] )
identifier[tmplt] = literal[string] + identifier[str] ( identifier[pwr] + literal[int] )+ literal[string]
keyword[return] identifier[tmplt] % identifier[val]
keyword[else] :
identifier[pwr] = identifier[int] ( identifier[pwr] [ literal[int] :])
keyword[if] identifier[round] ( identifier[float] ( identifier[coeff] ))== literal[int] :
identifier[pwr] += literal[int]
identifier[return_val] = identifier[round] ( identifier[val] ,- identifier[pwr] + literal[int] )
keyword[if] identifier[val] != literal[int] :
identifier[loop_count] = literal[int]
identifier[max_recursion] = literal[int]
keyword[while] identifier[return_val] == literal[int] :
identifier[pwr] -= literal[int]
identifier[return_val] = identifier[round] ( identifier[val] ,- identifier[pwr] + literal[int] )
identifier[loop_count] += literal[int]
keyword[if] identifier[loop_count] > identifier[max_recursion] :
keyword[raise] identifier[ValueError] ( literal[string] + literal[string] %( identifier[val] , identifier[err] ))
keyword[return] identifier[drop_trailing_zeros] ( identifier[return_val] ) | def get_signum(val, err, max_sig=numpy.inf):
"""
Given an error, returns a string for val formated to the appropriate
number of significant figures.
"""
(coeff, pwr) = ('%e' % err).split('e')
if pwr.startswith('-'):
pwr = int(pwr[1:])
if round(float(coeff)) == 10.0:
pwr -= 1 # depends on [control=['if'], data=[]]
pwr = min(pwr, max_sig)
tmplt = '%.' + str(pwr + 1) + 'f'
return tmplt % val # depends on [control=['if'], data=[]]
else:
pwr = int(pwr[1:])
if round(float(coeff)) == 10.0:
pwr += 1 # depends on [control=['if'], data=[]]
# if the error is large, we can sometimes get 0;
# adjust the round until we don't get 0 (assuming the actual
# value isn't 0)
return_val = round(val, -pwr + 1)
if val != 0.0:
loop_count = 0
max_recursion = 100
while return_val == 0.0:
pwr -= 1
return_val = round(val, -pwr + 1)
loop_count += 1
if loop_count > max_recursion:
raise ValueError('Maximum recursion depth hit! Input ' + 'values are: val = %f, err = %f' % (val, err)) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['return_val']] # depends on [control=['if'], data=['val']]
return drop_trailing_zeros(return_val) |
def register(self, app, options, first_registration=False):
"""Called by :meth:`Flask.register_blueprint` to register a blueprint
on the application. This can be overridden to customize the register
behavior. Keyword arguments from
:func:`~flask.Flask.register_blueprint` are directly forwarded to this
method in the `options` dictionary.
"""
self.jsonrpc_site = options.get('jsonrpc_site')
self._got_registered_once = True
state = self.make_setup_state(app, options, first_registration)
if self.has_static_folder and \
not self.name + '.static' in state.app.view_functions.keys():
state.add_url_rule(self.static_url_path + '/<path:filename>',
view_func=self.send_static_file,
endpoint='static')
for deferred in self.deferred_functions:
deferred(state) | def function[register, parameter[self, app, options, first_registration]]:
constant[Called by :meth:`Flask.register_blueprint` to register a blueprint
on the application. This can be overridden to customize the register
behavior. Keyword arguments from
:func:`~flask.Flask.register_blueprint` are directly forwarded to this
method in the `options` dictionary.
]
name[self].jsonrpc_site assign[=] call[name[options].get, parameter[constant[jsonrpc_site]]]
name[self]._got_registered_once assign[=] constant[True]
variable[state] assign[=] call[name[self].make_setup_state, parameter[name[app], name[options], name[first_registration]]]
if <ast.BoolOp object at 0x7da1b009f370> begin[:]
call[name[state].add_url_rule, parameter[binary_operation[name[self].static_url_path + constant[/<path:filename>]]]]
for taget[name[deferred]] in starred[name[self].deferred_functions] begin[:]
call[name[deferred], parameter[name[state]]] | keyword[def] identifier[register] ( identifier[self] , identifier[app] , identifier[options] , identifier[first_registration] = keyword[False] ):
literal[string]
identifier[self] . identifier[jsonrpc_site] = identifier[options] . identifier[get] ( literal[string] )
identifier[self] . identifier[_got_registered_once] = keyword[True]
identifier[state] = identifier[self] . identifier[make_setup_state] ( identifier[app] , identifier[options] , identifier[first_registration] )
keyword[if] identifier[self] . identifier[has_static_folder] keyword[and] keyword[not] identifier[self] . identifier[name] + literal[string] keyword[in] identifier[state] . identifier[app] . identifier[view_functions] . identifier[keys] ():
identifier[state] . identifier[add_url_rule] ( identifier[self] . identifier[static_url_path] + literal[string] ,
identifier[view_func] = identifier[self] . identifier[send_static_file] ,
identifier[endpoint] = literal[string] )
keyword[for] identifier[deferred] keyword[in] identifier[self] . identifier[deferred_functions] :
identifier[deferred] ( identifier[state] ) | def register(self, app, options, first_registration=False):
"""Called by :meth:`Flask.register_blueprint` to register a blueprint
on the application. This can be overridden to customize the register
behavior. Keyword arguments from
:func:`~flask.Flask.register_blueprint` are directly forwarded to this
method in the `options` dictionary.
"""
self.jsonrpc_site = options.get('jsonrpc_site')
self._got_registered_once = True
state = self.make_setup_state(app, options, first_registration)
if self.has_static_folder and (not self.name + '.static' in state.app.view_functions.keys()):
state.add_url_rule(self.static_url_path + '/<path:filename>', view_func=self.send_static_file, endpoint='static') # depends on [control=['if'], data=[]]
for deferred in self.deferred_functions:
deferred(state) # depends on [control=['for'], data=['deferred']] |
def use_plenary_resource_view(self):
"""Pass through to provider ResourceLookupSession.use_plenary_resource_view"""
self._object_views['resource'] = PLENARY
# self._get_provider_session('resource_lookup_session') # To make sure the session is tracked
for session in self._get_provider_sessions():
try:
session.use_plenary_resource_view()
except AttributeError:
pass | def function[use_plenary_resource_view, parameter[self]]:
constant[Pass through to provider ResourceLookupSession.use_plenary_resource_view]
call[name[self]._object_views][constant[resource]] assign[=] name[PLENARY]
for taget[name[session]] in starred[call[name[self]._get_provider_sessions, parameter[]]] begin[:]
<ast.Try object at 0x7da1b0a650c0> | keyword[def] identifier[use_plenary_resource_view] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_object_views] [ literal[string] ]= identifier[PLENARY]
keyword[for] identifier[session] keyword[in] identifier[self] . identifier[_get_provider_sessions] ():
keyword[try] :
identifier[session] . identifier[use_plenary_resource_view] ()
keyword[except] identifier[AttributeError] :
keyword[pass] | def use_plenary_resource_view(self):
"""Pass through to provider ResourceLookupSession.use_plenary_resource_view"""
self._object_views['resource'] = PLENARY
# self._get_provider_session('resource_lookup_session') # To make sure the session is tracked
for session in self._get_provider_sessions():
try:
session.use_plenary_resource_view() # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['session']] |
def d2Sbr_dV2(self, Cbr, Ybr, V, lam):
""" Based on d2Sbr_dV2.m from MATPOWER by Ray Zimmerman, developed
at PSERC Cornell. See U{http://www.pserc.cornell.edu/matpower/} for
more information.
@rtype: tuple
@return: The 2nd derivatives of complex power flow w.r.t. voltage.
"""
nb = len(V)
nl = len(lam)
ib = range(nb)
il = range(nl)
diaglam = csr_matrix((lam, (il, il)))
diagV = csr_matrix((V, (ib, ib)))
A = Ybr.H * diaglam * Cbr
B = conj(diagV) * A * diagV
D = csr_matrix( ((A * V) * conj(V), (ib, ib)) )
E = csr_matrix( ((A.T * conj(V) * V), (ib, ib)) )
F = B + B.T
G = csr_matrix((ones(nb) / abs(V), (ib, ib)))
Haa = F - D - E
Hva = 1j * G * (B - B.T - D + E)
Hav = Hva.T
Hvv = G * F * G
return Haa, Hav, Hva, Hvv | def function[d2Sbr_dV2, parameter[self, Cbr, Ybr, V, lam]]:
constant[ Based on d2Sbr_dV2.m from MATPOWER by Ray Zimmerman, developed
at PSERC Cornell. See U{http://www.pserc.cornell.edu/matpower/} for
more information.
@rtype: tuple
@return: The 2nd derivatives of complex power flow w.r.t. voltage.
]
variable[nb] assign[=] call[name[len], parameter[name[V]]]
variable[nl] assign[=] call[name[len], parameter[name[lam]]]
variable[ib] assign[=] call[name[range], parameter[name[nb]]]
variable[il] assign[=] call[name[range], parameter[name[nl]]]
variable[diaglam] assign[=] call[name[csr_matrix], parameter[tuple[[<ast.Name object at 0x7da1b25d32e0>, <ast.Tuple object at 0x7da1b25d1120>]]]]
variable[diagV] assign[=] call[name[csr_matrix], parameter[tuple[[<ast.Name object at 0x7da1b25d37c0>, <ast.Tuple object at 0x7da1b25d3220>]]]]
variable[A] assign[=] binary_operation[binary_operation[name[Ybr].H * name[diaglam]] * name[Cbr]]
variable[B] assign[=] binary_operation[binary_operation[call[name[conj], parameter[name[diagV]]] * name[A]] * name[diagV]]
variable[D] assign[=] call[name[csr_matrix], parameter[tuple[[<ast.BinOp object at 0x7da1b25d0f40>, <ast.Tuple object at 0x7da1b25d2aa0>]]]]
variable[E] assign[=] call[name[csr_matrix], parameter[tuple[[<ast.BinOp object at 0x7da1b25d0cd0>, <ast.Tuple object at 0x7da1b25d2a40>]]]]
variable[F] assign[=] binary_operation[name[B] + name[B].T]
variable[G] assign[=] call[name[csr_matrix], parameter[tuple[[<ast.BinOp object at 0x7da18dc993c0>, <ast.Tuple object at 0x7da18dc98430>]]]]
variable[Haa] assign[=] binary_operation[binary_operation[name[F] - name[D]] - name[E]]
variable[Hva] assign[=] binary_operation[binary_operation[constant[1j] * name[G]] * binary_operation[binary_operation[binary_operation[name[B] - name[B].T] - name[D]] + name[E]]]
variable[Hav] assign[=] name[Hva].T
variable[Hvv] assign[=] binary_operation[binary_operation[name[G] * name[F]] * name[G]]
return[tuple[[<ast.Name object at 0x7da18dc9a590>, <ast.Name object at 0x7da18dc9b0d0>, <ast.Name object at 0x7da18dc99930>, <ast.Name object at 0x7da18dc99c30>]]] | keyword[def] identifier[d2Sbr_dV2] ( identifier[self] , identifier[Cbr] , identifier[Ybr] , identifier[V] , identifier[lam] ):
literal[string]
identifier[nb] = identifier[len] ( identifier[V] )
identifier[nl] = identifier[len] ( identifier[lam] )
identifier[ib] = identifier[range] ( identifier[nb] )
identifier[il] = identifier[range] ( identifier[nl] )
identifier[diaglam] = identifier[csr_matrix] (( identifier[lam] ,( identifier[il] , identifier[il] )))
identifier[diagV] = identifier[csr_matrix] (( identifier[V] ,( identifier[ib] , identifier[ib] )))
identifier[A] = identifier[Ybr] . identifier[H] * identifier[diaglam] * identifier[Cbr]
identifier[B] = identifier[conj] ( identifier[diagV] )* identifier[A] * identifier[diagV]
identifier[D] = identifier[csr_matrix] ((( identifier[A] * identifier[V] )* identifier[conj] ( identifier[V] ),( identifier[ib] , identifier[ib] )))
identifier[E] = identifier[csr_matrix] ((( identifier[A] . identifier[T] * identifier[conj] ( identifier[V] )* identifier[V] ),( identifier[ib] , identifier[ib] )))
identifier[F] = identifier[B] + identifier[B] . identifier[T]
identifier[G] = identifier[csr_matrix] (( identifier[ones] ( identifier[nb] )/ identifier[abs] ( identifier[V] ),( identifier[ib] , identifier[ib] )))
identifier[Haa] = identifier[F] - identifier[D] - identifier[E]
identifier[Hva] = literal[int] * identifier[G] *( identifier[B] - identifier[B] . identifier[T] - identifier[D] + identifier[E] )
identifier[Hav] = identifier[Hva] . identifier[T]
identifier[Hvv] = identifier[G] * identifier[F] * identifier[G]
keyword[return] identifier[Haa] , identifier[Hav] , identifier[Hva] , identifier[Hvv] | def d2Sbr_dV2(self, Cbr, Ybr, V, lam):
""" Based on d2Sbr_dV2.m from MATPOWER by Ray Zimmerman, developed
at PSERC Cornell. See U{http://www.pserc.cornell.edu/matpower/} for
more information.
@rtype: tuple
@return: The 2nd derivatives of complex power flow w.r.t. voltage.
"""
nb = len(V)
nl = len(lam)
ib = range(nb)
il = range(nl)
diaglam = csr_matrix((lam, (il, il)))
diagV = csr_matrix((V, (ib, ib)))
A = Ybr.H * diaglam * Cbr
B = conj(diagV) * A * diagV
D = csr_matrix((A * V * conj(V), (ib, ib)))
E = csr_matrix((A.T * conj(V) * V, (ib, ib)))
F = B + B.T
G = csr_matrix((ones(nb) / abs(V), (ib, ib)))
Haa = F - D - E
Hva = 1j * G * (B - B.T - D + E)
Hav = Hva.T
Hvv = G * F * G
return (Haa, Hav, Hva, Hvv) |
def set_install_name(filename, oldname, newname):
""" Set install name `oldname` to `newname` in library filename
Parameters
----------
filename : str
filename of library
oldname : str
current install name in library
newname : str
replacement name for `oldname`
"""
names = get_install_names(filename)
if oldname not in names:
raise InstallNameError('{0} not in install names for {1}'.format(
oldname, filename))
back_tick(['install_name_tool', '-change', oldname, newname, filename]) | def function[set_install_name, parameter[filename, oldname, newname]]:
constant[ Set install name `oldname` to `newname` in library filename
Parameters
----------
filename : str
filename of library
oldname : str
current install name in library
newname : str
replacement name for `oldname`
]
variable[names] assign[=] call[name[get_install_names], parameter[name[filename]]]
if compare[name[oldname] <ast.NotIn object at 0x7da2590d7190> name[names]] begin[:]
<ast.Raise object at 0x7da18f58d6c0>
call[name[back_tick], parameter[list[[<ast.Constant object at 0x7da18f58c220>, <ast.Constant object at 0x7da18f58cfd0>, <ast.Name object at 0x7da18f58c6d0>, <ast.Name object at 0x7da18f58f8b0>, <ast.Name object at 0x7da18f58f0a0>]]]] | keyword[def] identifier[set_install_name] ( identifier[filename] , identifier[oldname] , identifier[newname] ):
literal[string]
identifier[names] = identifier[get_install_names] ( identifier[filename] )
keyword[if] identifier[oldname] keyword[not] keyword[in] identifier[names] :
keyword[raise] identifier[InstallNameError] ( literal[string] . identifier[format] (
identifier[oldname] , identifier[filename] ))
identifier[back_tick] ([ literal[string] , literal[string] , identifier[oldname] , identifier[newname] , identifier[filename] ]) | def set_install_name(filename, oldname, newname):
""" Set install name `oldname` to `newname` in library filename
Parameters
----------
filename : str
filename of library
oldname : str
current install name in library
newname : str
replacement name for `oldname`
"""
names = get_install_names(filename)
if oldname not in names:
raise InstallNameError('{0} not in install names for {1}'.format(oldname, filename)) # depends on [control=['if'], data=['oldname']]
back_tick(['install_name_tool', '-change', oldname, newname, filename]) |
def list_tab(user):
'''
Return the contents of the specified user's incrontab
CLI Example:
.. code-block:: bash
salt '*' incron.list_tab root
'''
if user == 'system':
data = raw_system_incron()
else:
data = raw_incron(user)
log.debug('incron user data %s', data)
ret = {'crons': [],
'pre': []
}
flag = False
comment = None
tag = '# Line managed by Salt, do not edit'
for line in data.splitlines():
if line.endswith(tag):
if len(line.split()) > 3:
# Appears to be a standard incron line
comps = line.split()
path = comps[0]
mask = comps[1]
(cmd, comment) = ' '.join(comps[2:]).split(' # ')
dat = {'path': path,
'mask': mask,
'cmd': cmd,
'comment': comment}
ret['crons'].append(dat)
comment = None
else:
ret['pre'].append(line)
return ret | def function[list_tab, parameter[user]]:
constant[
Return the contents of the specified user's incrontab
CLI Example:
.. code-block:: bash
salt '*' incron.list_tab root
]
if compare[name[user] equal[==] constant[system]] begin[:]
variable[data] assign[=] call[name[raw_system_incron], parameter[]]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b26afa00>, <ast.Constant object at 0x7da1b26ae3e0>], [<ast.List object at 0x7da1b26adbd0>, <ast.List object at 0x7da1b26aff70>]]
variable[flag] assign[=] constant[False]
variable[comment] assign[=] constant[None]
variable[tag] assign[=] constant[# Line managed by Salt, do not edit]
for taget[name[line]] in starred[call[name[data].splitlines, parameter[]]] begin[:]
if call[name[line].endswith, parameter[name[tag]]] begin[:]
if compare[call[name[len], parameter[call[name[line].split, parameter[]]]] greater[>] constant[3]] begin[:]
variable[comps] assign[=] call[name[line].split, parameter[]]
variable[path] assign[=] call[name[comps]][constant[0]]
variable[mask] assign[=] call[name[comps]][constant[1]]
<ast.Tuple object at 0x7da1b26ac670> assign[=] call[call[constant[ ].join, parameter[call[name[comps]][<ast.Slice object at 0x7da1b26ad330>]]].split, parameter[constant[ # ]]]
variable[dat] assign[=] dictionary[[<ast.Constant object at 0x7da1b26ac1f0>, <ast.Constant object at 0x7da1b26aeec0>, <ast.Constant object at 0x7da1b26adea0>, <ast.Constant object at 0x7da1b26af6d0>], [<ast.Name object at 0x7da1b26ac9a0>, <ast.Name object at 0x7da1b26ad3c0>, <ast.Name object at 0x7da1b26ad5a0>, <ast.Name object at 0x7da1b26ac040>]]
call[call[name[ret]][constant[crons]].append, parameter[name[dat]]]
variable[comment] assign[=] constant[None]
return[name[ret]] | keyword[def] identifier[list_tab] ( identifier[user] ):
literal[string]
keyword[if] identifier[user] == literal[string] :
identifier[data] = identifier[raw_system_incron] ()
keyword[else] :
identifier[data] = identifier[raw_incron] ( identifier[user] )
identifier[log] . identifier[debug] ( literal[string] , identifier[data] )
identifier[ret] ={ literal[string] :[],
literal[string] :[]
}
identifier[flag] = keyword[False]
identifier[comment] = keyword[None]
identifier[tag] = literal[string]
keyword[for] identifier[line] keyword[in] identifier[data] . identifier[splitlines] ():
keyword[if] identifier[line] . identifier[endswith] ( identifier[tag] ):
keyword[if] identifier[len] ( identifier[line] . identifier[split] ())> literal[int] :
identifier[comps] = identifier[line] . identifier[split] ()
identifier[path] = identifier[comps] [ literal[int] ]
identifier[mask] = identifier[comps] [ literal[int] ]
( identifier[cmd] , identifier[comment] )= literal[string] . identifier[join] ( identifier[comps] [ literal[int] :]). identifier[split] ( literal[string] )
identifier[dat] ={ literal[string] : identifier[path] ,
literal[string] : identifier[mask] ,
literal[string] : identifier[cmd] ,
literal[string] : identifier[comment] }
identifier[ret] [ literal[string] ]. identifier[append] ( identifier[dat] )
identifier[comment] = keyword[None]
keyword[else] :
identifier[ret] [ literal[string] ]. identifier[append] ( identifier[line] )
keyword[return] identifier[ret] | def list_tab(user):
"""
Return the contents of the specified user's incrontab
CLI Example:
.. code-block:: bash
salt '*' incron.list_tab root
"""
if user == 'system':
data = raw_system_incron() # depends on [control=['if'], data=[]]
else:
data = raw_incron(user)
log.debug('incron user data %s', data)
ret = {'crons': [], 'pre': []}
flag = False
comment = None
tag = '# Line managed by Salt, do not edit'
for line in data.splitlines():
if line.endswith(tag):
if len(line.split()) > 3:
# Appears to be a standard incron line
comps = line.split()
path = comps[0]
mask = comps[1]
(cmd, comment) = ' '.join(comps[2:]).split(' # ')
dat = {'path': path, 'mask': mask, 'cmd': cmd, 'comment': comment}
ret['crons'].append(dat)
comment = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
ret['pre'].append(line) # depends on [control=['for'], data=['line']]
return ret |
def igmp_snooping_ip_pim_snooping_pimv4_enable(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
igmp_snooping = ET.SubElement(config, "igmp-snooping", xmlns="urn:brocade.com:mgmt:brocade-igmp-snooping")
ip = ET.SubElement(igmp_snooping, "ip")
pim = ET.SubElement(ip, "pim")
snooping = ET.SubElement(pim, "snooping")
pimv4_enable = ET.SubElement(snooping, "pimv4-enable")
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[igmp_snooping_ip_pim_snooping_pimv4_enable, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[igmp_snooping] assign[=] call[name[ET].SubElement, parameter[name[config], constant[igmp-snooping]]]
variable[ip] assign[=] call[name[ET].SubElement, parameter[name[igmp_snooping], constant[ip]]]
variable[pim] assign[=] call[name[ET].SubElement, parameter[name[ip], constant[pim]]]
variable[snooping] assign[=] call[name[ET].SubElement, parameter[name[pim], constant[snooping]]]
variable[pimv4_enable] assign[=] call[name[ET].SubElement, parameter[name[snooping], constant[pimv4-enable]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[igmp_snooping_ip_pim_snooping_pimv4_enable] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[igmp_snooping] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[ip] = identifier[ET] . identifier[SubElement] ( identifier[igmp_snooping] , literal[string] )
identifier[pim] = identifier[ET] . identifier[SubElement] ( identifier[ip] , literal[string] )
identifier[snooping] = identifier[ET] . identifier[SubElement] ( identifier[pim] , literal[string] )
identifier[pimv4_enable] = identifier[ET] . identifier[SubElement] ( identifier[snooping] , literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def igmp_snooping_ip_pim_snooping_pimv4_enable(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
igmp_snooping = ET.SubElement(config, 'igmp-snooping', xmlns='urn:brocade.com:mgmt:brocade-igmp-snooping')
ip = ET.SubElement(igmp_snooping, 'ip')
pim = ET.SubElement(ip, 'pim')
snooping = ET.SubElement(pim, 'snooping')
pimv4_enable = ET.SubElement(snooping, 'pimv4-enable')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def convert_pnm(self, infile, outfile):
"""
Convert a PNM file containing raw pixel data into a PNG file
with the parameters set in the writer object. Works for
(binary) PGM, PPM, and PAM formats.
"""
if self.interlace:
pixels = array('B')
pixels.fromfile(infile,
(self.bitdepth/8) * self.color_planes *
self.width * self.height)
self.write_passes(outfile, self.array_scanlines_interlace(pixels))
else:
self.write_passes(outfile, self.file_scanlines(infile)) | def function[convert_pnm, parameter[self, infile, outfile]]:
constant[
Convert a PNM file containing raw pixel data into a PNG file
with the parameters set in the writer object. Works for
(binary) PGM, PPM, and PAM formats.
]
if name[self].interlace begin[:]
variable[pixels] assign[=] call[name[array], parameter[constant[B]]]
call[name[pixels].fromfile, parameter[name[infile], binary_operation[binary_operation[binary_operation[binary_operation[name[self].bitdepth / constant[8]] * name[self].color_planes] * name[self].width] * name[self].height]]]
call[name[self].write_passes, parameter[name[outfile], call[name[self].array_scanlines_interlace, parameter[name[pixels]]]]] | keyword[def] identifier[convert_pnm] ( identifier[self] , identifier[infile] , identifier[outfile] ):
literal[string]
keyword[if] identifier[self] . identifier[interlace] :
identifier[pixels] = identifier[array] ( literal[string] )
identifier[pixels] . identifier[fromfile] ( identifier[infile] ,
( identifier[self] . identifier[bitdepth] / literal[int] )* identifier[self] . identifier[color_planes] *
identifier[self] . identifier[width] * identifier[self] . identifier[height] )
identifier[self] . identifier[write_passes] ( identifier[outfile] , identifier[self] . identifier[array_scanlines_interlace] ( identifier[pixels] ))
keyword[else] :
identifier[self] . identifier[write_passes] ( identifier[outfile] , identifier[self] . identifier[file_scanlines] ( identifier[infile] )) | def convert_pnm(self, infile, outfile):
"""
Convert a PNM file containing raw pixel data into a PNG file
with the parameters set in the writer object. Works for
(binary) PGM, PPM, and PAM formats.
"""
if self.interlace:
pixels = array('B')
pixels.fromfile(infile, self.bitdepth / 8 * self.color_planes * self.width * self.height)
self.write_passes(outfile, self.array_scanlines_interlace(pixels)) # depends on [control=['if'], data=[]]
else:
self.write_passes(outfile, self.file_scanlines(infile)) |
def _apply(self, method, *args, **kwargs):
"""Create a new representation with ``method`` applied to the arrays.
In typical usage, the method is any of the shape-changing methods for
`~numpy.ndarray` (``reshape``, ``swapaxes``, etc.), as well as those
picking particular elements (``__getitem__``, ``take``, etc.), which
are all defined in `~astropy.utils.misc.ShapedLikeNDArray`. It will be
applied to the underlying arrays (e.g., ``x``, ``y``, and ``z`` for
`~astropy.coordinates.CartesianRepresentation`), with the results used
to create a new instance.
Internally, it is also used to apply functions to the components
(in particular, `~numpy.broadcast_to`).
Parameters
----------
method : str or callable
If str, it is the name of a method that is applied to the internal
``components``. If callable, the function is applied.
args : tuple
Any positional arguments for ``method``.
kwargs : dict
Any keyword arguments for ``method``.
"""
if callable(method):
apply_method = lambda array: method(array, *args, **kwargs)
else:
apply_method = operator.methodcaller(method, *args, **kwargs)
return self.__class__([apply_method(getattr(self, component))
for component in self.components], copy=False) | def function[_apply, parameter[self, method]]:
constant[Create a new representation with ``method`` applied to the arrays.
In typical usage, the method is any of the shape-changing methods for
`~numpy.ndarray` (``reshape``, ``swapaxes``, etc.), as well as those
picking particular elements (``__getitem__``, ``take``, etc.), which
are all defined in `~astropy.utils.misc.ShapedLikeNDArray`. It will be
applied to the underlying arrays (e.g., ``x``, ``y``, and ``z`` for
`~astropy.coordinates.CartesianRepresentation`), with the results used
to create a new instance.
Internally, it is also used to apply functions to the components
(in particular, `~numpy.broadcast_to`).
Parameters
----------
method : str or callable
If str, it is the name of a method that is applied to the internal
``components``. If callable, the function is applied.
args : tuple
Any positional arguments for ``method``.
kwargs : dict
Any keyword arguments for ``method``.
]
if call[name[callable], parameter[name[method]]] begin[:]
variable[apply_method] assign[=] <ast.Lambda object at 0x7da1b0e59600>
return[call[name[self].__class__, parameter[<ast.ListComp object at 0x7da1b0e59c00>]]] | keyword[def] identifier[_apply] ( identifier[self] , identifier[method] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[callable] ( identifier[method] ):
identifier[apply_method] = keyword[lambda] identifier[array] : identifier[method] ( identifier[array] ,* identifier[args] ,** identifier[kwargs] )
keyword[else] :
identifier[apply_method] = identifier[operator] . identifier[methodcaller] ( identifier[method] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[self] . identifier[__class__] ([ identifier[apply_method] ( identifier[getattr] ( identifier[self] , identifier[component] ))
keyword[for] identifier[component] keyword[in] identifier[self] . identifier[components] ], identifier[copy] = keyword[False] ) | def _apply(self, method, *args, **kwargs):
"""Create a new representation with ``method`` applied to the arrays.
In typical usage, the method is any of the shape-changing methods for
`~numpy.ndarray` (``reshape``, ``swapaxes``, etc.), as well as those
picking particular elements (``__getitem__``, ``take``, etc.), which
are all defined in `~astropy.utils.misc.ShapedLikeNDArray`. It will be
applied to the underlying arrays (e.g., ``x``, ``y``, and ``z`` for
`~astropy.coordinates.CartesianRepresentation`), with the results used
to create a new instance.
Internally, it is also used to apply functions to the components
(in particular, `~numpy.broadcast_to`).
Parameters
----------
method : str or callable
If str, it is the name of a method that is applied to the internal
``components``. If callable, the function is applied.
args : tuple
Any positional arguments for ``method``.
kwargs : dict
Any keyword arguments for ``method``.
"""
if callable(method):
apply_method = lambda array: method(array, *args, **kwargs) # depends on [control=['if'], data=[]]
else:
apply_method = operator.methodcaller(method, *args, **kwargs)
return self.__class__([apply_method(getattr(self, component)) for component in self.components], copy=False) |
def find_resource_ids(self):
"""Given a resource path and API Id, find resource Id."""
all_resources = self.client.get_resources(restApiId=self.api_id)
parent_id = None
resource_id = None
for resource in all_resources['items']:
if resource['path'] == "/":
parent_id = resource['id']
if resource['path'] == self.trigger_settings['resource']:
resource_id = resource['id']
self.log.info("Found Resource ID for: %s", resource['path'])
return resource_id, parent_id | def function[find_resource_ids, parameter[self]]:
constant[Given a resource path and API Id, find resource Id.]
variable[all_resources] assign[=] call[name[self].client.get_resources, parameter[]]
variable[parent_id] assign[=] constant[None]
variable[resource_id] assign[=] constant[None]
for taget[name[resource]] in starred[call[name[all_resources]][constant[items]]] begin[:]
if compare[call[name[resource]][constant[path]] equal[==] constant[/]] begin[:]
variable[parent_id] assign[=] call[name[resource]][constant[id]]
if compare[call[name[resource]][constant[path]] equal[==] call[name[self].trigger_settings][constant[resource]]] begin[:]
variable[resource_id] assign[=] call[name[resource]][constant[id]]
call[name[self].log.info, parameter[constant[Found Resource ID for: %s], call[name[resource]][constant[path]]]]
return[tuple[[<ast.Name object at 0x7da20c9937c0>, <ast.Name object at 0x7da20c991fc0>]]] | keyword[def] identifier[find_resource_ids] ( identifier[self] ):
literal[string]
identifier[all_resources] = identifier[self] . identifier[client] . identifier[get_resources] ( identifier[restApiId] = identifier[self] . identifier[api_id] )
identifier[parent_id] = keyword[None]
identifier[resource_id] = keyword[None]
keyword[for] identifier[resource] keyword[in] identifier[all_resources] [ literal[string] ]:
keyword[if] identifier[resource] [ literal[string] ]== literal[string] :
identifier[parent_id] = identifier[resource] [ literal[string] ]
keyword[if] identifier[resource] [ literal[string] ]== identifier[self] . identifier[trigger_settings] [ literal[string] ]:
identifier[resource_id] = identifier[resource] [ literal[string] ]
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[resource] [ literal[string] ])
keyword[return] identifier[resource_id] , identifier[parent_id] | def find_resource_ids(self):
"""Given a resource path and API Id, find resource Id."""
all_resources = self.client.get_resources(restApiId=self.api_id)
parent_id = None
resource_id = None
for resource in all_resources['items']:
if resource['path'] == '/':
parent_id = resource['id'] # depends on [control=['if'], data=[]]
if resource['path'] == self.trigger_settings['resource']:
resource_id = resource['id']
self.log.info('Found Resource ID for: %s', resource['path']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['resource']]
return (resource_id, parent_id) |
def get_template_context(src, container="div", classes="", inner_classes="", alt="", background_image=False, no_css=False, aria_hidden=False):
"""Returns a template context for a flexible image template
tag implementation."""
context = {
"container": container,
"classes": classes,
"aspect_padding_bottom": aspect_ratio_percent(src),
"alt": alt,
"background_image": background_image,
"no_css": no_css,
"inner_classes": inner_classes,
"aria_hidden": aria_hidden,
}
# We can't do any of the srcset (or JS switching fallback) if we don't
# have a thumbnail library installed.
if not get_thumbnail_engine():
context["image"] = src
return context
sizes = get_image_sizes(src)
context["image_sizes"] = sizes
# Set the first image in the list as the one to be rendered initially
# (pre-JS-fallback). `if sizes` might not be a necessary check...
context["image"] = sizes[0]
context["image_sizes_json"] = json.dumps(sizes)
srcset_items = ["{} {}w".format(size["url"], size["width"]) for size in sizes]
context["image_sizes_srcset"] = ", ".join(srcset_items)
return context | def function[get_template_context, parameter[src, container, classes, inner_classes, alt, background_image, no_css, aria_hidden]]:
constant[Returns a template context for a flexible image template
tag implementation.]
variable[context] assign[=] dictionary[[<ast.Constant object at 0x7da1b0c494b0>, <ast.Constant object at 0x7da1b0c49480>, <ast.Constant object at 0x7da1b0c48640>, <ast.Constant object at 0x7da1b0c48610>, <ast.Constant object at 0x7da1b0c48310>, <ast.Constant object at 0x7da1b0c480a0>, <ast.Constant object at 0x7da1b0c49360>, <ast.Constant object at 0x7da1b0c49c00>], [<ast.Name object at 0x7da1b0c49420>, <ast.Name object at 0x7da1b0c489a0>, <ast.Call object at 0x7da1b0c48dc0>, <ast.Name object at 0x7da1b0c48400>, <ast.Name object at 0x7da1b0c495a0>, <ast.Name object at 0x7da1b0c481f0>, <ast.Name object at 0x7da1b0c492a0>, <ast.Name object at 0x7da1b0c493f0>]]
if <ast.UnaryOp object at 0x7da1b0c489d0> begin[:]
call[name[context]][constant[image]] assign[=] name[src]
return[name[context]]
variable[sizes] assign[=] call[name[get_image_sizes], parameter[name[src]]]
call[name[context]][constant[image_sizes]] assign[=] name[sizes]
call[name[context]][constant[image]] assign[=] call[name[sizes]][constant[0]]
call[name[context]][constant[image_sizes_json]] assign[=] call[name[json].dumps, parameter[name[sizes]]]
variable[srcset_items] assign[=] <ast.ListComp object at 0x7da20c9930a0>
call[name[context]][constant[image_sizes_srcset]] assign[=] call[constant[, ].join, parameter[name[srcset_items]]]
return[name[context]] | keyword[def] identifier[get_template_context] ( identifier[src] , identifier[container] = literal[string] , identifier[classes] = literal[string] , identifier[inner_classes] = literal[string] , identifier[alt] = literal[string] , identifier[background_image] = keyword[False] , identifier[no_css] = keyword[False] , identifier[aria_hidden] = keyword[False] ):
literal[string]
identifier[context] ={
literal[string] : identifier[container] ,
literal[string] : identifier[classes] ,
literal[string] : identifier[aspect_ratio_percent] ( identifier[src] ),
literal[string] : identifier[alt] ,
literal[string] : identifier[background_image] ,
literal[string] : identifier[no_css] ,
literal[string] : identifier[inner_classes] ,
literal[string] : identifier[aria_hidden] ,
}
keyword[if] keyword[not] identifier[get_thumbnail_engine] ():
identifier[context] [ literal[string] ]= identifier[src]
keyword[return] identifier[context]
identifier[sizes] = identifier[get_image_sizes] ( identifier[src] )
identifier[context] [ literal[string] ]= identifier[sizes]
identifier[context] [ literal[string] ]= identifier[sizes] [ literal[int] ]
identifier[context] [ literal[string] ]= identifier[json] . identifier[dumps] ( identifier[sizes] )
identifier[srcset_items] =[ literal[string] . identifier[format] ( identifier[size] [ literal[string] ], identifier[size] [ literal[string] ]) keyword[for] identifier[size] keyword[in] identifier[sizes] ]
identifier[context] [ literal[string] ]= literal[string] . identifier[join] ( identifier[srcset_items] )
keyword[return] identifier[context] | def get_template_context(src, container='div', classes='', inner_classes='', alt='', background_image=False, no_css=False, aria_hidden=False):
"""Returns a template context for a flexible image template
tag implementation."""
context = {'container': container, 'classes': classes, 'aspect_padding_bottom': aspect_ratio_percent(src), 'alt': alt, 'background_image': background_image, 'no_css': no_css, 'inner_classes': inner_classes, 'aria_hidden': aria_hidden}
# We can't do any of the srcset (or JS switching fallback) if we don't
# have a thumbnail library installed.
if not get_thumbnail_engine():
context['image'] = src
return context # depends on [control=['if'], data=[]]
sizes = get_image_sizes(src)
context['image_sizes'] = sizes
# Set the first image in the list as the one to be rendered initially
# (pre-JS-fallback). `if sizes` might not be a necessary check...
context['image'] = sizes[0]
context['image_sizes_json'] = json.dumps(sizes)
srcset_items = ['{} {}w'.format(size['url'], size['width']) for size in sizes]
context['image_sizes_srcset'] = ', '.join(srcset_items)
return context |
def scale(val, src, dst):
"""
Scale value from src range to dst range.
If value outside bounds, it is clipped and set to
the low or high bound of dst.
Ex:
scale(0, (0.0, 99.0), (-1.0, 1.0)) == -1.0
scale(-5, (0.0, 99.0), (-1.0, 1.0)) == -1.0
"""
if val < src[0]:
return dst[0]
if val > src[1]:
return dst[1]
return ((val - src[0]) / (src[1] - src[0])) * (dst[1] - dst[0]) + dst[0] | def function[scale, parameter[val, src, dst]]:
constant[
Scale value from src range to dst range.
If value outside bounds, it is clipped and set to
the low or high bound of dst.
Ex:
scale(0, (0.0, 99.0), (-1.0, 1.0)) == -1.0
scale(-5, (0.0, 99.0), (-1.0, 1.0)) == -1.0
]
if compare[name[val] less[<] call[name[src]][constant[0]]] begin[:]
return[call[name[dst]][constant[0]]]
if compare[name[val] greater[>] call[name[src]][constant[1]]] begin[:]
return[call[name[dst]][constant[1]]]
return[binary_operation[binary_operation[binary_operation[binary_operation[name[val] - call[name[src]][constant[0]]] / binary_operation[call[name[src]][constant[1]] - call[name[src]][constant[0]]]] * binary_operation[call[name[dst]][constant[1]] - call[name[dst]][constant[0]]]] + call[name[dst]][constant[0]]]] | keyword[def] identifier[scale] ( identifier[val] , identifier[src] , identifier[dst] ):
literal[string]
keyword[if] identifier[val] < identifier[src] [ literal[int] ]:
keyword[return] identifier[dst] [ literal[int] ]
keyword[if] identifier[val] > identifier[src] [ literal[int] ]:
keyword[return] identifier[dst] [ literal[int] ]
keyword[return] (( identifier[val] - identifier[src] [ literal[int] ])/( identifier[src] [ literal[int] ]- identifier[src] [ literal[int] ]))*( identifier[dst] [ literal[int] ]- identifier[dst] [ literal[int] ])+ identifier[dst] [ literal[int] ] | def scale(val, src, dst):
"""
Scale value from src range to dst range.
If value outside bounds, it is clipped and set to
the low or high bound of dst.
Ex:
scale(0, (0.0, 99.0), (-1.0, 1.0)) == -1.0
scale(-5, (0.0, 99.0), (-1.0, 1.0)) == -1.0
"""
if val < src[0]:
return dst[0] # depends on [control=['if'], data=[]]
if val > src[1]:
return dst[1] # depends on [control=['if'], data=[]]
return (val - src[0]) / (src[1] - src[0]) * (dst[1] - dst[0]) + dst[0] |
def randrange(order, entropy=None):
"""Return a random integer k such that 1 <= k < order, uniformly
distributed across that range. For simplicity, this only behaves well if
'order' is fairly close (but below) a power of 256. The try-try-again
algorithm we use takes longer and longer time (on average) to complete as
'order' falls, rising to a maximum of avg=512 loops for the worst-case
(256**k)+1 . All of the standard curves behave well. There is a cutoff at
10k loops (which raises RuntimeError) to prevent an infinite loop when
something is really broken like the entropy function not working.
Note that this function is not declared to be forwards-compatible: we may
change the behavior in future releases. The entropy= argument (which
should get a callable that behaves like os.urandom) can be used to
achieve stability within a given release (for repeatable unit tests), but
should not be used as a long-term-compatible key generation algorithm.
"""
# we could handle arbitrary orders (even 256**k+1) better if we created
# candidates bit-wise instead of byte-wise, which would reduce the
# worst-case behavior to avg=2 loops, but that would be more complex. The
# change would be to round the order up to a power of 256, subtract one
# (to get 0xffff..), use that to get a byte-long mask for the top byte,
# generate the len-1 entropy bytes, generate one extra byte and mask off
# the top bits, then combine it with the rest. Requires jumping back and
# forth between strings and integers a lot.
if entropy is None:
entropy = os.urandom
assert order > 1
bytes = orderlen(order)
dont_try_forever = 10000 # gives about 2**-60 failures for worst case
while dont_try_forever > 0:
dont_try_forever -= 1
candidate = string_to_number(entropy(bytes)) + 1
if 1 <= candidate < order:
return candidate
continue
raise RuntimeError("randrange() tried hard but gave up, either something"
" is very wrong or you got realllly unlucky. Order was"
" %x" % order) | def function[randrange, parameter[order, entropy]]:
constant[Return a random integer k such that 1 <= k < order, uniformly
distributed across that range. For simplicity, this only behaves well if
'order' is fairly close (but below) a power of 256. The try-try-again
algorithm we use takes longer and longer time (on average) to complete as
'order' falls, rising to a maximum of avg=512 loops for the worst-case
(256**k)+1 . All of the standard curves behave well. There is a cutoff at
10k loops (which raises RuntimeError) to prevent an infinite loop when
something is really broken like the entropy function not working.
Note that this function is not declared to be forwards-compatible: we may
change the behavior in future releases. The entropy= argument (which
should get a callable that behaves like os.urandom) can be used to
achieve stability within a given release (for repeatable unit tests), but
should not be used as a long-term-compatible key generation algorithm.
]
if compare[name[entropy] is constant[None]] begin[:]
variable[entropy] assign[=] name[os].urandom
assert[compare[name[order] greater[>] constant[1]]]
variable[bytes] assign[=] call[name[orderlen], parameter[name[order]]]
variable[dont_try_forever] assign[=] constant[10000]
while compare[name[dont_try_forever] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da18ede4dc0>
variable[candidate] assign[=] binary_operation[call[name[string_to_number], parameter[call[name[entropy], parameter[name[bytes]]]]] + constant[1]]
if compare[constant[1] less_or_equal[<=] name[candidate]] begin[:]
return[name[candidate]]
continue
<ast.Raise object at 0x7da18ede4490> | keyword[def] identifier[randrange] ( identifier[order] , identifier[entropy] = keyword[None] ):
literal[string]
keyword[if] identifier[entropy] keyword[is] keyword[None] :
identifier[entropy] = identifier[os] . identifier[urandom]
keyword[assert] identifier[order] > literal[int]
identifier[bytes] = identifier[orderlen] ( identifier[order] )
identifier[dont_try_forever] = literal[int]
keyword[while] identifier[dont_try_forever] > literal[int] :
identifier[dont_try_forever] -= literal[int]
identifier[candidate] = identifier[string_to_number] ( identifier[entropy] ( identifier[bytes] ))+ literal[int]
keyword[if] literal[int] <= identifier[candidate] < identifier[order] :
keyword[return] identifier[candidate]
keyword[continue]
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string]
literal[string] % identifier[order] ) | def randrange(order, entropy=None):
"""Return a random integer k such that 1 <= k < order, uniformly
distributed across that range. For simplicity, this only behaves well if
'order' is fairly close (but below) a power of 256. The try-try-again
algorithm we use takes longer and longer time (on average) to complete as
'order' falls, rising to a maximum of avg=512 loops for the worst-case
(256**k)+1 . All of the standard curves behave well. There is a cutoff at
10k loops (which raises RuntimeError) to prevent an infinite loop when
something is really broken like the entropy function not working.
Note that this function is not declared to be forwards-compatible: we may
change the behavior in future releases. The entropy= argument (which
should get a callable that behaves like os.urandom) can be used to
achieve stability within a given release (for repeatable unit tests), but
should not be used as a long-term-compatible key generation algorithm.
"""
# we could handle arbitrary orders (even 256**k+1) better if we created
# candidates bit-wise instead of byte-wise, which would reduce the
# worst-case behavior to avg=2 loops, but that would be more complex. The
# change would be to round the order up to a power of 256, subtract one
# (to get 0xffff..), use that to get a byte-long mask for the top byte,
# generate the len-1 entropy bytes, generate one extra byte and mask off
# the top bits, then combine it with the rest. Requires jumping back and
# forth between strings and integers a lot.
if entropy is None:
entropy = os.urandom # depends on [control=['if'], data=['entropy']]
assert order > 1
bytes = orderlen(order)
dont_try_forever = 10000 # gives about 2**-60 failures for worst case
while dont_try_forever > 0:
dont_try_forever -= 1
candidate = string_to_number(entropy(bytes)) + 1
if 1 <= candidate < order:
return candidate # depends on [control=['if'], data=['candidate']]
continue # depends on [control=['while'], data=['dont_try_forever']]
raise RuntimeError('randrange() tried hard but gave up, either something is very wrong or you got realllly unlucky. Order was %x' % order) |
def INDEX_OF_CP(string_expression, substring_expression, start=None, end=None):
"""
Searches a string for an occurence of a substring and returns the UTF-8 code point index (zero-based) of the first occurence.
If the substring is not found, returns -1.
https://docs.mongodb.com/manual/reference/operator/aggregation/indexOfCP/
for more details
:param string_expression: The string or expression of string
:param substring_expression: The string or expression of substring
:param start: A number that can be represented as integers (or expression), that specifies the starting index position for the search.
:param end: A number that can be represented as integers (or expression), that specifies the ending index position for the search.
:return: Aggregation operator
"""
res = [string_expression, substring_expression]
if start is not None:
res.append(start)
if end is not None:
res.append(end)
return {'$indexOfCP': res} | def function[INDEX_OF_CP, parameter[string_expression, substring_expression, start, end]]:
constant[
Searches a string for an occurence of a substring and returns the UTF-8 code point index (zero-based) of the first occurence.
If the substring is not found, returns -1.
https://docs.mongodb.com/manual/reference/operator/aggregation/indexOfCP/
for more details
:param string_expression: The string or expression of string
:param substring_expression: The string or expression of substring
:param start: A number that can be represented as integers (or expression), that specifies the starting index position for the search.
:param end: A number that can be represented as integers (or expression), that specifies the ending index position for the search.
:return: Aggregation operator
]
variable[res] assign[=] list[[<ast.Name object at 0x7da1b209c100>, <ast.Name object at 0x7da1b209cfd0>]]
if compare[name[start] is_not constant[None]] begin[:]
call[name[res].append, parameter[name[start]]]
if compare[name[end] is_not constant[None]] begin[:]
call[name[res].append, parameter[name[end]]]
return[dictionary[[<ast.Constant object at 0x7da1b209e3b0>], [<ast.Name object at 0x7da1b1e421d0>]]] | keyword[def] identifier[INDEX_OF_CP] ( identifier[string_expression] , identifier[substring_expression] , identifier[start] = keyword[None] , identifier[end] = keyword[None] ):
literal[string]
identifier[res] =[ identifier[string_expression] , identifier[substring_expression] ]
keyword[if] identifier[start] keyword[is] keyword[not] keyword[None] :
identifier[res] . identifier[append] ( identifier[start] )
keyword[if] identifier[end] keyword[is] keyword[not] keyword[None] :
identifier[res] . identifier[append] ( identifier[end] )
keyword[return] { literal[string] : identifier[res] } | def INDEX_OF_CP(string_expression, substring_expression, start=None, end=None):
"""
Searches a string for an occurence of a substring and returns the UTF-8 code point index (zero-based) of the first occurence.
If the substring is not found, returns -1.
https://docs.mongodb.com/manual/reference/operator/aggregation/indexOfCP/
for more details
:param string_expression: The string or expression of string
:param substring_expression: The string or expression of substring
:param start: A number that can be represented as integers (or expression), that specifies the starting index position for the search.
:param end: A number that can be represented as integers (or expression), that specifies the ending index position for the search.
:return: Aggregation operator
"""
res = [string_expression, substring_expression]
if start is not None:
res.append(start) # depends on [control=['if'], data=['start']]
if end is not None:
res.append(end) # depends on [control=['if'], data=['end']]
return {'$indexOfCP': res} |
def d3logpdf_dlink3(self, link_f, y, Y_metadata=None):
"""
Gradient of the log-likelihood function at y given f, w.r.t shape parameter
.. math::
:param inv_link_f: latent variables link(f)
:type inv_link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: includes censoring information in dictionary key 'censored'
:returns: derivative of likelihood evaluated at points f w.r.t variance parameter
:rtype: float
"""
c = np.zeros_like(y)
if Y_metadata is not None and 'censored' in Y_metadata.keys():
c = Y_metadata['censored']
val = np.log(y) - link_f
val_scaled = val/np.sqrt(self.variance)
val_scaled2 = val/self.variance
a = (1 - stats.norm.cdf(val_scaled))
uncensored = 0
censored = c *( 2*np.exp(-3*(val**2)/(2*self.variance)) / ((a**3)*(2*np.pi*self.variance)**(3/2.))
- val*np.exp(-(val**2)/self.variance)/ ( (a**2)*np.pi*self.variance**2)
- val*np.exp(-(val**2)/self.variance)/ ( (a**2)*2*np.pi*self.variance**2)
- np.exp(-(val**2)/(2*self.variance))/ ( a*(self.variance**(1.50))*np.sqrt(2*np.pi))
+ (val**2)*np.exp(-(val**2)/(2*self.variance))/ ( a*np.sqrt(2*np.pi*self.variance)*self.variance**2 ) )
d3pdf_dlink3 = uncensored + censored
return d3pdf_dlink3 | def function[d3logpdf_dlink3, parameter[self, link_f, y, Y_metadata]]:
constant[
Gradient of the log-likelihood function at y given f, w.r.t shape parameter
.. math::
:param inv_link_f: latent variables link(f)
:type inv_link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: includes censoring information in dictionary key 'censored'
:returns: derivative of likelihood evaluated at points f w.r.t variance parameter
:rtype: float
]
variable[c] assign[=] call[name[np].zeros_like, parameter[name[y]]]
if <ast.BoolOp object at 0x7da1b21d4970> begin[:]
variable[c] assign[=] call[name[Y_metadata]][constant[censored]]
variable[val] assign[=] binary_operation[call[name[np].log, parameter[name[y]]] - name[link_f]]
variable[val_scaled] assign[=] binary_operation[name[val] / call[name[np].sqrt, parameter[name[self].variance]]]
variable[val_scaled2] assign[=] binary_operation[name[val] / name[self].variance]
variable[a] assign[=] binary_operation[constant[1] - call[name[stats].norm.cdf, parameter[name[val_scaled]]]]
variable[uncensored] assign[=] constant[0]
variable[censored] assign[=] binary_operation[name[c] * binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[2] * call[name[np].exp, parameter[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b21d4610> * binary_operation[name[val] ** constant[2]]] / binary_operation[constant[2] * name[self].variance]]]]] / binary_operation[binary_operation[name[a] ** constant[3]] * binary_operation[binary_operation[binary_operation[constant[2] * name[np].pi] * name[self].variance] ** binary_operation[constant[3] / constant[2.0]]]]] - binary_operation[binary_operation[name[val] * call[name[np].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b21d5300> / name[self].variance]]]] / binary_operation[binary_operation[binary_operation[name[a] ** constant[2]] * name[np].pi] * binary_operation[name[self].variance ** constant[2]]]]] - binary_operation[binary_operation[name[val] * call[name[np].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b21d4730> / name[self].variance]]]] / binary_operation[binary_operation[binary_operation[binary_operation[name[a] ** constant[2]] * constant[2]] * name[np].pi] * binary_operation[name[self].variance ** constant[2]]]]] - binary_operation[call[name[np].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da18fe93460> / binary_operation[constant[2] * name[self].variance]]]] / binary_operation[binary_operation[name[a] * binary_operation[name[self].variance ** constant[1.5]]] * call[name[np].sqrt, parameter[binary_operation[constant[2] * name[np].pi]]]]]] + binary_operation[binary_operation[binary_operation[name[val] ** constant[2]] * call[name[np].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da18fe90490> / binary_operation[constant[2] * name[self].variance]]]]] / binary_operation[binary_operation[name[a] * call[name[np].sqrt, parameter[binary_operation[binary_operation[constant[2] * name[np].pi] * name[self].variance]]]] * binary_operation[name[self].variance ** constant[2]]]]]]
variable[d3pdf_dlink3] assign[=] binary_operation[name[uncensored] + name[censored]]
return[name[d3pdf_dlink3]] | keyword[def] identifier[d3logpdf_dlink3] ( identifier[self] , identifier[link_f] , identifier[y] , identifier[Y_metadata] = keyword[None] ):
literal[string]
identifier[c] = identifier[np] . identifier[zeros_like] ( identifier[y] )
keyword[if] identifier[Y_metadata] keyword[is] keyword[not] keyword[None] keyword[and] literal[string] keyword[in] identifier[Y_metadata] . identifier[keys] ():
identifier[c] = identifier[Y_metadata] [ literal[string] ]
identifier[val] = identifier[np] . identifier[log] ( identifier[y] )- identifier[link_f]
identifier[val_scaled] = identifier[val] / identifier[np] . identifier[sqrt] ( identifier[self] . identifier[variance] )
identifier[val_scaled2] = identifier[val] / identifier[self] . identifier[variance]
identifier[a] =( literal[int] - identifier[stats] . identifier[norm] . identifier[cdf] ( identifier[val_scaled] ))
identifier[uncensored] = literal[int]
identifier[censored] = identifier[c] *( literal[int] * identifier[np] . identifier[exp] (- literal[int] *( identifier[val] ** literal[int] )/( literal[int] * identifier[self] . identifier[variance] ))/(( identifier[a] ** literal[int] )*( literal[int] * identifier[np] . identifier[pi] * identifier[self] . identifier[variance] )**( literal[int] / literal[int] ))
- identifier[val] * identifier[np] . identifier[exp] (-( identifier[val] ** literal[int] )/ identifier[self] . identifier[variance] )/(( identifier[a] ** literal[int] )* identifier[np] . identifier[pi] * identifier[self] . identifier[variance] ** literal[int] )
- identifier[val] * identifier[np] . identifier[exp] (-( identifier[val] ** literal[int] )/ identifier[self] . identifier[variance] )/(( identifier[a] ** literal[int] )* literal[int] * identifier[np] . identifier[pi] * identifier[self] . identifier[variance] ** literal[int] )
- identifier[np] . identifier[exp] (-( identifier[val] ** literal[int] )/( literal[int] * identifier[self] . identifier[variance] ))/( identifier[a] *( identifier[self] . identifier[variance] **( literal[int] ))* identifier[np] . identifier[sqrt] ( literal[int] * identifier[np] . identifier[pi] ))
+( identifier[val] ** literal[int] )* identifier[np] . identifier[exp] (-( identifier[val] ** literal[int] )/( literal[int] * identifier[self] . identifier[variance] ))/( identifier[a] * identifier[np] . identifier[sqrt] ( literal[int] * identifier[np] . identifier[pi] * identifier[self] . identifier[variance] )* identifier[self] . identifier[variance] ** literal[int] ))
identifier[d3pdf_dlink3] = identifier[uncensored] + identifier[censored]
keyword[return] identifier[d3pdf_dlink3] | def d3logpdf_dlink3(self, link_f, y, Y_metadata=None):
"""
Gradient of the log-likelihood function at y given f, w.r.t shape parameter
.. math::
:param inv_link_f: latent variables link(f)
:type inv_link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: includes censoring information in dictionary key 'censored'
:returns: derivative of likelihood evaluated at points f w.r.t variance parameter
:rtype: float
"""
c = np.zeros_like(y)
if Y_metadata is not None and 'censored' in Y_metadata.keys():
c = Y_metadata['censored'] # depends on [control=['if'], data=[]]
val = np.log(y) - link_f
val_scaled = val / np.sqrt(self.variance)
val_scaled2 = val / self.variance
a = 1 - stats.norm.cdf(val_scaled)
uncensored = 0
censored = c * (2 * np.exp(-3 * val ** 2 / (2 * self.variance)) / (a ** 3 * (2 * np.pi * self.variance) ** (3 / 2.0)) - val * np.exp(-val ** 2 / self.variance) / (a ** 2 * np.pi * self.variance ** 2) - val * np.exp(-val ** 2 / self.variance) / (a ** 2 * 2 * np.pi * self.variance ** 2) - np.exp(-val ** 2 / (2 * self.variance)) / (a * self.variance ** 1.5 * np.sqrt(2 * np.pi)) + val ** 2 * np.exp(-val ** 2 / (2 * self.variance)) / (a * np.sqrt(2 * np.pi * self.variance) * self.variance ** 2))
d3pdf_dlink3 = uncensored + censored
return d3pdf_dlink3 |
def add_arguments(parser):
"""
adds arguments for the swap urls command
"""
parser.add_argument('-o', '--old-environment', help='Old environment name', required=True)
parser.add_argument('-n', '--new-environment', help='New environment name', required=True) | def function[add_arguments, parameter[parser]]:
constant[
adds arguments for the swap urls command
]
call[name[parser].add_argument, parameter[constant[-o], constant[--old-environment]]]
call[name[parser].add_argument, parameter[constant[-n], constant[--new-environment]]] | keyword[def] identifier[add_arguments] ( identifier[parser] ):
literal[string]
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] = literal[string] , identifier[required] = keyword[True] )
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] = literal[string] , identifier[required] = keyword[True] ) | def add_arguments(parser):
"""
adds arguments for the swap urls command
"""
parser.add_argument('-o', '--old-environment', help='Old environment name', required=True)
parser.add_argument('-n', '--new-environment', help='New environment name', required=True) |
def fidelity(rho0: Density, rho1: Density) -> float:
"""Return the fidelity F(rho0, rho1) between two mixed quantum states.
Note: Fidelity cannot be calculated entirely within the tensor backend.
"""
assert rho0.qubit_nb == rho1.qubit_nb # FIXME
rho1 = rho1.permute(rho0.qubits)
op0 = asarray(rho0.asoperator())
op1 = asarray(rho1.asoperator())
fid = np.real((np.trace(sqrtm(sqrtm(op0) @ op1 @ sqrtm(op0)))) ** 2)
fid = min(fid, 1.0)
fid = max(fid, 0.0) # Correct for rounding errors
return fid | def function[fidelity, parameter[rho0, rho1]]:
constant[Return the fidelity F(rho0, rho1) between two mixed quantum states.
Note: Fidelity cannot be calculated entirely within the tensor backend.
]
assert[compare[name[rho0].qubit_nb equal[==] name[rho1].qubit_nb]]
variable[rho1] assign[=] call[name[rho1].permute, parameter[name[rho0].qubits]]
variable[op0] assign[=] call[name[asarray], parameter[call[name[rho0].asoperator, parameter[]]]]
variable[op1] assign[=] call[name[asarray], parameter[call[name[rho1].asoperator, parameter[]]]]
variable[fid] assign[=] call[name[np].real, parameter[binary_operation[call[name[np].trace, parameter[call[name[sqrtm], parameter[binary_operation[binary_operation[call[name[sqrtm], parameter[name[op0]]] <ast.MatMult object at 0x7da2590d6860> name[op1]] <ast.MatMult object at 0x7da2590d6860> call[name[sqrtm], parameter[name[op0]]]]]]]] ** constant[2]]]]
variable[fid] assign[=] call[name[min], parameter[name[fid], constant[1.0]]]
variable[fid] assign[=] call[name[max], parameter[name[fid], constant[0.0]]]
return[name[fid]] | keyword[def] identifier[fidelity] ( identifier[rho0] : identifier[Density] , identifier[rho1] : identifier[Density] )-> identifier[float] :
literal[string]
keyword[assert] identifier[rho0] . identifier[qubit_nb] == identifier[rho1] . identifier[qubit_nb]
identifier[rho1] = identifier[rho1] . identifier[permute] ( identifier[rho0] . identifier[qubits] )
identifier[op0] = identifier[asarray] ( identifier[rho0] . identifier[asoperator] ())
identifier[op1] = identifier[asarray] ( identifier[rho1] . identifier[asoperator] ())
identifier[fid] = identifier[np] . identifier[real] (( identifier[np] . identifier[trace] ( identifier[sqrtm] ( identifier[sqrtm] ( identifier[op0] )@ identifier[op1] @ identifier[sqrtm] ( identifier[op0] ))))** literal[int] )
identifier[fid] = identifier[min] ( identifier[fid] , literal[int] )
identifier[fid] = identifier[max] ( identifier[fid] , literal[int] )
keyword[return] identifier[fid] | def fidelity(rho0: Density, rho1: Density) -> float:
"""Return the fidelity F(rho0, rho1) between two mixed quantum states.
Note: Fidelity cannot be calculated entirely within the tensor backend.
"""
assert rho0.qubit_nb == rho1.qubit_nb # FIXME
rho1 = rho1.permute(rho0.qubits)
op0 = asarray(rho0.asoperator())
op1 = asarray(rho1.asoperator())
fid = np.real(np.trace(sqrtm(sqrtm(op0) @ op1 @ sqrtm(op0))) ** 2)
fid = min(fid, 1.0)
fid = max(fid, 0.0) # Correct for rounding errors
return fid |
def _CollectHistoryAgg_(contactHist, fieldHistObj, fieldName):
"""
Return updated history dictionary with new field change
:param dict contactHist: Existing contact history dictionary
:param dict fieldHistObj: Output of _CollectHistory_
:param string fieldName: field name
"""
if fieldHistObj!={}:
if fieldName not in contactHist.keys():
contactHist[fieldName] = {}
for lookupType in fieldHistObj.keys():
contactHist[fieldName][lookupType] = fieldHistObj[lookupType]
return contactHist | def function[_CollectHistoryAgg_, parameter[contactHist, fieldHistObj, fieldName]]:
constant[
Return updated history dictionary with new field change
:param dict contactHist: Existing contact history dictionary
:param dict fieldHistObj: Output of _CollectHistory_
:param string fieldName: field name
]
if compare[name[fieldHistObj] not_equal[!=] dictionary[[], []]] begin[:]
if compare[name[fieldName] <ast.NotIn object at 0x7da2590d7190> call[name[contactHist].keys, parameter[]]] begin[:]
call[name[contactHist]][name[fieldName]] assign[=] dictionary[[], []]
for taget[name[lookupType]] in starred[call[name[fieldHistObj].keys, parameter[]]] begin[:]
call[call[name[contactHist]][name[fieldName]]][name[lookupType]] assign[=] call[name[fieldHistObj]][name[lookupType]]
return[name[contactHist]] | keyword[def] identifier[_CollectHistoryAgg_] ( identifier[contactHist] , identifier[fieldHistObj] , identifier[fieldName] ):
literal[string]
keyword[if] identifier[fieldHistObj] !={}:
keyword[if] identifier[fieldName] keyword[not] keyword[in] identifier[contactHist] . identifier[keys] ():
identifier[contactHist] [ identifier[fieldName] ]={}
keyword[for] identifier[lookupType] keyword[in] identifier[fieldHistObj] . identifier[keys] ():
identifier[contactHist] [ identifier[fieldName] ][ identifier[lookupType] ]= identifier[fieldHistObj] [ identifier[lookupType] ]
keyword[return] identifier[contactHist] | def _CollectHistoryAgg_(contactHist, fieldHistObj, fieldName):
"""
Return updated history dictionary with new field change
:param dict contactHist: Existing contact history dictionary
:param dict fieldHistObj: Output of _CollectHistory_
:param string fieldName: field name
"""
if fieldHistObj != {}:
if fieldName not in contactHist.keys():
contactHist[fieldName] = {} # depends on [control=['if'], data=['fieldName']]
for lookupType in fieldHistObj.keys():
contactHist[fieldName][lookupType] = fieldHistObj[lookupType] # depends on [control=['for'], data=['lookupType']] # depends on [control=['if'], data=['fieldHistObj']]
return contactHist |
def execute(self, correlation_id, args):
"""
Executes the command given specific arguments as an input.
Args:
correlation_id: a unique correlation/transaction id
args: command arguments
Returns: an execution result.
Raises:
ApplicationException: when execution fails for whatever reason.
"""
# Validate arguments
if self._schema != None:
self.validate_and_throw_exception(correlation_id, args)
# Call the function
try:
return self._function(correlation_id, args)
# Intercept unhandled errors
except Exception as ex:
raise InvocationException(
correlation_id,
"EXEC_FAILED",
"Execution " + self._name + " failed: " + str(ex)
).with_details("command", self._name).wrap(ex) | def function[execute, parameter[self, correlation_id, args]]:
constant[
Executes the command given specific arguments as an input.
Args:
correlation_id: a unique correlation/transaction id
args: command arguments
Returns: an execution result.
Raises:
ApplicationException: when execution fails for whatever reason.
]
if compare[name[self]._schema not_equal[!=] constant[None]] begin[:]
call[name[self].validate_and_throw_exception, parameter[name[correlation_id], name[args]]]
<ast.Try object at 0x7da2041dbe80> | keyword[def] identifier[execute] ( identifier[self] , identifier[correlation_id] , identifier[args] ):
literal[string]
keyword[if] identifier[self] . identifier[_schema] != keyword[None] :
identifier[self] . identifier[validate_and_throw_exception] ( identifier[correlation_id] , identifier[args] )
keyword[try] :
keyword[return] identifier[self] . identifier[_function] ( identifier[correlation_id] , identifier[args] )
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
keyword[raise] identifier[InvocationException] (
identifier[correlation_id] ,
literal[string] ,
literal[string] + identifier[self] . identifier[_name] + literal[string] + identifier[str] ( identifier[ex] )
). identifier[with_details] ( literal[string] , identifier[self] . identifier[_name] ). identifier[wrap] ( identifier[ex] ) | def execute(self, correlation_id, args):
"""
Executes the command given specific arguments as an input.
Args:
correlation_id: a unique correlation/transaction id
args: command arguments
Returns: an execution result.
Raises:
ApplicationException: when execution fails for whatever reason.
""" # Validate arguments
if self._schema != None:
self.validate_and_throw_exception(correlation_id, args) # depends on [control=['if'], data=[]] # Call the function
try:
return self._function(correlation_id, args) # depends on [control=['try'], data=[]] # Intercept unhandled errors
except Exception as ex:
raise InvocationException(correlation_id, 'EXEC_FAILED', 'Execution ' + self._name + ' failed: ' + str(ex)).with_details('command', self._name).wrap(ex) # depends on [control=['except'], data=['ex']] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.