code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def delete_channel(self, chname):
"""Delete a given channel from viewer."""
name = chname.lower()
if len(self.channel_names) < 1:
self.logger.error('Delete channel={0} failed. '
'No channels left.'.format(chname))
return
with self.lock:
channel = self.channel[name]
# Close local plugins open on this channel
self.close_plugins(channel)
try:
idx = self.channel_names.index(chname)
except ValueError:
idx = 0
# Update the channels control
self.channel_names.remove(channel.name)
self.channel_names.sort()
self.ds.remove_tab(chname)
del self.channel[name]
self.prefs.remove_settings('channel_' + chname)
# pick new channel
num_channels = len(self.channel_names)
if num_channels > 0:
if idx >= num_channels:
idx = num_channels - 1
self.change_channel(self.channel_names[idx])
else:
self.cur_channel = None
self.make_gui_callback('delete-channel', channel)
|
def function[delete_channel, parameter[self, chname]]:
constant[Delete a given channel from viewer.]
variable[name] assign[=] call[name[chname].lower, parameter[]]
if compare[call[name[len], parameter[name[self].channel_names]] less[<] constant[1]] begin[:]
call[name[self].logger.error, parameter[call[constant[Delete channel={0} failed. No channels left.].format, parameter[name[chname]]]]]
return[None]
with name[self].lock begin[:]
variable[channel] assign[=] call[name[self].channel][name[name]]
call[name[self].close_plugins, parameter[name[channel]]]
<ast.Try object at 0x7da18dc041f0>
call[name[self].channel_names.remove, parameter[name[channel].name]]
call[name[self].channel_names.sort, parameter[]]
call[name[self].ds.remove_tab, parameter[name[chname]]]
<ast.Delete object at 0x7da18dc06500>
call[name[self].prefs.remove_settings, parameter[binary_operation[constant[channel_] + name[chname]]]]
variable[num_channels] assign[=] call[name[len], parameter[name[self].channel_names]]
if compare[name[num_channels] greater[>] constant[0]] begin[:]
if compare[name[idx] greater_or_equal[>=] name[num_channels]] begin[:]
variable[idx] assign[=] binary_operation[name[num_channels] - constant[1]]
call[name[self].change_channel, parameter[call[name[self].channel_names][name[idx]]]]
call[name[self].make_gui_callback, parameter[constant[delete-channel], name[channel]]]
|
keyword[def] identifier[delete_channel] ( identifier[self] , identifier[chname] ):
literal[string]
identifier[name] = identifier[chname] . identifier[lower] ()
keyword[if] identifier[len] ( identifier[self] . identifier[channel_names] )< literal[int] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string]
literal[string] . identifier[format] ( identifier[chname] ))
keyword[return]
keyword[with] identifier[self] . identifier[lock] :
identifier[channel] = identifier[self] . identifier[channel] [ identifier[name] ]
identifier[self] . identifier[close_plugins] ( identifier[channel] )
keyword[try] :
identifier[idx] = identifier[self] . identifier[channel_names] . identifier[index] ( identifier[chname] )
keyword[except] identifier[ValueError] :
identifier[idx] = literal[int]
identifier[self] . identifier[channel_names] . identifier[remove] ( identifier[channel] . identifier[name] )
identifier[self] . identifier[channel_names] . identifier[sort] ()
identifier[self] . identifier[ds] . identifier[remove_tab] ( identifier[chname] )
keyword[del] identifier[self] . identifier[channel] [ identifier[name] ]
identifier[self] . identifier[prefs] . identifier[remove_settings] ( literal[string] + identifier[chname] )
identifier[num_channels] = identifier[len] ( identifier[self] . identifier[channel_names] )
keyword[if] identifier[num_channels] > literal[int] :
keyword[if] identifier[idx] >= identifier[num_channels] :
identifier[idx] = identifier[num_channels] - literal[int]
identifier[self] . identifier[change_channel] ( identifier[self] . identifier[channel_names] [ identifier[idx] ])
keyword[else] :
identifier[self] . identifier[cur_channel] = keyword[None]
identifier[self] . identifier[make_gui_callback] ( literal[string] , identifier[channel] )
|
def delete_channel(self, chname):
"""Delete a given channel from viewer."""
name = chname.lower()
if len(self.channel_names) < 1:
self.logger.error('Delete channel={0} failed. No channels left.'.format(chname))
return # depends on [control=['if'], data=[]]
with self.lock:
channel = self.channel[name]
# Close local plugins open on this channel
self.close_plugins(channel)
try:
idx = self.channel_names.index(chname) # depends on [control=['try'], data=[]]
except ValueError:
idx = 0 # depends on [control=['except'], data=[]]
# Update the channels control
self.channel_names.remove(channel.name)
self.channel_names.sort()
self.ds.remove_tab(chname)
del self.channel[name]
self.prefs.remove_settings('channel_' + chname)
# pick new channel
num_channels = len(self.channel_names)
if num_channels > 0:
if idx >= num_channels:
idx = num_channels - 1 # depends on [control=['if'], data=['idx', 'num_channels']]
self.change_channel(self.channel_names[idx]) # depends on [control=['if'], data=['num_channels']]
else:
self.cur_channel = None # depends on [control=['with'], data=[]]
self.make_gui_callback('delete-channel', channel)
|
def get_gid(path, follow_symlinks=True):
'''
Return the id of the group that owns a given file
path
file or directory of which to get the gid
follow_symlinks
indicated if symlinks should be followed
CLI Example:
.. code-block:: bash
salt '*' file.get_gid /etc/passwd
.. versionchanged:: 0.16.4
``follow_symlinks`` option added
'''
return stats(os.path.expanduser(path), follow_symlinks=follow_symlinks).get('gid', -1)
|
def function[get_gid, parameter[path, follow_symlinks]]:
constant[
Return the id of the group that owns a given file
path
file or directory of which to get the gid
follow_symlinks
indicated if symlinks should be followed
CLI Example:
.. code-block:: bash
salt '*' file.get_gid /etc/passwd
.. versionchanged:: 0.16.4
``follow_symlinks`` option added
]
return[call[call[name[stats], parameter[call[name[os].path.expanduser, parameter[name[path]]]]].get, parameter[constant[gid], <ast.UnaryOp object at 0x7da18f09eb60>]]]
|
keyword[def] identifier[get_gid] ( identifier[path] , identifier[follow_symlinks] = keyword[True] ):
literal[string]
keyword[return] identifier[stats] ( identifier[os] . identifier[path] . identifier[expanduser] ( identifier[path] ), identifier[follow_symlinks] = identifier[follow_symlinks] ). identifier[get] ( literal[string] ,- literal[int] )
|
def get_gid(path, follow_symlinks=True):
"""
Return the id of the group that owns a given file
path
file or directory of which to get the gid
follow_symlinks
indicated if symlinks should be followed
CLI Example:
.. code-block:: bash
salt '*' file.get_gid /etc/passwd
.. versionchanged:: 0.16.4
``follow_symlinks`` option added
"""
return stats(os.path.expanduser(path), follow_symlinks=follow_symlinks).get('gid', -1)
|
def list_databases(self):
"""
Runs the ``\\list`` command and returns a list of column values with
information about all databases.
"""
lines = output_lines(self.exec_psql('\\list'))
return [line.split('|') for line in lines]
|
def function[list_databases, parameter[self]]:
constant[
Runs the ``\list`` command and returns a list of column values with
information about all databases.
]
variable[lines] assign[=] call[name[output_lines], parameter[call[name[self].exec_psql, parameter[constant[\list]]]]]
return[<ast.ListComp object at 0x7da1b25d9090>]
|
keyword[def] identifier[list_databases] ( identifier[self] ):
literal[string]
identifier[lines] = identifier[output_lines] ( identifier[self] . identifier[exec_psql] ( literal[string] ))
keyword[return] [ identifier[line] . identifier[split] ( literal[string] ) keyword[for] identifier[line] keyword[in] identifier[lines] ]
|
def list_databases(self):
"""
Runs the ``\\list`` command and returns a list of column values with
information about all databases.
"""
lines = output_lines(self.exec_psql('\\list'))
return [line.split('|') for line in lines]
|
def convert_gemm(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert Linear.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting Linear ...')
if names == 'short':
tf_name = 'FC' + random_string(6)
elif names == 'keep':
tf_name = w_name
else:
tf_name = w_name + str(random.random())
bias_name = '{0}.bias'.format(w_name)
weights_name = '{0}.weight'.format(w_name)
W = weights[weights_name].numpy().transpose()
input_channels, output_channels = W.shape
keras_weights = [W]
has_bias = False
if bias_name in weights:
bias = weights[bias_name].numpy()
keras_weights = [W, bias]
has_bias = True
dense = keras.layers.Dense(
output_channels,
weights=keras_weights, use_bias=has_bias, name=tf_name, bias_initializer='zeros', kernel_initializer='zeros',
)
layers[scope_name] = dense(layers[inputs[0]])
|
def function[convert_gemm, parameter[params, w_name, scope_name, inputs, layers, weights, names]]:
constant[
Convert Linear.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
]
call[name[print], parameter[constant[Converting Linear ...]]]
if compare[name[names] equal[==] constant[short]] begin[:]
variable[tf_name] assign[=] binary_operation[constant[FC] + call[name[random_string], parameter[constant[6]]]]
variable[bias_name] assign[=] call[constant[{0}.bias].format, parameter[name[w_name]]]
variable[weights_name] assign[=] call[constant[{0}.weight].format, parameter[name[w_name]]]
variable[W] assign[=] call[call[call[name[weights]][name[weights_name]].numpy, parameter[]].transpose, parameter[]]
<ast.Tuple object at 0x7da1b01ba8f0> assign[=] name[W].shape
variable[keras_weights] assign[=] list[[<ast.Name object at 0x7da1b01bb280>]]
variable[has_bias] assign[=] constant[False]
if compare[name[bias_name] in name[weights]] begin[:]
variable[bias] assign[=] call[call[name[weights]][name[bias_name]].numpy, parameter[]]
variable[keras_weights] assign[=] list[[<ast.Name object at 0x7da1b01ba8c0>, <ast.Name object at 0x7da1b01b9c90>]]
variable[has_bias] assign[=] constant[True]
variable[dense] assign[=] call[name[keras].layers.Dense, parameter[name[output_channels]]]
call[name[layers]][name[scope_name]] assign[=] call[name[dense], parameter[call[name[layers]][call[name[inputs]][constant[0]]]]]
|
keyword[def] identifier[convert_gemm] ( identifier[params] , identifier[w_name] , identifier[scope_name] , identifier[inputs] , identifier[layers] , identifier[weights] , identifier[names] ):
literal[string]
identifier[print] ( literal[string] )
keyword[if] identifier[names] == literal[string] :
identifier[tf_name] = literal[string] + identifier[random_string] ( literal[int] )
keyword[elif] identifier[names] == literal[string] :
identifier[tf_name] = identifier[w_name]
keyword[else] :
identifier[tf_name] = identifier[w_name] + identifier[str] ( identifier[random] . identifier[random] ())
identifier[bias_name] = literal[string] . identifier[format] ( identifier[w_name] )
identifier[weights_name] = literal[string] . identifier[format] ( identifier[w_name] )
identifier[W] = identifier[weights] [ identifier[weights_name] ]. identifier[numpy] (). identifier[transpose] ()
identifier[input_channels] , identifier[output_channels] = identifier[W] . identifier[shape]
identifier[keras_weights] =[ identifier[W] ]
identifier[has_bias] = keyword[False]
keyword[if] identifier[bias_name] keyword[in] identifier[weights] :
identifier[bias] = identifier[weights] [ identifier[bias_name] ]. identifier[numpy] ()
identifier[keras_weights] =[ identifier[W] , identifier[bias] ]
identifier[has_bias] = keyword[True]
identifier[dense] = identifier[keras] . identifier[layers] . identifier[Dense] (
identifier[output_channels] ,
identifier[weights] = identifier[keras_weights] , identifier[use_bias] = identifier[has_bias] , identifier[name] = identifier[tf_name] , identifier[bias_initializer] = literal[string] , identifier[kernel_initializer] = literal[string] ,
)
identifier[layers] [ identifier[scope_name] ]= identifier[dense] ( identifier[layers] [ identifier[inputs] [ literal[int] ]])
|
def convert_gemm(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert Linear.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting Linear ...')
if names == 'short':
tf_name = 'FC' + random_string(6) # depends on [control=['if'], data=[]]
elif names == 'keep':
tf_name = w_name # depends on [control=['if'], data=[]]
else:
tf_name = w_name + str(random.random())
bias_name = '{0}.bias'.format(w_name)
weights_name = '{0}.weight'.format(w_name)
W = weights[weights_name].numpy().transpose()
(input_channels, output_channels) = W.shape
keras_weights = [W]
has_bias = False
if bias_name in weights:
bias = weights[bias_name].numpy()
keras_weights = [W, bias]
has_bias = True # depends on [control=['if'], data=['bias_name', 'weights']]
dense = keras.layers.Dense(output_channels, weights=keras_weights, use_bias=has_bias, name=tf_name, bias_initializer='zeros', kernel_initializer='zeros')
layers[scope_name] = dense(layers[inputs[0]])
|
def prepare_request(self, kwargs):
""" If session is in use, prepares the request headers and
checks if the session is expired.
"""
if self.session_id is not None:
actual = dt.datetime.now()
if (self.last_activity + self.inactivity_limit) < actual:
# session expired
if self.persist:
# request new session
self.create_session()
actual = dt.datetime.now()
else:
# raise error and recommend to manualy refresh session
raise RuntimeError('A non Persistent Session is expired. '
'For consistency reasons this exception is raised. '
'Please try again with manual refresh of the session ')
self.last_activity = actual
headers = kwargs.get('headers')
if headers is None:
kwargs['headers'] = headers = {}
headers['workbook-session-id'] = self.session_id
|
def function[prepare_request, parameter[self, kwargs]]:
constant[ If session is in use, prepares the request headers and
checks if the session is expired.
]
if compare[name[self].session_id is_not constant[None]] begin[:]
variable[actual] assign[=] call[name[dt].datetime.now, parameter[]]
if compare[binary_operation[name[self].last_activity + name[self].inactivity_limit] less[<] name[actual]] begin[:]
if name[self].persist begin[:]
call[name[self].create_session, parameter[]]
variable[actual] assign[=] call[name[dt].datetime.now, parameter[]]
name[self].last_activity assign[=] name[actual]
variable[headers] assign[=] call[name[kwargs].get, parameter[constant[headers]]]
if compare[name[headers] is constant[None]] begin[:]
call[name[kwargs]][constant[headers]] assign[=] dictionary[[], []]
call[name[headers]][constant[workbook-session-id]] assign[=] name[self].session_id
|
keyword[def] identifier[prepare_request] ( identifier[self] , identifier[kwargs] ):
literal[string]
keyword[if] identifier[self] . identifier[session_id] keyword[is] keyword[not] keyword[None] :
identifier[actual] = identifier[dt] . identifier[datetime] . identifier[now] ()
keyword[if] ( identifier[self] . identifier[last_activity] + identifier[self] . identifier[inactivity_limit] )< identifier[actual] :
keyword[if] identifier[self] . identifier[persist] :
identifier[self] . identifier[create_session] ()
identifier[actual] = identifier[dt] . identifier[datetime] . identifier[now] ()
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string]
literal[string] )
identifier[self] . identifier[last_activity] = identifier[actual]
identifier[headers] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] identifier[headers] keyword[is] keyword[None] :
identifier[kwargs] [ literal[string] ]= identifier[headers] ={}
identifier[headers] [ literal[string] ]= identifier[self] . identifier[session_id]
|
def prepare_request(self, kwargs):
""" If session is in use, prepares the request headers and
checks if the session is expired.
"""
if self.session_id is not None:
actual = dt.datetime.now()
if self.last_activity + self.inactivity_limit < actual:
# session expired
if self.persist:
# request new session
self.create_session()
actual = dt.datetime.now() # depends on [control=['if'], data=[]]
else:
# raise error and recommend to manualy refresh session
raise RuntimeError('A non Persistent Session is expired. For consistency reasons this exception is raised. Please try again with manual refresh of the session ') # depends on [control=['if'], data=['actual']]
self.last_activity = actual
headers = kwargs.get('headers')
if headers is None:
kwargs['headers'] = headers = {} # depends on [control=['if'], data=['headers']]
headers['workbook-session-id'] = self.session_id # depends on [control=['if'], data=[]]
|
def p_localparamdecl_signed_width(self, p):
'localparamdecl : LOCALPARAM SIGNED width param_substitution_list SEMICOLON'
paramlist = [Localparam(rname, rvalue, p[3], signed=True, lineno=p.lineno(3))
for rname, rvalue in p[4]]
p[0] = Decl(tuple(paramlist), lineno=p.lineno(1))
p.set_lineno(0, p.lineno(1))
|
def function[p_localparamdecl_signed_width, parameter[self, p]]:
constant[localparamdecl : LOCALPARAM SIGNED width param_substitution_list SEMICOLON]
variable[paramlist] assign[=] <ast.ListComp object at 0x7da1b1603910>
call[name[p]][constant[0]] assign[=] call[name[Decl], parameter[call[name[tuple], parameter[name[paramlist]]]]]
call[name[p].set_lineno, parameter[constant[0], call[name[p].lineno, parameter[constant[1]]]]]
|
keyword[def] identifier[p_localparamdecl_signed_width] ( identifier[self] , identifier[p] ):
literal[string]
identifier[paramlist] =[ identifier[Localparam] ( identifier[rname] , identifier[rvalue] , identifier[p] [ literal[int] ], identifier[signed] = keyword[True] , identifier[lineno] = identifier[p] . identifier[lineno] ( literal[int] ))
keyword[for] identifier[rname] , identifier[rvalue] keyword[in] identifier[p] [ literal[int] ]]
identifier[p] [ literal[int] ]= identifier[Decl] ( identifier[tuple] ( identifier[paramlist] ), identifier[lineno] = identifier[p] . identifier[lineno] ( literal[int] ))
identifier[p] . identifier[set_lineno] ( literal[int] , identifier[p] . identifier[lineno] ( literal[int] ))
|
def p_localparamdecl_signed_width(self, p):
"""localparamdecl : LOCALPARAM SIGNED width param_substitution_list SEMICOLON"""
paramlist = [Localparam(rname, rvalue, p[3], signed=True, lineno=p.lineno(3)) for (rname, rvalue) in p[4]]
p[0] = Decl(tuple(paramlist), lineno=p.lineno(1))
p.set_lineno(0, p.lineno(1))
|
def add_cli_summarize(main: click.Group) -> click.Group: # noqa: D202
"""Add a ``summarize`` command to main :mod:`click` function."""
@main.command()
@click.pass_obj
def summarize(manager: AbstractManager):
"""Summarize the contents of the database."""
if not manager.is_populated():
click.secho(f'{manager.module_name} has not been populated', fg='red')
sys.exit(1)
for name, count in sorted(manager.summarize().items()):
click.echo(f'{name.capitalize()}: {count}')
return main
|
def function[add_cli_summarize, parameter[main]]:
constant[Add a ``summarize`` command to main :mod:`click` function.]
def function[summarize, parameter[manager]]:
constant[Summarize the contents of the database.]
if <ast.UnaryOp object at 0x7da1b00fbc70> begin[:]
call[name[click].secho, parameter[<ast.JoinedStr object at 0x7da1b00fab90>]]
call[name[sys].exit, parameter[constant[1]]]
for taget[tuple[[<ast.Name object at 0x7da1b00faf20>, <ast.Name object at 0x7da1b00fa650>]]] in starred[call[name[sorted], parameter[call[call[name[manager].summarize, parameter[]].items, parameter[]]]]] begin[:]
call[name[click].echo, parameter[<ast.JoinedStr object at 0x7da1b00f93f0>]]
return[name[main]]
|
keyword[def] identifier[add_cli_summarize] ( identifier[main] : identifier[click] . identifier[Group] )-> identifier[click] . identifier[Group] :
literal[string]
@ identifier[main] . identifier[command] ()
@ identifier[click] . identifier[pass_obj]
keyword[def] identifier[summarize] ( identifier[manager] : identifier[AbstractManager] ):
literal[string]
keyword[if] keyword[not] identifier[manager] . identifier[is_populated] ():
identifier[click] . identifier[secho] ( literal[string] , identifier[fg] = literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[for] identifier[name] , identifier[count] keyword[in] identifier[sorted] ( identifier[manager] . identifier[summarize] (). identifier[items] ()):
identifier[click] . identifier[echo] ( literal[string] )
keyword[return] identifier[main]
|
def add_cli_summarize(main: click.Group) -> click.Group: # noqa: D202
'Add a ``summarize`` command to main :mod:`click` function.'
@main.command()
@click.pass_obj
def summarize(manager: AbstractManager):
"""Summarize the contents of the database."""
if not manager.is_populated():
click.secho(f'{manager.module_name} has not been populated', fg='red')
sys.exit(1) # depends on [control=['if'], data=[]]
for (name, count) in sorted(manager.summarize().items()):
click.echo(f'{name.capitalize()}: {count}') # depends on [control=['for'], data=[]]
return main
|
def add(self, *items):
"""
Add items to be sorted.
@param items: One or more items to be added.
@type items: I{item}
@return: self
@rtype: L{DepList}
"""
for item in items:
self.unsorted.append(item)
key = item[0]
self.index[key] = item
return self
|
def function[add, parameter[self]]:
constant[
Add items to be sorted.
@param items: One or more items to be added.
@type items: I{item}
@return: self
@rtype: L{DepList}
]
for taget[name[item]] in starred[name[items]] begin[:]
call[name[self].unsorted.append, parameter[name[item]]]
variable[key] assign[=] call[name[item]][constant[0]]
call[name[self].index][name[key]] assign[=] name[item]
return[name[self]]
|
keyword[def] identifier[add] ( identifier[self] ,* identifier[items] ):
literal[string]
keyword[for] identifier[item] keyword[in] identifier[items] :
identifier[self] . identifier[unsorted] . identifier[append] ( identifier[item] )
identifier[key] = identifier[item] [ literal[int] ]
identifier[self] . identifier[index] [ identifier[key] ]= identifier[item]
keyword[return] identifier[self]
|
def add(self, *items):
"""
Add items to be sorted.
@param items: One or more items to be added.
@type items: I{item}
@return: self
@rtype: L{DepList}
"""
for item in items:
self.unsorted.append(item)
key = item[0]
self.index[key] = item # depends on [control=['for'], data=['item']]
return self
|
def triangular_membership(bin_center, bin_width, smoothness = 0.5):
r"""
Create a triangular membership function for a fuzzy histogram bin.
Parameters
----------
bin_center : number
The center of the bin of which to compute the membership function.
bin_width : number
The width of a single bin (all expected to be of equal width).
smoothness : number, optional
The smoothness of the function; determines the neighbourhood affected.
See below and `fuzzy_histogram` for a more detailed explanation
Returns
-------
triangular_membership : function
A triangular membership function centered on the bin.
Notes
-----
For the triangular function the smoothness factor has to be 0.5. Lower values
are accepted, but then the function assumes the shape of the trapezium membership
function. Higher values lead to an exception.
The triangular membership function is defined as
.. math::
\mu_{\triangle}(x) =
\left\{
\begin{array}{ll}
0, & x<a, x>c\\
\frac{x-a}{b-a}, & a\leq x\leq b \\
\frac{c-x}{c-b}, & b<x\leq c\\
\end{array}
\right.
where :math:`a` is the left border, :math:`c` the right border and :math:`b` the center of the triangular
function. The height of the triangle is chosen such, that all values contribute with
exactly one.
The standard triangular function (:math:`smoothness = 0.5`) is displayed in the following
figure
.. .. image:: images/triangular_01.png
"Triangular functions (1)"
where the bin width is :math:`2` with centers at :math:`-2`, :math:`0` and :math:`2`.
"""
if smoothness > 0.5: raise AttributeError('the triangular/trapezium membership functions supports only smoothnesses between 1/10 and 1/2.')
if smoothness < 0.5: return trapezoid_membership(bin_center, bin_width, smoothness)
a = bin_center - bin_width
b = float(bin_center)
c = bin_center + bin_width
def fun(x):
if x < a or x > c: return 0
elif x <= b: return (x-a)/(b-a)
else: return (c-x)/(c-b)
return fun
|
def function[triangular_membership, parameter[bin_center, bin_width, smoothness]]:
constant[
Create a triangular membership function for a fuzzy histogram bin.
Parameters
----------
bin_center : number
The center of the bin of which to compute the membership function.
bin_width : number
The width of a single bin (all expected to be of equal width).
smoothness : number, optional
The smoothness of the function; determines the neighbourhood affected.
See below and `fuzzy_histogram` for a more detailed explanation
Returns
-------
triangular_membership : function
A triangular membership function centered on the bin.
Notes
-----
For the triangular function the smoothness factor has to be 0.5. Lower values
are accepted, but then the function assumes the shape of the trapezium membership
function. Higher values lead to an exception.
The triangular membership function is defined as
.. math::
\mu_{\triangle}(x) =
\left\{
\begin{array}{ll}
0, & x<a, x>c\\
\frac{x-a}{b-a}, & a\leq x\leq b \\
\frac{c-x}{c-b}, & b<x\leq c\\
\end{array}
\right.
where :math:`a` is the left border, :math:`c` the right border and :math:`b` the center of the triangular
function. The height of the triangle is chosen such, that all values contribute with
exactly one.
The standard triangular function (:math:`smoothness = 0.5`) is displayed in the following
figure
.. .. image:: images/triangular_01.png
"Triangular functions (1)"
where the bin width is :math:`2` with centers at :math:`-2`, :math:`0` and :math:`2`.
]
if compare[name[smoothness] greater[>] constant[0.5]] begin[:]
<ast.Raise object at 0x7da18bc73d60>
if compare[name[smoothness] less[<] constant[0.5]] begin[:]
return[call[name[trapezoid_membership], parameter[name[bin_center], name[bin_width], name[smoothness]]]]
variable[a] assign[=] binary_operation[name[bin_center] - name[bin_width]]
variable[b] assign[=] call[name[float], parameter[name[bin_center]]]
variable[c] assign[=] binary_operation[name[bin_center] + name[bin_width]]
def function[fun, parameter[x]]:
if <ast.BoolOp object at 0x7da2054a4ca0> begin[:]
return[constant[0]]
return[name[fun]]
|
keyword[def] identifier[triangular_membership] ( identifier[bin_center] , identifier[bin_width] , identifier[smoothness] = literal[int] ):
literal[string]
keyword[if] identifier[smoothness] > literal[int] : keyword[raise] identifier[AttributeError] ( literal[string] )
keyword[if] identifier[smoothness] < literal[int] : keyword[return] identifier[trapezoid_membership] ( identifier[bin_center] , identifier[bin_width] , identifier[smoothness] )
identifier[a] = identifier[bin_center] - identifier[bin_width]
identifier[b] = identifier[float] ( identifier[bin_center] )
identifier[c] = identifier[bin_center] + identifier[bin_width]
keyword[def] identifier[fun] ( identifier[x] ):
keyword[if] identifier[x] < identifier[a] keyword[or] identifier[x] > identifier[c] : keyword[return] literal[int]
keyword[elif] identifier[x] <= identifier[b] : keyword[return] ( identifier[x] - identifier[a] )/( identifier[b] - identifier[a] )
keyword[else] : keyword[return] ( identifier[c] - identifier[x] )/( identifier[c] - identifier[b] )
keyword[return] identifier[fun]
|
def triangular_membership(bin_center, bin_width, smoothness=0.5):
"""
Create a triangular membership function for a fuzzy histogram bin.
Parameters
----------
bin_center : number
The center of the bin of which to compute the membership function.
bin_width : number
The width of a single bin (all expected to be of equal width).
smoothness : number, optional
The smoothness of the function; determines the neighbourhood affected.
See below and `fuzzy_histogram` for a more detailed explanation
Returns
-------
triangular_membership : function
A triangular membership function centered on the bin.
Notes
-----
For the triangular function the smoothness factor has to be 0.5. Lower values
are accepted, but then the function assumes the shape of the trapezium membership
function. Higher values lead to an exception.
The triangular membership function is defined as
.. math::
\\mu_{\\triangle}(x) =
\\left\\{
\\begin{array}{ll}
0, & x<a, x>c\\\\
\\frac{x-a}{b-a}, & a\\leq x\\leq b \\\\
\\frac{c-x}{c-b}, & b<x\\leq c\\\\
\\end{array}
\\right.
where :math:`a` is the left border, :math:`c` the right border and :math:`b` the center of the triangular
function. The height of the triangle is chosen such, that all values contribute with
exactly one.
The standard triangular function (:math:`smoothness = 0.5`) is displayed in the following
figure
.. .. image:: images/triangular_01.png
"Triangular functions (1)"
where the bin width is :math:`2` with centers at :math:`-2`, :math:`0` and :math:`2`.
"""
if smoothness > 0.5:
raise AttributeError('the triangular/trapezium membership functions supports only smoothnesses between 1/10 and 1/2.') # depends on [control=['if'], data=[]]
if smoothness < 0.5:
return trapezoid_membership(bin_center, bin_width, smoothness) # depends on [control=['if'], data=['smoothness']]
a = bin_center - bin_width
b = float(bin_center)
c = bin_center + bin_width
def fun(x):
if x < a or x > c:
return 0 # depends on [control=['if'], data=[]]
elif x <= b:
return (x - a) / (b - a) # depends on [control=['if'], data=['x', 'b']]
else:
return (c - x) / (c - b)
return fun
|
def post(self, request, bot_id, id, format=None):
"""
Add a new kik recipient to a handler
---
serializer: KikRecipientSerializer
responseMessages:
- code: 401
message: Not authenticated
- code: 400
message: Not valid request
"""
return super(KikRecipientList, self).post(request, bot_id, id, format)
|
def function[post, parameter[self, request, bot_id, id, format]]:
constant[
Add a new kik recipient to a handler
---
serializer: KikRecipientSerializer
responseMessages:
- code: 401
message: Not authenticated
- code: 400
message: Not valid request
]
return[call[call[name[super], parameter[name[KikRecipientList], name[self]]].post, parameter[name[request], name[bot_id], name[id], name[format]]]]
|
keyword[def] identifier[post] ( identifier[self] , identifier[request] , identifier[bot_id] , identifier[id] , identifier[format] = keyword[None] ):
literal[string]
keyword[return] identifier[super] ( identifier[KikRecipientList] , identifier[self] ). identifier[post] ( identifier[request] , identifier[bot_id] , identifier[id] , identifier[format] )
|
def post(self, request, bot_id, id, format=None):
"""
Add a new kik recipient to a handler
---
serializer: KikRecipientSerializer
responseMessages:
- code: 401
message: Not authenticated
- code: 400
message: Not valid request
"""
return super(KikRecipientList, self).post(request, bot_id, id, format)
|
def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
PluginBase.get_arguments(self)
if self.args.organizationName is not None:
self.organizationName = self.args.organizationName
if self.args.repositoryName is not None:
self.repositoryName = self.args.repositoryName
self.path = "v1/plugins/private/{0}/{1}/{2}".format(self.pluginName, self.organizationName, self.repositoryName)
|
def function[get_arguments, parameter[self]]:
constant[
Extracts the specific arguments of this CLI
]
call[name[PluginBase].get_arguments, parameter[name[self]]]
if compare[name[self].args.organizationName is_not constant[None]] begin[:]
name[self].organizationName assign[=] name[self].args.organizationName
if compare[name[self].args.repositoryName is_not constant[None]] begin[:]
name[self].repositoryName assign[=] name[self].args.repositoryName
name[self].path assign[=] call[constant[v1/plugins/private/{0}/{1}/{2}].format, parameter[name[self].pluginName, name[self].organizationName, name[self].repositoryName]]
|
keyword[def] identifier[get_arguments] ( identifier[self] ):
literal[string]
identifier[PluginBase] . identifier[get_arguments] ( identifier[self] )
keyword[if] identifier[self] . identifier[args] . identifier[organizationName] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[organizationName] = identifier[self] . identifier[args] . identifier[organizationName]
keyword[if] identifier[self] . identifier[args] . identifier[repositoryName] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[repositoryName] = identifier[self] . identifier[args] . identifier[repositoryName]
identifier[self] . identifier[path] = literal[string] . identifier[format] ( identifier[self] . identifier[pluginName] , identifier[self] . identifier[organizationName] , identifier[self] . identifier[repositoryName] )
|
def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
PluginBase.get_arguments(self)
if self.args.organizationName is not None:
self.organizationName = self.args.organizationName # depends on [control=['if'], data=[]]
if self.args.repositoryName is not None:
self.repositoryName = self.args.repositoryName # depends on [control=['if'], data=[]]
self.path = 'v1/plugins/private/{0}/{1}/{2}'.format(self.pluginName, self.organizationName, self.repositoryName)
|
def simplify_list(maybe_list):
"""Turn a length one list loaded by cwltool into a scalar.
Anything else is passed as-is, by reference."""
if isinstance(maybe_list, MutableSequence):
is_list = aslist(maybe_list)
if len(is_list) == 1:
return is_list[0]
return maybe_list
|
def function[simplify_list, parameter[maybe_list]]:
constant[Turn a length one list loaded by cwltool into a scalar.
Anything else is passed as-is, by reference.]
if call[name[isinstance], parameter[name[maybe_list], name[MutableSequence]]] begin[:]
variable[is_list] assign[=] call[name[aslist], parameter[name[maybe_list]]]
if compare[call[name[len], parameter[name[is_list]]] equal[==] constant[1]] begin[:]
return[call[name[is_list]][constant[0]]]
return[name[maybe_list]]
|
keyword[def] identifier[simplify_list] ( identifier[maybe_list] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[maybe_list] , identifier[MutableSequence] ):
identifier[is_list] = identifier[aslist] ( identifier[maybe_list] )
keyword[if] identifier[len] ( identifier[is_list] )== literal[int] :
keyword[return] identifier[is_list] [ literal[int] ]
keyword[return] identifier[maybe_list]
|
def simplify_list(maybe_list):
"""Turn a length one list loaded by cwltool into a scalar.
Anything else is passed as-is, by reference."""
if isinstance(maybe_list, MutableSequence):
is_list = aslist(maybe_list)
if len(is_list) == 1:
return is_list[0] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return maybe_list
|
def iter_prefixes(path):
"""
Iterate through all (non-empty) prefixes of a dotted path.
Example:
>>> list(iter_prefixes('foo.bar.baz'))
['foo', 'foo.bar', 'foo.bar.baz']
"""
split_path = path.split('.')
for i in range(1, len(split_path) + 1):
yield join_paths(*split_path[:i])
|
def function[iter_prefixes, parameter[path]]:
constant[
Iterate through all (non-empty) prefixes of a dotted path.
Example:
>>> list(iter_prefixes('foo.bar.baz'))
['foo', 'foo.bar', 'foo.bar.baz']
]
variable[split_path] assign[=] call[name[path].split, parameter[constant[.]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[call[name[len], parameter[name[split_path]]] + constant[1]]]]] begin[:]
<ast.Yield object at 0x7da1b18311e0>
|
keyword[def] identifier[iter_prefixes] ( identifier[path] ):
literal[string]
identifier[split_path] = identifier[path] . identifier[split] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[split_path] )+ literal[int] ):
keyword[yield] identifier[join_paths] (* identifier[split_path] [: identifier[i] ])
|
def iter_prefixes(path):
"""
Iterate through all (non-empty) prefixes of a dotted path.
Example:
>>> list(iter_prefixes('foo.bar.baz'))
['foo', 'foo.bar', 'foo.bar.baz']
"""
split_path = path.split('.')
for i in range(1, len(split_path) + 1):
yield join_paths(*split_path[:i]) # depends on [control=['for'], data=['i']]
|
def _set_redist_connected(self, v, load=False):
"""
Setter method for redist_connected, mapped from YANG variable /isis_state/router_isis_config/is_address_family_v6/redist_connected (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_redist_connected is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_redist_connected() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=redist_connected.redist_connected, is_container='container', presence=False, yang_name="redist-connected", rest_name="redist-connected", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-redistribution-redist-connected-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """redist_connected must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=redist_connected.redist_connected, is_container='container', presence=False, yang_name="redist-connected", rest_name="redist-connected", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-redistribution-redist-connected-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__redist_connected = t
if hasattr(self, '_set'):
self._set()
|
def function[_set_redist_connected, parameter[self, v, load]]:
constant[
Setter method for redist_connected, mapped from YANG variable /isis_state/router_isis_config/is_address_family_v6/redist_connected (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_redist_connected is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_redist_connected() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18dc052a0>
name[self].__redist_connected assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]]
|
keyword[def] identifier[_set_redist_connected] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[redist_connected] . identifier[redist_connected] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[False] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__redist_connected] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] ()
|
def _set_redist_connected(self, v, load=False):
"""
Setter method for redist_connected, mapped from YANG variable /isis_state/router_isis_config/is_address_family_v6/redist_connected (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_redist_connected is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_redist_connected() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=redist_connected.redist_connected, is_container='container', presence=False, yang_name='redist-connected', rest_name='redist-connected', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-redistribution-redist-connected-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'redist_connected must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=redist_connected.redist_connected, is_container=\'container\', presence=False, yang_name="redist-connected", rest_name="redist-connected", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'callpoint\': u\'isis-redistribution-redist-connected-1\'}}, namespace=\'urn:brocade.com:mgmt:brocade-isis-operational\', defining_module=\'brocade-isis-operational\', yang_type=\'container\', is_config=False)'}) # depends on [control=['except'], data=[]]
self.__redist_connected = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]]
|
def img2img_transformer2d_base():
"""Base params for img2img 2d attention."""
hparams = image_transformer2d_base()
# learning related flags
hparams.layer_preprocess_sequence = "n"
hparams.layer_postprocess_sequence = "da"
# This version seems to benefit from a higher learning rate.
hparams.learning_rate = 0.2
hparams.layer_prepostprocess_dropout = 0.1
hparams.learning_rate_warmup_steps = 12000
hparams.filter_size = 2048
hparams.num_encoder_layers = 4
hparams.num_decoder_layers = 8
hparams.bottom["inputs"] = modalities.image_channel_embeddings_bottom
hparams.dec_attention_type = cia.AttentionType.LOCAL_2D
hparams.block_raster_scan = True
return hparams
|
def function[img2img_transformer2d_base, parameter[]]:
constant[Base params for img2img 2d attention.]
variable[hparams] assign[=] call[name[image_transformer2d_base], parameter[]]
name[hparams].layer_preprocess_sequence assign[=] constant[n]
name[hparams].layer_postprocess_sequence assign[=] constant[da]
name[hparams].learning_rate assign[=] constant[0.2]
name[hparams].layer_prepostprocess_dropout assign[=] constant[0.1]
name[hparams].learning_rate_warmup_steps assign[=] constant[12000]
name[hparams].filter_size assign[=] constant[2048]
name[hparams].num_encoder_layers assign[=] constant[4]
name[hparams].num_decoder_layers assign[=] constant[8]
call[name[hparams].bottom][constant[inputs]] assign[=] name[modalities].image_channel_embeddings_bottom
name[hparams].dec_attention_type assign[=] name[cia].AttentionType.LOCAL_2D
name[hparams].block_raster_scan assign[=] constant[True]
return[name[hparams]]
|
keyword[def] identifier[img2img_transformer2d_base] ():
literal[string]
identifier[hparams] = identifier[image_transformer2d_base] ()
identifier[hparams] . identifier[layer_preprocess_sequence] = literal[string]
identifier[hparams] . identifier[layer_postprocess_sequence] = literal[string]
identifier[hparams] . identifier[learning_rate] = literal[int]
identifier[hparams] . identifier[layer_prepostprocess_dropout] = literal[int]
identifier[hparams] . identifier[learning_rate_warmup_steps] = literal[int]
identifier[hparams] . identifier[filter_size] = literal[int]
identifier[hparams] . identifier[num_encoder_layers] = literal[int]
identifier[hparams] . identifier[num_decoder_layers] = literal[int]
identifier[hparams] . identifier[bottom] [ literal[string] ]= identifier[modalities] . identifier[image_channel_embeddings_bottom]
identifier[hparams] . identifier[dec_attention_type] = identifier[cia] . identifier[AttentionType] . identifier[LOCAL_2D]
identifier[hparams] . identifier[block_raster_scan] = keyword[True]
keyword[return] identifier[hparams]
|
def img2img_transformer2d_base():
"""Base params for img2img 2d attention."""
hparams = image_transformer2d_base()
# learning related flags
hparams.layer_preprocess_sequence = 'n'
hparams.layer_postprocess_sequence = 'da'
# This version seems to benefit from a higher learning rate.
hparams.learning_rate = 0.2
hparams.layer_prepostprocess_dropout = 0.1
hparams.learning_rate_warmup_steps = 12000
hparams.filter_size = 2048
hparams.num_encoder_layers = 4
hparams.num_decoder_layers = 8
hparams.bottom['inputs'] = modalities.image_channel_embeddings_bottom
hparams.dec_attention_type = cia.AttentionType.LOCAL_2D
hparams.block_raster_scan = True
return hparams
|
def p_importPart(self, p):
"""importPart : imports
| empty"""
# libsmi: TODO: ``IMPORTS ;'' allowed? refer ASN.1!
if p[1]:
importDict = {}
for imp in p[1]: # don't do just dict() because moduleNames may be repeated
fromModule, symbols = imp
if fromModule in importDict:
importDict[fromModule] += symbols
else:
importDict[fromModule] = symbols
p[0] = importDict
|
def function[p_importPart, parameter[self, p]]:
constant[importPart : imports
| empty]
if call[name[p]][constant[1]] begin[:]
variable[importDict] assign[=] dictionary[[], []]
for taget[name[imp]] in starred[call[name[p]][constant[1]]] begin[:]
<ast.Tuple object at 0x7da1b016f790> assign[=] name[imp]
if compare[name[fromModule] in name[importDict]] begin[:]
<ast.AugAssign object at 0x7da1b016d180>
call[name[p]][constant[0]] assign[=] name[importDict]
|
keyword[def] identifier[p_importPart] ( identifier[self] , identifier[p] ):
literal[string]
keyword[if] identifier[p] [ literal[int] ]:
identifier[importDict] ={}
keyword[for] identifier[imp] keyword[in] identifier[p] [ literal[int] ]:
identifier[fromModule] , identifier[symbols] = identifier[imp]
keyword[if] identifier[fromModule] keyword[in] identifier[importDict] :
identifier[importDict] [ identifier[fromModule] ]+= identifier[symbols]
keyword[else] :
identifier[importDict] [ identifier[fromModule] ]= identifier[symbols]
identifier[p] [ literal[int] ]= identifier[importDict]
|
def p_importPart(self, p):
"""importPart : imports
| empty"""
# libsmi: TODO: ``IMPORTS ;'' allowed? refer ASN.1!
if p[1]:
importDict = {}
for imp in p[1]: # don't do just dict() because moduleNames may be repeated
(fromModule, symbols) = imp
if fromModule in importDict:
importDict[fromModule] += symbols # depends on [control=['if'], data=['fromModule', 'importDict']]
else:
importDict[fromModule] = symbols # depends on [control=['for'], data=['imp']]
p[0] = importDict # depends on [control=['if'], data=[]]
|
def _decompose_tree(ttree, orient='right', use_edge_lengths=True):
""" decomposes tree into component parts for plotting """
## set attributes
ttree._orient = orient
ttree._use_edge_lengths = use_edge_lengths
ult = use_edge_lengths == False
## map numeric values to internal nodes from root to tips
names = {}
idx = 0
for node in ttree.tree.traverse("preorder"):
if not node.is_leaf():
if node.name:
names[idx] = node.name
else:
names[idx] = idx
node.name = str(idx)
node.idx = idx
idx += 1
## map number to the tips, these will be the highest numbers
for node in ttree.tree.get_leaves():
names[idx] = node.name
node.idx = idx
idx += 1
## create empty edges and coords arrays
ttree.node_labels = names
ttree.tip_labels = ttree.tree.get_leaf_names()
#self.tip_labels = self.tree.get_leaf_names()[::-1]
#self.node_labels = self.names
ttree.edges = np.zeros((idx - 1, 2), dtype=int)
ttree.verts = np.zeros((idx, 2), dtype=float)
ttree._lines = [] # np.zeros((ntips-1), dtype=int)
ttree._coords = [] # np.zeros((idx * 2 - ntips), dtype=float)
## postorder: first children and then parents. This moves up the list .
nidx = 0
tip_num = len(ttree.tree.get_leaves()) - 1
## tips to root to fill in the verts and edges
for node in ttree.tree.traverse("postorder"):
if node.is_leaf():
## set the xy-axis positions of the tips
node.y = ttree.tree.get_distance(node)
if ult:
node.y = 0.
node.x = tip_num
tip_num -= 1
## edges connect this vert to
ttree.verts[node.idx] = [node.x, node.y]
ttree.edges[nidx] = [node.up.idx, node.idx]
elif node.is_root():
node.y = ttree.tree.get_distance(node)
if ult:
node.y = -1 * node.get_farthest_leaf(True)[1] - 1
node.x = sum(i.x for i in node.children) / float(len(node.children))
ttree.verts[node.idx] = [node.x, node.y]
else:
## create new nodes left and right
node.y = ttree.tree.get_distance(node)
if ult:
node.y = -1 * node.get_farthest_leaf(True)[1] - 1
node.x = sum(i.x for i in node.children) / float(len(node.children))
ttree.edges[nidx, :] = [node.up.idx, node.idx]
ttree.verts[node.idx] = [node.x, node.y]
nidx += 1
## root to tips to fill in the coords and lines
cidx = 0
for node in ttree.tree.traverse():
## add yourself
if not node.is_leaf():
ttree._coords += [[node.x, node.y]]
pidx = cidx
cidx += 1
for child in node.children:
## add children
ttree._coords += [[child.x, node.y], [child.x, child.y]]
ttree._lines += [[pidx, cidx]] ## connect yourself to newx
ttree._lines += [[cidx, cidx+1]] ## connect newx to child
cidx += 2
ttree._coords = np.array(ttree._coords, dtype=float)
ttree._lines = np.array(ttree._lines, dtype=int)
## invert for sideways trees
if ttree._orient in ['up', 0]:
pass
if ttree._orient in ['left', 1]:
ttree.verts[:, 1] = ttree.verts[:, 1] * -1
ttree.verts = ttree.verts[:, [1, 0]]
ttree._coords[:, 1] = ttree._coords[:, 1] * -1
ttree._coords = ttree._coords[:, [1, 0]]
if ttree._orient in ['down', 0]:
ttree.verts[:, 1] = ttree.verts[:, 1] * -1
ttree._coords[:, 1] = ttree._coords[:, 1] * -1
if ttree._orient in ['right', 3]:
ttree.verts = ttree.verts[:, [1, 0]]
ttree._coords = ttree._coords[:, [1, 0]]
|
def function[_decompose_tree, parameter[ttree, orient, use_edge_lengths]]:
constant[ decomposes tree into component parts for plotting ]
name[ttree]._orient assign[=] name[orient]
name[ttree]._use_edge_lengths assign[=] name[use_edge_lengths]
variable[ult] assign[=] compare[name[use_edge_lengths] equal[==] constant[False]]
variable[names] assign[=] dictionary[[], []]
variable[idx] assign[=] constant[0]
for taget[name[node]] in starred[call[name[ttree].tree.traverse, parameter[constant[preorder]]]] begin[:]
if <ast.UnaryOp object at 0x7da20cabdb40> begin[:]
if name[node].name begin[:]
call[name[names]][name[idx]] assign[=] name[node].name
name[node].idx assign[=] name[idx]
<ast.AugAssign object at 0x7da18fe924a0>
for taget[name[node]] in starred[call[name[ttree].tree.get_leaves, parameter[]]] begin[:]
call[name[names]][name[idx]] assign[=] name[node].name
name[node].idx assign[=] name[idx]
<ast.AugAssign object at 0x7da18fe90670>
name[ttree].node_labels assign[=] name[names]
name[ttree].tip_labels assign[=] call[name[ttree].tree.get_leaf_names, parameter[]]
name[ttree].edges assign[=] call[name[np].zeros, parameter[tuple[[<ast.BinOp object at 0x7da18fe910c0>, <ast.Constant object at 0x7da18fe91840>]]]]
name[ttree].verts assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da18fe937f0>, <ast.Constant object at 0x7da18fe92e90>]]]]
name[ttree]._lines assign[=] list[[]]
name[ttree]._coords assign[=] list[[]]
variable[nidx] assign[=] constant[0]
variable[tip_num] assign[=] binary_operation[call[name[len], parameter[call[name[ttree].tree.get_leaves, parameter[]]]] - constant[1]]
for taget[name[node]] in starred[call[name[ttree].tree.traverse, parameter[constant[postorder]]]] begin[:]
if call[name[node].is_leaf, parameter[]] begin[:]
name[node].y assign[=] call[name[ttree].tree.get_distance, parameter[name[node]]]
if name[ult] begin[:]
name[node].y assign[=] constant[0.0]
name[node].x assign[=] name[tip_num]
<ast.AugAssign object at 0x7da18c4cda50>
call[name[ttree].verts][name[node].idx] assign[=] list[[<ast.Attribute object at 0x7da18c4ccf10>, <ast.Attribute object at 0x7da18c4cefb0>]]
call[name[ttree].edges][name[nidx]] assign[=] list[[<ast.Attribute object at 0x7da18c4cec80>, <ast.Attribute object at 0x7da18c4cc8e0>]]
<ast.AugAssign object at 0x7da18c4cd720>
variable[cidx] assign[=] constant[0]
for taget[name[node]] in starred[call[name[ttree].tree.traverse, parameter[]]] begin[:]
if <ast.UnaryOp object at 0x7da18c4cc3d0> begin[:]
<ast.AugAssign object at 0x7da18c4ce440>
variable[pidx] assign[=] name[cidx]
<ast.AugAssign object at 0x7da18c4cebc0>
for taget[name[child]] in starred[name[node].children] begin[:]
<ast.AugAssign object at 0x7da18c4cc760>
<ast.AugAssign object at 0x7da18c4cddb0>
<ast.AugAssign object at 0x7da18c4ceaa0>
<ast.AugAssign object at 0x7da18c4cc220>
name[ttree]._coords assign[=] call[name[np].array, parameter[name[ttree]._coords]]
name[ttree]._lines assign[=] call[name[np].array, parameter[name[ttree]._lines]]
if compare[name[ttree]._orient in list[[<ast.Constant object at 0x7da18c4ce7d0>, <ast.Constant object at 0x7da18c4ce740>]]] begin[:]
pass
if compare[name[ttree]._orient in list[[<ast.Constant object at 0x7da18c4cfa00>, <ast.Constant object at 0x7da18c4cc820>]]] begin[:]
call[name[ttree].verts][tuple[[<ast.Slice object at 0x7da18c4cf370>, <ast.Constant object at 0x7da18c4cfe80>]]] assign[=] binary_operation[call[name[ttree].verts][tuple[[<ast.Slice object at 0x7da18c4cf100>, <ast.Constant object at 0x7da18c4cffd0>]]] * <ast.UnaryOp object at 0x7da18c4cf5e0>]
name[ttree].verts assign[=] call[name[ttree].verts][tuple[[<ast.Slice object at 0x7da18c4cea10>, <ast.List object at 0x7da18c4cfbe0>]]]
call[name[ttree]._coords][tuple[[<ast.Slice object at 0x7da18c4cd6c0>, <ast.Constant object at 0x7da18c4cd870>]]] assign[=] binary_operation[call[name[ttree]._coords][tuple[[<ast.Slice object at 0x7da18c4ccc70>, <ast.Constant object at 0x7da18c4ce410>]]] * <ast.UnaryOp object at 0x7da18c4ceb90>]
name[ttree]._coords assign[=] call[name[ttree]._coords][tuple[[<ast.Slice object at 0x7da18c4cdde0>, <ast.List object at 0x7da18c4cecb0>]]]
if compare[name[ttree]._orient in list[[<ast.Constant object at 0x7da18c4cd3f0>, <ast.Constant object at 0x7da18c4cd2a0>]]] begin[:]
call[name[ttree].verts][tuple[[<ast.Slice object at 0x7da20c9938b0>, <ast.Constant object at 0x7da20c992440>]]] assign[=] binary_operation[call[name[ttree].verts][tuple[[<ast.Slice object at 0x7da20c9926b0>, <ast.Constant object at 0x7da20c993880>]]] * <ast.UnaryOp object at 0x7da20c992770>]
call[name[ttree]._coords][tuple[[<ast.Slice object at 0x7da20c991150>, <ast.Constant object at 0x7da20c991570>]]] assign[=] binary_operation[call[name[ttree]._coords][tuple[[<ast.Slice object at 0x7da20c9901f0>, <ast.Constant object at 0x7da20c993250>]]] * <ast.UnaryOp object at 0x7da20c9907c0>]
if compare[name[ttree]._orient in list[[<ast.Constant object at 0x7da20c9923b0>, <ast.Constant object at 0x7da20c992590>]]] begin[:]
name[ttree].verts assign[=] call[name[ttree].verts][tuple[[<ast.Slice object at 0x7da20c9936d0>, <ast.List object at 0x7da20c992b60>]]]
name[ttree]._coords assign[=] call[name[ttree]._coords][tuple[[<ast.Slice object at 0x7da20c991480>, <ast.List object at 0x7da20c991390>]]]
|
keyword[def] identifier[_decompose_tree] ( identifier[ttree] , identifier[orient] = literal[string] , identifier[use_edge_lengths] = keyword[True] ):
literal[string]
identifier[ttree] . identifier[_orient] = identifier[orient]
identifier[ttree] . identifier[_use_edge_lengths] = identifier[use_edge_lengths]
identifier[ult] = identifier[use_edge_lengths] == keyword[False]
identifier[names] ={}
identifier[idx] = literal[int]
keyword[for] identifier[node] keyword[in] identifier[ttree] . identifier[tree] . identifier[traverse] ( literal[string] ):
keyword[if] keyword[not] identifier[node] . identifier[is_leaf] ():
keyword[if] identifier[node] . identifier[name] :
identifier[names] [ identifier[idx] ]= identifier[node] . identifier[name]
keyword[else] :
identifier[names] [ identifier[idx] ]= identifier[idx]
identifier[node] . identifier[name] = identifier[str] ( identifier[idx] )
identifier[node] . identifier[idx] = identifier[idx]
identifier[idx] += literal[int]
keyword[for] identifier[node] keyword[in] identifier[ttree] . identifier[tree] . identifier[get_leaves] ():
identifier[names] [ identifier[idx] ]= identifier[node] . identifier[name]
identifier[node] . identifier[idx] = identifier[idx]
identifier[idx] += literal[int]
identifier[ttree] . identifier[node_labels] = identifier[names]
identifier[ttree] . identifier[tip_labels] = identifier[ttree] . identifier[tree] . identifier[get_leaf_names] ()
identifier[ttree] . identifier[edges] = identifier[np] . identifier[zeros] (( identifier[idx] - literal[int] , literal[int] ), identifier[dtype] = identifier[int] )
identifier[ttree] . identifier[verts] = identifier[np] . identifier[zeros] (( identifier[idx] , literal[int] ), identifier[dtype] = identifier[float] )
identifier[ttree] . identifier[_lines] =[]
identifier[ttree] . identifier[_coords] =[]
identifier[nidx] = literal[int]
identifier[tip_num] = identifier[len] ( identifier[ttree] . identifier[tree] . identifier[get_leaves] ())- literal[int]
keyword[for] identifier[node] keyword[in] identifier[ttree] . identifier[tree] . identifier[traverse] ( literal[string] ):
keyword[if] identifier[node] . identifier[is_leaf] ():
identifier[node] . identifier[y] = identifier[ttree] . identifier[tree] . identifier[get_distance] ( identifier[node] )
keyword[if] identifier[ult] :
identifier[node] . identifier[y] = literal[int]
identifier[node] . identifier[x] = identifier[tip_num]
identifier[tip_num] -= literal[int]
identifier[ttree] . identifier[verts] [ identifier[node] . identifier[idx] ]=[ identifier[node] . identifier[x] , identifier[node] . identifier[y] ]
identifier[ttree] . identifier[edges] [ identifier[nidx] ]=[ identifier[node] . identifier[up] . identifier[idx] , identifier[node] . identifier[idx] ]
keyword[elif] identifier[node] . identifier[is_root] ():
identifier[node] . identifier[y] = identifier[ttree] . identifier[tree] . identifier[get_distance] ( identifier[node] )
keyword[if] identifier[ult] :
identifier[node] . identifier[y] =- literal[int] * identifier[node] . identifier[get_farthest_leaf] ( keyword[True] )[ literal[int] ]- literal[int]
identifier[node] . identifier[x] = identifier[sum] ( identifier[i] . identifier[x] keyword[for] identifier[i] keyword[in] identifier[node] . identifier[children] )/ identifier[float] ( identifier[len] ( identifier[node] . identifier[children] ))
identifier[ttree] . identifier[verts] [ identifier[node] . identifier[idx] ]=[ identifier[node] . identifier[x] , identifier[node] . identifier[y] ]
keyword[else] :
identifier[node] . identifier[y] = identifier[ttree] . identifier[tree] . identifier[get_distance] ( identifier[node] )
keyword[if] identifier[ult] :
identifier[node] . identifier[y] =- literal[int] * identifier[node] . identifier[get_farthest_leaf] ( keyword[True] )[ literal[int] ]- literal[int]
identifier[node] . identifier[x] = identifier[sum] ( identifier[i] . identifier[x] keyword[for] identifier[i] keyword[in] identifier[node] . identifier[children] )/ identifier[float] ( identifier[len] ( identifier[node] . identifier[children] ))
identifier[ttree] . identifier[edges] [ identifier[nidx] ,:]=[ identifier[node] . identifier[up] . identifier[idx] , identifier[node] . identifier[idx] ]
identifier[ttree] . identifier[verts] [ identifier[node] . identifier[idx] ]=[ identifier[node] . identifier[x] , identifier[node] . identifier[y] ]
identifier[nidx] += literal[int]
identifier[cidx] = literal[int]
keyword[for] identifier[node] keyword[in] identifier[ttree] . identifier[tree] . identifier[traverse] ():
keyword[if] keyword[not] identifier[node] . identifier[is_leaf] ():
identifier[ttree] . identifier[_coords] +=[[ identifier[node] . identifier[x] , identifier[node] . identifier[y] ]]
identifier[pidx] = identifier[cidx]
identifier[cidx] += literal[int]
keyword[for] identifier[child] keyword[in] identifier[node] . identifier[children] :
identifier[ttree] . identifier[_coords] +=[[ identifier[child] . identifier[x] , identifier[node] . identifier[y] ],[ identifier[child] . identifier[x] , identifier[child] . identifier[y] ]]
identifier[ttree] . identifier[_lines] +=[[ identifier[pidx] , identifier[cidx] ]]
identifier[ttree] . identifier[_lines] +=[[ identifier[cidx] , identifier[cidx] + literal[int] ]]
identifier[cidx] += literal[int]
identifier[ttree] . identifier[_coords] = identifier[np] . identifier[array] ( identifier[ttree] . identifier[_coords] , identifier[dtype] = identifier[float] )
identifier[ttree] . identifier[_lines] = identifier[np] . identifier[array] ( identifier[ttree] . identifier[_lines] , identifier[dtype] = identifier[int] )
keyword[if] identifier[ttree] . identifier[_orient] keyword[in] [ literal[string] , literal[int] ]:
keyword[pass]
keyword[if] identifier[ttree] . identifier[_orient] keyword[in] [ literal[string] , literal[int] ]:
identifier[ttree] . identifier[verts] [:, literal[int] ]= identifier[ttree] . identifier[verts] [:, literal[int] ]*- literal[int]
identifier[ttree] . identifier[verts] = identifier[ttree] . identifier[verts] [:,[ literal[int] , literal[int] ]]
identifier[ttree] . identifier[_coords] [:, literal[int] ]= identifier[ttree] . identifier[_coords] [:, literal[int] ]*- literal[int]
identifier[ttree] . identifier[_coords] = identifier[ttree] . identifier[_coords] [:,[ literal[int] , literal[int] ]]
keyword[if] identifier[ttree] . identifier[_orient] keyword[in] [ literal[string] , literal[int] ]:
identifier[ttree] . identifier[verts] [:, literal[int] ]= identifier[ttree] . identifier[verts] [:, literal[int] ]*- literal[int]
identifier[ttree] . identifier[_coords] [:, literal[int] ]= identifier[ttree] . identifier[_coords] [:, literal[int] ]*- literal[int]
keyword[if] identifier[ttree] . identifier[_orient] keyword[in] [ literal[string] , literal[int] ]:
identifier[ttree] . identifier[verts] = identifier[ttree] . identifier[verts] [:,[ literal[int] , literal[int] ]]
identifier[ttree] . identifier[_coords] = identifier[ttree] . identifier[_coords] [:,[ literal[int] , literal[int] ]]
|
def _decompose_tree(ttree, orient='right', use_edge_lengths=True):
""" decomposes tree into component parts for plotting """
## set attributes
ttree._orient = orient
ttree._use_edge_lengths = use_edge_lengths
ult = use_edge_lengths == False
## map numeric values to internal nodes from root to tips
names = {}
idx = 0
for node in ttree.tree.traverse('preorder'):
if not node.is_leaf():
if node.name:
names[idx] = node.name # depends on [control=['if'], data=[]]
else:
names[idx] = idx
node.name = str(idx)
node.idx = idx
idx += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']]
## map number to the tips, these will be the highest numbers
for node in ttree.tree.get_leaves():
names[idx] = node.name
node.idx = idx
idx += 1 # depends on [control=['for'], data=['node']]
## create empty edges and coords arrays
ttree.node_labels = names
ttree.tip_labels = ttree.tree.get_leaf_names()
#self.tip_labels = self.tree.get_leaf_names()[::-1]
#self.node_labels = self.names
ttree.edges = np.zeros((idx - 1, 2), dtype=int)
ttree.verts = np.zeros((idx, 2), dtype=float)
ttree._lines = [] # np.zeros((ntips-1), dtype=int)
ttree._coords = [] # np.zeros((idx * 2 - ntips), dtype=float)
## postorder: first children and then parents. This moves up the list .
nidx = 0
tip_num = len(ttree.tree.get_leaves()) - 1
## tips to root to fill in the verts and edges
for node in ttree.tree.traverse('postorder'):
if node.is_leaf():
## set the xy-axis positions of the tips
node.y = ttree.tree.get_distance(node)
if ult:
node.y = 0.0 # depends on [control=['if'], data=[]]
node.x = tip_num
tip_num -= 1
## edges connect this vert to
ttree.verts[node.idx] = [node.x, node.y]
ttree.edges[nidx] = [node.up.idx, node.idx] # depends on [control=['if'], data=[]]
elif node.is_root():
node.y = ttree.tree.get_distance(node)
if ult:
node.y = -1 * node.get_farthest_leaf(True)[1] - 1 # depends on [control=['if'], data=[]]
node.x = sum((i.x for i in node.children)) / float(len(node.children))
ttree.verts[node.idx] = [node.x, node.y] # depends on [control=['if'], data=[]]
else:
## create new nodes left and right
node.y = ttree.tree.get_distance(node)
if ult:
node.y = -1 * node.get_farthest_leaf(True)[1] - 1 # depends on [control=['if'], data=[]]
node.x = sum((i.x for i in node.children)) / float(len(node.children))
ttree.edges[nidx, :] = [node.up.idx, node.idx]
ttree.verts[node.idx] = [node.x, node.y]
nidx += 1 # depends on [control=['for'], data=['node']]
## root to tips to fill in the coords and lines
cidx = 0
for node in ttree.tree.traverse():
## add yourself
if not node.is_leaf():
ttree._coords += [[node.x, node.y]]
pidx = cidx
cidx += 1
for child in node.children:
## add children
ttree._coords += [[child.x, node.y], [child.x, child.y]]
ttree._lines += [[pidx, cidx]] ## connect yourself to newx
ttree._lines += [[cidx, cidx + 1]] ## connect newx to child
cidx += 2 # depends on [control=['for'], data=['child']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']]
ttree._coords = np.array(ttree._coords, dtype=float)
ttree._lines = np.array(ttree._lines, dtype=int)
## invert for sideways trees
if ttree._orient in ['up', 0]:
pass # depends on [control=['if'], data=[]]
if ttree._orient in ['left', 1]:
ttree.verts[:, 1] = ttree.verts[:, 1] * -1
ttree.verts = ttree.verts[:, [1, 0]]
ttree._coords[:, 1] = ttree._coords[:, 1] * -1
ttree._coords = ttree._coords[:, [1, 0]] # depends on [control=['if'], data=[]]
if ttree._orient in ['down', 0]:
ttree.verts[:, 1] = ttree.verts[:, 1] * -1
ttree._coords[:, 1] = ttree._coords[:, 1] * -1 # depends on [control=['if'], data=[]]
if ttree._orient in ['right', 3]:
ttree.verts = ttree.verts[:, [1, 0]]
ttree._coords = ttree._coords[:, [1, 0]] # depends on [control=['if'], data=[]]
|
def handle_func(self, multiprocessing_enabled, func, data):
'''
Execute this method in a multiprocess or thread
'''
if salt.utils.platform.is_windows() \
or self.opts.get('transport') == 'zeromq':
# Since function references can't be pickled and pickling
# is required when spawning new processes on Windows, regenerate
# the functions and returners.
# This also needed for ZeroMQ transport to reset all functions
# context data that could keep paretns connections. ZeroMQ will
# hang on polling parents connections from the child process.
if self.opts['__role'] == 'master':
self.functions = salt.loader.runner(self.opts, utils=self.utils)
else:
self.functions = salt.loader.minion_mods(self.opts, proxy=self.proxy, utils=self.utils)
self.returners = salt.loader.returners(self.opts, self.functions, proxy=self.proxy)
ret = {'id': self.opts.get('id', 'master'),
'fun': func,
'fun_args': [],
'schedule': data['name'],
'jid': salt.utils.jid.gen_jid(self.opts)}
if 'metadata' in data:
if isinstance(data['metadata'], dict):
ret['metadata'] = data['metadata']
ret['metadata']['_TOS'] = self.time_offset
ret['metadata']['_TS'] = time.ctime()
ret['metadata']['_TT'] = time.strftime('%Y %B %d %a %H %m', time.gmtime())
else:
log.warning('schedule: The metadata parameter must be '
'specified as a dictionary. Ignoring.')
if multiprocessing_enabled:
# We just want to modify the process name if we're on a different process
salt.utils.process.appendproctitle('{0} {1}'.format(self.__class__.__name__, ret['jid']))
data_returner = data.get('returner', None)
if not self.standalone:
proc_fn = os.path.join(
salt.minion.get_proc_dir(self.opts['cachedir']),
ret['jid']
)
if multiprocessing_enabled and not salt.utils.platform.is_windows():
# Reconfigure multiprocessing logging after daemonizing
log_setup.setup_multiprocessing_logging()
if multiprocessing_enabled:
# Don't *BEFORE* to go into try to don't let it triple execute the finally section.
salt.utils.process.daemonize_if(self.opts)
# TODO: Make it readable! Splt to funcs, remove nested try-except-finally sections.
try:
minion_blackout_violation = False
if self.opts.get('pillar', {}).get('minion_blackout', False):
whitelist = self.opts.get('pillar', {}).get('minion_blackout_whitelist', [])
# this minion is blacked out. Only allow saltutil.refresh_pillar and the whitelist
if func != 'saltutil.refresh_pillar' and func not in whitelist:
minion_blackout_violation = True
elif self.opts.get('grains', {}).get('minion_blackout', False):
whitelist = self.opts.get('grains', {}).get('minion_blackout_whitelist', [])
if func != 'saltutil.refresh_pillar' and func not in whitelist:
minion_blackout_violation = True
if minion_blackout_violation:
raise SaltInvocationError('Minion in blackout mode. Set \'minion_blackout\' '
'to False in pillar or grains to resume operations. Only '
'saltutil.refresh_pillar allowed in blackout mode.')
ret['pid'] = os.getpid()
if not self.standalone:
if 'jid_include' not in data or data['jid_include']:
log.debug(
'schedule.handle_func: adding this job to the '
'jobcache with data %s', ret
)
# write this to /var/cache/salt/minion/proc
with salt.utils.files.fopen(proc_fn, 'w+b') as fp_:
fp_.write(salt.payload.Serial(self.opts).dumps(ret))
args = tuple()
if 'args' in data:
args = data['args']
ret['fun_args'].extend(data['args'])
kwargs = {}
if 'kwargs' in data:
kwargs = data['kwargs']
ret['fun_args'].append(copy.deepcopy(kwargs))
if func not in self.functions:
ret['return'] = self.functions.missing_fun_string(func)
salt.utils.error.raise_error(
message=self.functions.missing_fun_string(func))
# if the func support **kwargs, lets pack in the pub data we have
# TODO: pack the *same* pub data as a minion?
argspec = salt.utils.args.get_function_argspec(self.functions[func])
if argspec.keywords:
# this function accepts **kwargs, pack in the publish data
for key, val in six.iteritems(ret):
if key is not 'kwargs':
kwargs['__pub_{0}'.format(key)] = copy.deepcopy(val)
# Only include these when running runner modules
if self.opts['__role'] == 'master':
jid = salt.utils.jid.gen_jid(self.opts)
tag = salt.utils.event.tagify(jid, prefix='salt/scheduler/')
event = salt.utils.event.get_event(
self.opts['__role'],
self.opts['sock_dir'],
self.opts['transport'],
opts=self.opts,
listen=False)
namespaced_event = salt.utils.event.NamespacedEvent(
event,
tag,
print_func=None
)
func_globals = {
'__jid__': jid,
'__user__': salt.utils.user.get_user(),
'__tag__': tag,
'__jid_event__': weakref.proxy(namespaced_event),
}
self_functions = copy.copy(self.functions)
salt.utils.lazy.verify_fun(self_functions, func)
# Inject some useful globals to *all* the function's global
# namespace only once per module-- not per func
completed_funcs = []
for mod_name in six.iterkeys(self_functions):
if '.' not in mod_name:
continue
mod, _ = mod_name.split('.', 1)
if mod in completed_funcs:
continue
completed_funcs.append(mod)
for global_key, value in six.iteritems(func_globals):
self.functions[mod_name].__globals__[global_key] = value
self.functions.pack['__context__']['retcode'] = 0
ret['return'] = self.functions[func](*args, **kwargs)
if not self.standalone:
# runners do not provide retcode
if 'retcode' in self.functions.pack['__context__']:
ret['retcode'] = self.functions.pack['__context__']['retcode']
ret['success'] = True
if data_returner or self.schedule_returner:
if 'return_config' in data:
ret['ret_config'] = data['return_config']
if 'return_kwargs' in data:
ret['ret_kwargs'] = data['return_kwargs']
rets = []
for returner in [data_returner, self.schedule_returner]:
if isinstance(returner, six.string_types):
rets.append(returner)
elif isinstance(returner, list):
rets.extend(returner)
# simple de-duplication with order retained
for returner in OrderedDict.fromkeys(rets):
ret_str = '{0}.returner'.format(returner)
if ret_str in self.returners:
self.returners[ret_str](ret)
else:
log.info(
'Job %s using invalid returner: %s. Ignoring.',
func, returner
)
except Exception:
log.exception('Unhandled exception running %s', ret['fun'])
# Although catch-all exception handlers are bad, the exception here
# is to let the exception bubble up to the top of the thread context,
# where the thread will die silently, which is worse.
if 'return' not in ret:
ret['return'] = "Unhandled exception running {0}".format(ret['fun'])
ret['success'] = False
ret['retcode'] = 254
finally:
# Only attempt to return data to the master if the scheduled job is running
# on a master itself or a minion.
if '__role' in self.opts and self.opts['__role'] in ('master', 'minion'):
# The 'return_job' option is enabled by default even if not set
if 'return_job' in data and not data['return_job']:
pass
else:
# Send back to master so the job is included in the job list
mret = ret.copy()
# No returners defined, so we're only sending back to the master
if not data_returner and not self.schedule_returner:
mret['jid'] = 'req'
if data.get('return_job') == 'nocache':
# overwrite 'req' to signal to master that
# this job shouldn't be stored
mret['jid'] = 'nocache'
load = {'cmd': '_return', 'id': self.opts['id']}
for key, value in six.iteritems(mret):
load[key] = value
if '__role' in self.opts and self.opts['__role'] == 'minion':
event = salt.utils.event.get_event('minion',
opts=self.opts,
listen=False)
elif '__role' in self.opts and self.opts['__role'] == 'master':
event = salt.utils.event.get_master_event(self.opts,
self.opts['sock_dir'])
try:
event.fire_event(load, '__schedule_return')
except Exception as exc:
log.exception('Unhandled exception firing __schedule_return event')
if not self.standalone:
log.debug('schedule.handle_func: Removing %s', proc_fn)
try:
os.unlink(proc_fn)
except OSError as exc:
if exc.errno == errno.EEXIST or exc.errno == errno.ENOENT:
# EEXIST and ENOENT are OK because the file is gone and that's what
# we wanted
pass
else:
log.error("Failed to delete '%s': %s", proc_fn, exc.errno)
# Otherwise, failing to delete this file is not something
# we can cleanly handle.
raise
finally:
if multiprocessing_enabled:
# Let's make sure we exit the process!
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
|
def function[handle_func, parameter[self, multiprocessing_enabled, func, data]]:
constant[
Execute this method in a multiprocess or thread
]
if <ast.BoolOp object at 0x7da1b1f8f1f0> begin[:]
if compare[call[name[self].opts][constant[__role]] equal[==] constant[master]] begin[:]
name[self].functions assign[=] call[name[salt].loader.runner, parameter[name[self].opts]]
name[self].returners assign[=] call[name[salt].loader.returners, parameter[name[self].opts, name[self].functions]]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f8fc40>, <ast.Constant object at 0x7da1b1f8fdc0>, <ast.Constant object at 0x7da1b1f8fcd0>, <ast.Constant object at 0x7da1b1f8fb20>, <ast.Constant object at 0x7da1b1f8fb80>], [<ast.Call object at 0x7da1b1f8f0a0>, <ast.Name object at 0x7da1b1f8e620>, <ast.List object at 0x7da1b1f8f040>, <ast.Subscript object at 0x7da1b1f8e6b0>, <ast.Call object at 0x7da1b1f8e590>]]
if compare[constant[metadata] in name[data]] begin[:]
if call[name[isinstance], parameter[call[name[data]][constant[metadata]], name[dict]]] begin[:]
call[name[ret]][constant[metadata]] assign[=] call[name[data]][constant[metadata]]
call[call[name[ret]][constant[metadata]]][constant[_TOS]] assign[=] name[self].time_offset
call[call[name[ret]][constant[metadata]]][constant[_TS]] assign[=] call[name[time].ctime, parameter[]]
call[call[name[ret]][constant[metadata]]][constant[_TT]] assign[=] call[name[time].strftime, parameter[constant[%Y %B %d %a %H %m], call[name[time].gmtime, parameter[]]]]
if name[multiprocessing_enabled] begin[:]
call[name[salt].utils.process.appendproctitle, parameter[call[constant[{0} {1}].format, parameter[name[self].__class__.__name__, call[name[ret]][constant[jid]]]]]]
variable[data_returner] assign[=] call[name[data].get, parameter[constant[returner], constant[None]]]
if <ast.UnaryOp object at 0x7da1b1f835b0> begin[:]
variable[proc_fn] assign[=] call[name[os].path.join, parameter[call[name[salt].minion.get_proc_dir, parameter[call[name[self].opts][constant[cachedir]]]], call[name[ret]][constant[jid]]]]
if <ast.BoolOp object at 0x7da207f98c70> begin[:]
call[name[log_setup].setup_multiprocessing_logging, parameter[]]
if name[multiprocessing_enabled] begin[:]
call[name[salt].utils.process.daemonize_if, parameter[name[self].opts]]
<ast.Try object at 0x7da207f99990>
|
keyword[def] identifier[handle_func] ( identifier[self] , identifier[multiprocessing_enabled] , identifier[func] , identifier[data] ):
literal[string]
keyword[if] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] () keyword[or] identifier[self] . identifier[opts] . identifier[get] ( literal[string] )== literal[string] :
keyword[if] identifier[self] . identifier[opts] [ literal[string] ]== literal[string] :
identifier[self] . identifier[functions] = identifier[salt] . identifier[loader] . identifier[runner] ( identifier[self] . identifier[opts] , identifier[utils] = identifier[self] . identifier[utils] )
keyword[else] :
identifier[self] . identifier[functions] = identifier[salt] . identifier[loader] . identifier[minion_mods] ( identifier[self] . identifier[opts] , identifier[proxy] = identifier[self] . identifier[proxy] , identifier[utils] = identifier[self] . identifier[utils] )
identifier[self] . identifier[returners] = identifier[salt] . identifier[loader] . identifier[returners] ( identifier[self] . identifier[opts] , identifier[self] . identifier[functions] , identifier[proxy] = identifier[self] . identifier[proxy] )
identifier[ret] ={ literal[string] : identifier[self] . identifier[opts] . identifier[get] ( literal[string] , literal[string] ),
literal[string] : identifier[func] ,
literal[string] :[],
literal[string] : identifier[data] [ literal[string] ],
literal[string] : identifier[salt] . identifier[utils] . identifier[jid] . identifier[gen_jid] ( identifier[self] . identifier[opts] )}
keyword[if] literal[string] keyword[in] identifier[data] :
keyword[if] identifier[isinstance] ( identifier[data] [ literal[string] ], identifier[dict] ):
identifier[ret] [ literal[string] ]= identifier[data] [ literal[string] ]
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[self] . identifier[time_offset]
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[time] . identifier[ctime] ()
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[time] . identifier[strftime] ( literal[string] , identifier[time] . identifier[gmtime] ())
keyword[else] :
identifier[log] . identifier[warning] ( literal[string]
literal[string] )
keyword[if] identifier[multiprocessing_enabled] :
identifier[salt] . identifier[utils] . identifier[process] . identifier[appendproctitle] ( literal[string] . identifier[format] ( identifier[self] . identifier[__class__] . identifier[__name__] , identifier[ret] [ literal[string] ]))
identifier[data_returner] = identifier[data] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] keyword[not] identifier[self] . identifier[standalone] :
identifier[proc_fn] = identifier[os] . identifier[path] . identifier[join] (
identifier[salt] . identifier[minion] . identifier[get_proc_dir] ( identifier[self] . identifier[opts] [ literal[string] ]),
identifier[ret] [ literal[string] ]
)
keyword[if] identifier[multiprocessing_enabled] keyword[and] keyword[not] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] ():
identifier[log_setup] . identifier[setup_multiprocessing_logging] ()
keyword[if] identifier[multiprocessing_enabled] :
identifier[salt] . identifier[utils] . identifier[process] . identifier[daemonize_if] ( identifier[self] . identifier[opts] )
keyword[try] :
identifier[minion_blackout_violation] = keyword[False]
keyword[if] identifier[self] . identifier[opts] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] , keyword[False] ):
identifier[whitelist] = identifier[self] . identifier[opts] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,[])
keyword[if] identifier[func] != literal[string] keyword[and] identifier[func] keyword[not] keyword[in] identifier[whitelist] :
identifier[minion_blackout_violation] = keyword[True]
keyword[elif] identifier[self] . identifier[opts] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] , keyword[False] ):
identifier[whitelist] = identifier[self] . identifier[opts] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,[])
keyword[if] identifier[func] != literal[string] keyword[and] identifier[func] keyword[not] keyword[in] identifier[whitelist] :
identifier[minion_blackout_violation] = keyword[True]
keyword[if] identifier[minion_blackout_violation] :
keyword[raise] identifier[SaltInvocationError] ( literal[string]
literal[string]
literal[string] )
identifier[ret] [ literal[string] ]= identifier[os] . identifier[getpid] ()
keyword[if] keyword[not] identifier[self] . identifier[standalone] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[data] keyword[or] identifier[data] [ literal[string] ]:
identifier[log] . identifier[debug] (
literal[string]
literal[string] , identifier[ret]
)
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[proc_fn] , literal[string] ) keyword[as] identifier[fp_] :
identifier[fp_] . identifier[write] ( identifier[salt] . identifier[payload] . identifier[Serial] ( identifier[self] . identifier[opts] ). identifier[dumps] ( identifier[ret] ))
identifier[args] = identifier[tuple] ()
keyword[if] literal[string] keyword[in] identifier[data] :
identifier[args] = identifier[data] [ literal[string] ]
identifier[ret] [ literal[string] ]. identifier[extend] ( identifier[data] [ literal[string] ])
identifier[kwargs] ={}
keyword[if] literal[string] keyword[in] identifier[data] :
identifier[kwargs] = identifier[data] [ literal[string] ]
identifier[ret] [ literal[string] ]. identifier[append] ( identifier[copy] . identifier[deepcopy] ( identifier[kwargs] ))
keyword[if] identifier[func] keyword[not] keyword[in] identifier[self] . identifier[functions] :
identifier[ret] [ literal[string] ]= identifier[self] . identifier[functions] . identifier[missing_fun_string] ( identifier[func] )
identifier[salt] . identifier[utils] . identifier[error] . identifier[raise_error] (
identifier[message] = identifier[self] . identifier[functions] . identifier[missing_fun_string] ( identifier[func] ))
identifier[argspec] = identifier[salt] . identifier[utils] . identifier[args] . identifier[get_function_argspec] ( identifier[self] . identifier[functions] [ identifier[func] ])
keyword[if] identifier[argspec] . identifier[keywords] :
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[six] . identifier[iteritems] ( identifier[ret] ):
keyword[if] identifier[key] keyword[is] keyword[not] literal[string] :
identifier[kwargs] [ literal[string] . identifier[format] ( identifier[key] )]= identifier[copy] . identifier[deepcopy] ( identifier[val] )
keyword[if] identifier[self] . identifier[opts] [ literal[string] ]== literal[string] :
identifier[jid] = identifier[salt] . identifier[utils] . identifier[jid] . identifier[gen_jid] ( identifier[self] . identifier[opts] )
identifier[tag] = identifier[salt] . identifier[utils] . identifier[event] . identifier[tagify] ( identifier[jid] , identifier[prefix] = literal[string] )
identifier[event] = identifier[salt] . identifier[utils] . identifier[event] . identifier[get_event] (
identifier[self] . identifier[opts] [ literal[string] ],
identifier[self] . identifier[opts] [ literal[string] ],
identifier[self] . identifier[opts] [ literal[string] ],
identifier[opts] = identifier[self] . identifier[opts] ,
identifier[listen] = keyword[False] )
identifier[namespaced_event] = identifier[salt] . identifier[utils] . identifier[event] . identifier[NamespacedEvent] (
identifier[event] ,
identifier[tag] ,
identifier[print_func] = keyword[None]
)
identifier[func_globals] ={
literal[string] : identifier[jid] ,
literal[string] : identifier[salt] . identifier[utils] . identifier[user] . identifier[get_user] (),
literal[string] : identifier[tag] ,
literal[string] : identifier[weakref] . identifier[proxy] ( identifier[namespaced_event] ),
}
identifier[self_functions] = identifier[copy] . identifier[copy] ( identifier[self] . identifier[functions] )
identifier[salt] . identifier[utils] . identifier[lazy] . identifier[verify_fun] ( identifier[self_functions] , identifier[func] )
identifier[completed_funcs] =[]
keyword[for] identifier[mod_name] keyword[in] identifier[six] . identifier[iterkeys] ( identifier[self_functions] ):
keyword[if] literal[string] keyword[not] keyword[in] identifier[mod_name] :
keyword[continue]
identifier[mod] , identifier[_] = identifier[mod_name] . identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[mod] keyword[in] identifier[completed_funcs] :
keyword[continue]
identifier[completed_funcs] . identifier[append] ( identifier[mod] )
keyword[for] identifier[global_key] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[func_globals] ):
identifier[self] . identifier[functions] [ identifier[mod_name] ]. identifier[__globals__] [ identifier[global_key] ]= identifier[value]
identifier[self] . identifier[functions] . identifier[pack] [ literal[string] ][ literal[string] ]= literal[int]
identifier[ret] [ literal[string] ]= identifier[self] . identifier[functions] [ identifier[func] ](* identifier[args] ,** identifier[kwargs] )
keyword[if] keyword[not] identifier[self] . identifier[standalone] :
keyword[if] literal[string] keyword[in] identifier[self] . identifier[functions] . identifier[pack] [ literal[string] ]:
identifier[ret] [ literal[string] ]= identifier[self] . identifier[functions] . identifier[pack] [ literal[string] ][ literal[string] ]
identifier[ret] [ literal[string] ]= keyword[True]
keyword[if] identifier[data_returner] keyword[or] identifier[self] . identifier[schedule_returner] :
keyword[if] literal[string] keyword[in] identifier[data] :
identifier[ret] [ literal[string] ]= identifier[data] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[data] :
identifier[ret] [ literal[string] ]= identifier[data] [ literal[string] ]
identifier[rets] =[]
keyword[for] identifier[returner] keyword[in] [ identifier[data_returner] , identifier[self] . identifier[schedule_returner] ]:
keyword[if] identifier[isinstance] ( identifier[returner] , identifier[six] . identifier[string_types] ):
identifier[rets] . identifier[append] ( identifier[returner] )
keyword[elif] identifier[isinstance] ( identifier[returner] , identifier[list] ):
identifier[rets] . identifier[extend] ( identifier[returner] )
keyword[for] identifier[returner] keyword[in] identifier[OrderedDict] . identifier[fromkeys] ( identifier[rets] ):
identifier[ret_str] = literal[string] . identifier[format] ( identifier[returner] )
keyword[if] identifier[ret_str] keyword[in] identifier[self] . identifier[returners] :
identifier[self] . identifier[returners] [ identifier[ret_str] ]( identifier[ret] )
keyword[else] :
identifier[log] . identifier[info] (
literal[string] ,
identifier[func] , identifier[returner]
)
keyword[except] identifier[Exception] :
identifier[log] . identifier[exception] ( literal[string] , identifier[ret] [ literal[string] ])
keyword[if] literal[string] keyword[not] keyword[in] identifier[ret] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[ret] [ literal[string] ])
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[int]
keyword[finally] :
keyword[if] literal[string] keyword[in] identifier[self] . identifier[opts] keyword[and] identifier[self] . identifier[opts] [ literal[string] ] keyword[in] ( literal[string] , literal[string] ):
keyword[if] literal[string] keyword[in] identifier[data] keyword[and] keyword[not] identifier[data] [ literal[string] ]:
keyword[pass]
keyword[else] :
identifier[mret] = identifier[ret] . identifier[copy] ()
keyword[if] keyword[not] identifier[data_returner] keyword[and] keyword[not] identifier[self] . identifier[schedule_returner] :
identifier[mret] [ literal[string] ]= literal[string]
keyword[if] identifier[data] . identifier[get] ( literal[string] )== literal[string] :
identifier[mret] [ literal[string] ]= literal[string]
identifier[load] ={ literal[string] : literal[string] , literal[string] : identifier[self] . identifier[opts] [ literal[string] ]}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[mret] ):
identifier[load] [ identifier[key] ]= identifier[value]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[opts] keyword[and] identifier[self] . identifier[opts] [ literal[string] ]== literal[string] :
identifier[event] = identifier[salt] . identifier[utils] . identifier[event] . identifier[get_event] ( literal[string] ,
identifier[opts] = identifier[self] . identifier[opts] ,
identifier[listen] = keyword[False] )
keyword[elif] literal[string] keyword[in] identifier[self] . identifier[opts] keyword[and] identifier[self] . identifier[opts] [ literal[string] ]== literal[string] :
identifier[event] = identifier[salt] . identifier[utils] . identifier[event] . identifier[get_master_event] ( identifier[self] . identifier[opts] ,
identifier[self] . identifier[opts] [ literal[string] ])
keyword[try] :
identifier[event] . identifier[fire_event] ( identifier[load] , literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
identifier[log] . identifier[exception] ( literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[standalone] :
identifier[log] . identifier[debug] ( literal[string] , identifier[proc_fn] )
keyword[try] :
identifier[os] . identifier[unlink] ( identifier[proc_fn] )
keyword[except] identifier[OSError] keyword[as] identifier[exc] :
keyword[if] identifier[exc] . identifier[errno] == identifier[errno] . identifier[EEXIST] keyword[or] identifier[exc] . identifier[errno] == identifier[errno] . identifier[ENOENT] :
keyword[pass]
keyword[else] :
identifier[log] . identifier[error] ( literal[string] , identifier[proc_fn] , identifier[exc] . identifier[errno] )
keyword[raise]
keyword[finally] :
keyword[if] identifier[multiprocessing_enabled] :
identifier[sys] . identifier[exit] ( identifier[salt] . identifier[defaults] . identifier[exitcodes] . identifier[EX_GENERIC] )
|
def handle_func(self, multiprocessing_enabled, func, data):
"""
Execute this method in a multiprocess or thread
"""
if salt.utils.platform.is_windows() or self.opts.get('transport') == 'zeromq':
# Since function references can't be pickled and pickling
# is required when spawning new processes on Windows, regenerate
# the functions and returners.
# This also needed for ZeroMQ transport to reset all functions
# context data that could keep paretns connections. ZeroMQ will
# hang on polling parents connections from the child process.
if self.opts['__role'] == 'master':
self.functions = salt.loader.runner(self.opts, utils=self.utils) # depends on [control=['if'], data=[]]
else:
self.functions = salt.loader.minion_mods(self.opts, proxy=self.proxy, utils=self.utils)
self.returners = salt.loader.returners(self.opts, self.functions, proxy=self.proxy) # depends on [control=['if'], data=[]]
ret = {'id': self.opts.get('id', 'master'), 'fun': func, 'fun_args': [], 'schedule': data['name'], 'jid': salt.utils.jid.gen_jid(self.opts)}
if 'metadata' in data:
if isinstance(data['metadata'], dict):
ret['metadata'] = data['metadata']
ret['metadata']['_TOS'] = self.time_offset
ret['metadata']['_TS'] = time.ctime()
ret['metadata']['_TT'] = time.strftime('%Y %B %d %a %H %m', time.gmtime()) # depends on [control=['if'], data=[]]
else:
log.warning('schedule: The metadata parameter must be specified as a dictionary. Ignoring.') # depends on [control=['if'], data=['data']]
if multiprocessing_enabled:
# We just want to modify the process name if we're on a different process
salt.utils.process.appendproctitle('{0} {1}'.format(self.__class__.__name__, ret['jid'])) # depends on [control=['if'], data=[]]
data_returner = data.get('returner', None)
if not self.standalone:
proc_fn = os.path.join(salt.minion.get_proc_dir(self.opts['cachedir']), ret['jid']) # depends on [control=['if'], data=[]]
if multiprocessing_enabled and (not salt.utils.platform.is_windows()):
# Reconfigure multiprocessing logging after daemonizing
log_setup.setup_multiprocessing_logging() # depends on [control=['if'], data=[]]
if multiprocessing_enabled:
# Don't *BEFORE* to go into try to don't let it triple execute the finally section.
salt.utils.process.daemonize_if(self.opts) # depends on [control=['if'], data=[]]
# TODO: Make it readable! Splt to funcs, remove nested try-except-finally sections.
try:
minion_blackout_violation = False
if self.opts.get('pillar', {}).get('minion_blackout', False):
whitelist = self.opts.get('pillar', {}).get('minion_blackout_whitelist', [])
# this minion is blacked out. Only allow saltutil.refresh_pillar and the whitelist
if func != 'saltutil.refresh_pillar' and func not in whitelist:
minion_blackout_violation = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif self.opts.get('grains', {}).get('minion_blackout', False):
whitelist = self.opts.get('grains', {}).get('minion_blackout_whitelist', [])
if func != 'saltutil.refresh_pillar' and func not in whitelist:
minion_blackout_violation = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if minion_blackout_violation:
raise SaltInvocationError("Minion in blackout mode. Set 'minion_blackout' to False in pillar or grains to resume operations. Only saltutil.refresh_pillar allowed in blackout mode.") # depends on [control=['if'], data=[]]
ret['pid'] = os.getpid()
if not self.standalone:
if 'jid_include' not in data or data['jid_include']:
log.debug('schedule.handle_func: adding this job to the jobcache with data %s', ret)
# write this to /var/cache/salt/minion/proc
with salt.utils.files.fopen(proc_fn, 'w+b') as fp_:
fp_.write(salt.payload.Serial(self.opts).dumps(ret)) # depends on [control=['with'], data=['fp_']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
args = tuple()
if 'args' in data:
args = data['args']
ret['fun_args'].extend(data['args']) # depends on [control=['if'], data=['data']]
kwargs = {}
if 'kwargs' in data:
kwargs = data['kwargs']
ret['fun_args'].append(copy.deepcopy(kwargs)) # depends on [control=['if'], data=['data']]
if func not in self.functions:
ret['return'] = self.functions.missing_fun_string(func)
salt.utils.error.raise_error(message=self.functions.missing_fun_string(func)) # depends on [control=['if'], data=['func']]
# if the func support **kwargs, lets pack in the pub data we have
# TODO: pack the *same* pub data as a minion?
argspec = salt.utils.args.get_function_argspec(self.functions[func])
if argspec.keywords:
# this function accepts **kwargs, pack in the publish data
for (key, val) in six.iteritems(ret):
if key is not 'kwargs':
kwargs['__pub_{0}'.format(key)] = copy.deepcopy(val) # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
# Only include these when running runner modules
if self.opts['__role'] == 'master':
jid = salt.utils.jid.gen_jid(self.opts)
tag = salt.utils.event.tagify(jid, prefix='salt/scheduler/')
event = salt.utils.event.get_event(self.opts['__role'], self.opts['sock_dir'], self.opts['transport'], opts=self.opts, listen=False)
namespaced_event = salt.utils.event.NamespacedEvent(event, tag, print_func=None)
func_globals = {'__jid__': jid, '__user__': salt.utils.user.get_user(), '__tag__': tag, '__jid_event__': weakref.proxy(namespaced_event)}
self_functions = copy.copy(self.functions)
salt.utils.lazy.verify_fun(self_functions, func)
# Inject some useful globals to *all* the function's global
# namespace only once per module-- not per func
completed_funcs = []
for mod_name in six.iterkeys(self_functions):
if '.' not in mod_name:
continue # depends on [control=['if'], data=[]]
(mod, _) = mod_name.split('.', 1)
if mod in completed_funcs:
continue # depends on [control=['if'], data=[]]
completed_funcs.append(mod)
for (global_key, value) in six.iteritems(func_globals):
self.functions[mod_name].__globals__[global_key] = value # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['mod_name']] # depends on [control=['if'], data=[]]
self.functions.pack['__context__']['retcode'] = 0
ret['return'] = self.functions[func](*args, **kwargs)
if not self.standalone:
# runners do not provide retcode
if 'retcode' in self.functions.pack['__context__']:
ret['retcode'] = self.functions.pack['__context__']['retcode'] # depends on [control=['if'], data=[]]
ret['success'] = True
if data_returner or self.schedule_returner:
if 'return_config' in data:
ret['ret_config'] = data['return_config'] # depends on [control=['if'], data=['data']]
if 'return_kwargs' in data:
ret['ret_kwargs'] = data['return_kwargs'] # depends on [control=['if'], data=['data']]
rets = []
for returner in [data_returner, self.schedule_returner]:
if isinstance(returner, six.string_types):
rets.append(returner) # depends on [control=['if'], data=[]]
elif isinstance(returner, list):
rets.extend(returner) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['returner']]
# simple de-duplication with order retained
for returner in OrderedDict.fromkeys(rets):
ret_str = '{0}.returner'.format(returner)
if ret_str in self.returners:
self.returners[ret_str](ret) # depends on [control=['if'], data=['ret_str']]
else:
log.info('Job %s using invalid returner: %s. Ignoring.', func, returner) # depends on [control=['for'], data=['returner']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception:
log.exception('Unhandled exception running %s', ret['fun'])
# Although catch-all exception handlers are bad, the exception here
# is to let the exception bubble up to the top of the thread context,
# where the thread will die silently, which is worse.
if 'return' not in ret:
ret['return'] = 'Unhandled exception running {0}'.format(ret['fun']) # depends on [control=['if'], data=['ret']]
ret['success'] = False
ret['retcode'] = 254 # depends on [control=['except'], data=[]]
finally:
# Only attempt to return data to the master if the scheduled job is running
# on a master itself or a minion.
if '__role' in self.opts and self.opts['__role'] in ('master', 'minion'):
# The 'return_job' option is enabled by default even if not set
if 'return_job' in data and (not data['return_job']):
pass # depends on [control=['if'], data=[]]
else:
# Send back to master so the job is included in the job list
mret = ret.copy()
# No returners defined, so we're only sending back to the master
if not data_returner and (not self.schedule_returner):
mret['jid'] = 'req'
if data.get('return_job') == 'nocache':
# overwrite 'req' to signal to master that
# this job shouldn't be stored
mret['jid'] = 'nocache' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
load = {'cmd': '_return', 'id': self.opts['id']}
for (key, value) in six.iteritems(mret):
load[key] = value # depends on [control=['for'], data=[]]
if '__role' in self.opts and self.opts['__role'] == 'minion':
event = salt.utils.event.get_event('minion', opts=self.opts, listen=False) # depends on [control=['if'], data=[]]
elif '__role' in self.opts and self.opts['__role'] == 'master':
event = salt.utils.event.get_master_event(self.opts, self.opts['sock_dir']) # depends on [control=['if'], data=[]]
try:
event.fire_event(load, '__schedule_return') # depends on [control=['try'], data=[]]
except Exception as exc:
log.exception('Unhandled exception firing __schedule_return event') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if not self.standalone:
log.debug('schedule.handle_func: Removing %s', proc_fn)
try:
os.unlink(proc_fn) # depends on [control=['try'], data=[]]
except OSError as exc:
if exc.errno == errno.EEXIST or exc.errno == errno.ENOENT:
# EEXIST and ENOENT are OK because the file is gone and that's what
# we wanted
pass # depends on [control=['if'], data=[]]
else:
log.error("Failed to delete '%s': %s", proc_fn, exc.errno)
# Otherwise, failing to delete this file is not something
# we can cleanly handle.
raise # depends on [control=['except'], data=['exc']]
finally:
if multiprocessing_enabled:
# Let's make sure we exit the process!
sys.exit(salt.defaults.exitcodes.EX_GENERIC) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
|
def visit_field_class(self, item, descriptor=None, fields=None):
"""Visit a class node containing a list of field definitions."""
discovered_fields = collections.OrderedDict()
field_groups = {}
for node in item.body:
if isinstance(node, ast.ClassDef):
field_groups[node.name] = self.visit_field_class(node)
continue
if not isinstance(node, ast.Assign):
continue
if not isinstance(node.value, ast.Call):
continue
if not isinstance(node.targets[0], ast.Name):
continue
# Build accessible symbols table.
symtable = {}
# All field types.
symtable.update({
field.__name__: field
for field in get_available_fields()
})
# Field group classes.
symtable.update(field_groups)
evaluator = SafeEvaluator(symtable=symtable)
name = node.targets[0].id
try:
field = evaluator.run(node.value)
except Exception as ex:
# TODO: Handle errors.
raise ex
if descriptor is not None:
field.contribute_to_class(descriptor, fields, name)
else:
discovered_fields[name] = field
if descriptor is None:
class Fields:
"""Fields wrapper."""
for name, field in discovered_fields.items():
setattr(Fields, name, field)
return Fields
|
def function[visit_field_class, parameter[self, item, descriptor, fields]]:
constant[Visit a class node containing a list of field definitions.]
variable[discovered_fields] assign[=] call[name[collections].OrderedDict, parameter[]]
variable[field_groups] assign[=] dictionary[[], []]
for taget[name[node]] in starred[name[item].body] begin[:]
if call[name[isinstance], parameter[name[node], name[ast].ClassDef]] begin[:]
call[name[field_groups]][name[node].name] assign[=] call[name[self].visit_field_class, parameter[name[node]]]
continue
if <ast.UnaryOp object at 0x7da1b1a2f6a0> begin[:]
continue
if <ast.UnaryOp object at 0x7da1b1a2db70> begin[:]
continue
if <ast.UnaryOp object at 0x7da1b1a2f4c0> begin[:]
continue
variable[symtable] assign[=] dictionary[[], []]
call[name[symtable].update, parameter[<ast.DictComp object at 0x7da1b1a2df00>]]
call[name[symtable].update, parameter[name[field_groups]]]
variable[evaluator] assign[=] call[name[SafeEvaluator], parameter[]]
variable[name] assign[=] call[name[node].targets][constant[0]].id
<ast.Try object at 0x7da1b1a2d1b0>
if compare[name[descriptor] is_not constant[None]] begin[:]
call[name[field].contribute_to_class, parameter[name[descriptor], name[fields], name[name]]]
if compare[name[descriptor] is constant[None]] begin[:]
class class[Fields, parameter[]] begin[:]
constant[Fields wrapper.]
for taget[tuple[[<ast.Name object at 0x7da20e9b20b0>, <ast.Name object at 0x7da20e9b2cb0>]]] in starred[call[name[discovered_fields].items, parameter[]]] begin[:]
call[name[setattr], parameter[name[Fields], name[name], name[field]]]
return[name[Fields]]
|
keyword[def] identifier[visit_field_class] ( identifier[self] , identifier[item] , identifier[descriptor] = keyword[None] , identifier[fields] = keyword[None] ):
literal[string]
identifier[discovered_fields] = identifier[collections] . identifier[OrderedDict] ()
identifier[field_groups] ={}
keyword[for] identifier[node] keyword[in] identifier[item] . identifier[body] :
keyword[if] identifier[isinstance] ( identifier[node] , identifier[ast] . identifier[ClassDef] ):
identifier[field_groups] [ identifier[node] . identifier[name] ]= identifier[self] . identifier[visit_field_class] ( identifier[node] )
keyword[continue]
keyword[if] keyword[not] identifier[isinstance] ( identifier[node] , identifier[ast] . identifier[Assign] ):
keyword[continue]
keyword[if] keyword[not] identifier[isinstance] ( identifier[node] . identifier[value] , identifier[ast] . identifier[Call] ):
keyword[continue]
keyword[if] keyword[not] identifier[isinstance] ( identifier[node] . identifier[targets] [ literal[int] ], identifier[ast] . identifier[Name] ):
keyword[continue]
identifier[symtable] ={}
identifier[symtable] . identifier[update] ({
identifier[field] . identifier[__name__] : identifier[field]
keyword[for] identifier[field] keyword[in] identifier[get_available_fields] ()
})
identifier[symtable] . identifier[update] ( identifier[field_groups] )
identifier[evaluator] = identifier[SafeEvaluator] ( identifier[symtable] = identifier[symtable] )
identifier[name] = identifier[node] . identifier[targets] [ literal[int] ]. identifier[id]
keyword[try] :
identifier[field] = identifier[evaluator] . identifier[run] ( identifier[node] . identifier[value] )
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
keyword[raise] identifier[ex]
keyword[if] identifier[descriptor] keyword[is] keyword[not] keyword[None] :
identifier[field] . identifier[contribute_to_class] ( identifier[descriptor] , identifier[fields] , identifier[name] )
keyword[else] :
identifier[discovered_fields] [ identifier[name] ]= identifier[field]
keyword[if] identifier[descriptor] keyword[is] keyword[None] :
keyword[class] identifier[Fields] :
literal[string]
keyword[for] identifier[name] , identifier[field] keyword[in] identifier[discovered_fields] . identifier[items] ():
identifier[setattr] ( identifier[Fields] , identifier[name] , identifier[field] )
keyword[return] identifier[Fields]
|
def visit_field_class(self, item, descriptor=None, fields=None):
"""Visit a class node containing a list of field definitions."""
discovered_fields = collections.OrderedDict()
field_groups = {}
for node in item.body:
if isinstance(node, ast.ClassDef):
field_groups[node.name] = self.visit_field_class(node)
continue # depends on [control=['if'], data=[]]
if not isinstance(node, ast.Assign):
continue # depends on [control=['if'], data=[]]
if not isinstance(node.value, ast.Call):
continue # depends on [control=['if'], data=[]]
if not isinstance(node.targets[0], ast.Name):
continue # depends on [control=['if'], data=[]]
# Build accessible symbols table.
symtable = {}
# All field types.
symtable.update({field.__name__: field for field in get_available_fields()})
# Field group classes.
symtable.update(field_groups)
evaluator = SafeEvaluator(symtable=symtable)
name = node.targets[0].id
try:
field = evaluator.run(node.value) # depends on [control=['try'], data=[]]
except Exception as ex:
# TODO: Handle errors.
raise ex # depends on [control=['except'], data=['ex']]
if descriptor is not None:
field.contribute_to_class(descriptor, fields, name) # depends on [control=['if'], data=['descriptor']]
else:
discovered_fields[name] = field # depends on [control=['for'], data=['node']]
if descriptor is None:
class Fields:
"""Fields wrapper."""
for (name, field) in discovered_fields.items():
setattr(Fields, name, field) # depends on [control=['for'], data=[]]
return Fields # depends on [control=['if'], data=[]]
|
def guess_archive_type(name):
'''
Guess an archive type (tar, zip, or rar) by its file extension
'''
name = name.lower()
for ending in ('tar', 'tar.gz', 'tgz',
'tar.bz2', 'tbz2', 'tbz',
'tar.xz', 'txz',
'tar.lzma', 'tlz'):
if name.endswith('.' + ending):
return 'tar'
for ending in ('zip', 'rar'):
if name.endswith('.' + ending):
return ending
return None
|
def function[guess_archive_type, parameter[name]]:
constant[
Guess an archive type (tar, zip, or rar) by its file extension
]
variable[name] assign[=] call[name[name].lower, parameter[]]
for taget[name[ending]] in starred[tuple[[<ast.Constant object at 0x7da1b1c36380>, <ast.Constant object at 0x7da1b1c37370>, <ast.Constant object at 0x7da1b1c344c0>, <ast.Constant object at 0x7da1b1c34bb0>, <ast.Constant object at 0x7da1b1c34100>, <ast.Constant object at 0x7da1b1c35c30>, <ast.Constant object at 0x7da1b1c37160>, <ast.Constant object at 0x7da1b1c36fb0>, <ast.Constant object at 0x7da1b1c36950>, <ast.Constant object at 0x7da1b1c360b0>]]] begin[:]
if call[name[name].endswith, parameter[binary_operation[constant[.] + name[ending]]]] begin[:]
return[constant[tar]]
for taget[name[ending]] in starred[tuple[[<ast.Constant object at 0x7da1b1c372b0>, <ast.Constant object at 0x7da1b1c34790>]]] begin[:]
if call[name[name].endswith, parameter[binary_operation[constant[.] + name[ending]]]] begin[:]
return[name[ending]]
return[constant[None]]
|
keyword[def] identifier[guess_archive_type] ( identifier[name] ):
literal[string]
identifier[name] = identifier[name] . identifier[lower] ()
keyword[for] identifier[ending] keyword[in] ( literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] , literal[string] ):
keyword[if] identifier[name] . identifier[endswith] ( literal[string] + identifier[ending] ):
keyword[return] literal[string]
keyword[for] identifier[ending] keyword[in] ( literal[string] , literal[string] ):
keyword[if] identifier[name] . identifier[endswith] ( literal[string] + identifier[ending] ):
keyword[return] identifier[ending]
keyword[return] keyword[None]
|
def guess_archive_type(name):
"""
Guess an archive type (tar, zip, or rar) by its file extension
"""
name = name.lower()
for ending in ('tar', 'tar.gz', 'tgz', 'tar.bz2', 'tbz2', 'tbz', 'tar.xz', 'txz', 'tar.lzma', 'tlz'):
if name.endswith('.' + ending):
return 'tar' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ending']]
for ending in ('zip', 'rar'):
if name.endswith('.' + ending):
return ending # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ending']]
return None
|
def overflow(self, overflow):
"""
Update the overflow algorithm of successive INCRBY operations
:param overflow: Overflow algorithm, one of WRAP, SAT, FAIL. See the
Redis docs for descriptions of these algorithmsself.
:returns: a :py:class:`BitFieldOperation` instance.
"""
overflow = overflow.upper()
if overflow != self._last_overflow:
self._last_overflow = overflow
self.operations.append(('OVERFLOW', overflow))
return self
|
def function[overflow, parameter[self, overflow]]:
constant[
Update the overflow algorithm of successive INCRBY operations
:param overflow: Overflow algorithm, one of WRAP, SAT, FAIL. See the
Redis docs for descriptions of these algorithmsself.
:returns: a :py:class:`BitFieldOperation` instance.
]
variable[overflow] assign[=] call[name[overflow].upper, parameter[]]
if compare[name[overflow] not_equal[!=] name[self]._last_overflow] begin[:]
name[self]._last_overflow assign[=] name[overflow]
call[name[self].operations.append, parameter[tuple[[<ast.Constant object at 0x7da18c4cf2b0>, <ast.Name object at 0x7da18c4cd090>]]]]
return[name[self]]
|
keyword[def] identifier[overflow] ( identifier[self] , identifier[overflow] ):
literal[string]
identifier[overflow] = identifier[overflow] . identifier[upper] ()
keyword[if] identifier[overflow] != identifier[self] . identifier[_last_overflow] :
identifier[self] . identifier[_last_overflow] = identifier[overflow]
identifier[self] . identifier[operations] . identifier[append] (( literal[string] , identifier[overflow] ))
keyword[return] identifier[self]
|
def overflow(self, overflow):
"""
Update the overflow algorithm of successive INCRBY operations
:param overflow: Overflow algorithm, one of WRAP, SAT, FAIL. See the
Redis docs for descriptions of these algorithmsself.
:returns: a :py:class:`BitFieldOperation` instance.
"""
overflow = overflow.upper()
if overflow != self._last_overflow:
self._last_overflow = overflow
self.operations.append(('OVERFLOW', overflow)) # depends on [control=['if'], data=['overflow']]
return self
|
def calibrate_plunger(
self,
top=None,
bottom=None,
blow_out=None,
drop_tip=None):
"""Set calibration values for the pipette plunger.
This can be called multiple times as the user sets each value,
or you can set them all at once.
Parameters
----------
top : int
Touching but not engaging the plunger.
bottom: int
Must be above the pipette's physical hard-stop, while still
leaving enough room for 'blow_out'
blow_out : int
Plunger has been pushed down enough to expell all liquids.
drop_tip : int
This position that causes the tip to be released from the
pipette.
"""
if top is not None:
self.plunger_positions['top'] = top
if bottom is not None:
self.plunger_positions['bottom'] = bottom
if blow_out is not None:
self.plunger_positions['blow_out'] = blow_out
if drop_tip is not None:
self.plunger_positions['drop_tip'] = drop_tip
return self
|
def function[calibrate_plunger, parameter[self, top, bottom, blow_out, drop_tip]]:
constant[Set calibration values for the pipette plunger.
This can be called multiple times as the user sets each value,
or you can set them all at once.
Parameters
----------
top : int
Touching but not engaging the plunger.
bottom: int
Must be above the pipette's physical hard-stop, while still
leaving enough room for 'blow_out'
blow_out : int
Plunger has been pushed down enough to expell all liquids.
drop_tip : int
This position that causes the tip to be released from the
pipette.
]
if compare[name[top] is_not constant[None]] begin[:]
call[name[self].plunger_positions][constant[top]] assign[=] name[top]
if compare[name[bottom] is_not constant[None]] begin[:]
call[name[self].plunger_positions][constant[bottom]] assign[=] name[bottom]
if compare[name[blow_out] is_not constant[None]] begin[:]
call[name[self].plunger_positions][constant[blow_out]] assign[=] name[blow_out]
if compare[name[drop_tip] is_not constant[None]] begin[:]
call[name[self].plunger_positions][constant[drop_tip]] assign[=] name[drop_tip]
return[name[self]]
|
keyword[def] identifier[calibrate_plunger] (
identifier[self] ,
identifier[top] = keyword[None] ,
identifier[bottom] = keyword[None] ,
identifier[blow_out] = keyword[None] ,
identifier[drop_tip] = keyword[None] ):
literal[string]
keyword[if] identifier[top] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[plunger_positions] [ literal[string] ]= identifier[top]
keyword[if] identifier[bottom] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[plunger_positions] [ literal[string] ]= identifier[bottom]
keyword[if] identifier[blow_out] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[plunger_positions] [ literal[string] ]= identifier[blow_out]
keyword[if] identifier[drop_tip] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[plunger_positions] [ literal[string] ]= identifier[drop_tip]
keyword[return] identifier[self]
|
def calibrate_plunger(self, top=None, bottom=None, blow_out=None, drop_tip=None):
"""Set calibration values for the pipette plunger.
This can be called multiple times as the user sets each value,
or you can set them all at once.
Parameters
----------
top : int
Touching but not engaging the plunger.
bottom: int
Must be above the pipette's physical hard-stop, while still
leaving enough room for 'blow_out'
blow_out : int
Plunger has been pushed down enough to expell all liquids.
drop_tip : int
This position that causes the tip to be released from the
pipette.
"""
if top is not None:
self.plunger_positions['top'] = top # depends on [control=['if'], data=['top']]
if bottom is not None:
self.plunger_positions['bottom'] = bottom # depends on [control=['if'], data=['bottom']]
if blow_out is not None:
self.plunger_positions['blow_out'] = blow_out # depends on [control=['if'], data=['blow_out']]
if drop_tip is not None:
self.plunger_positions['drop_tip'] = drop_tip # depends on [control=['if'], data=['drop_tip']]
return self
|
def run_script(self, script_name, script_args=None, node_paths=None):
"""Returns a command to execute a package.json script.
:param script_name: Name of the script to name. Note that script name 'test'
can be used to run node tests.
:param script_args: Args to be passed to package.json script.
:param node_paths: A list of path that should be included in $PATH when
running the script.
"""
# TODO: consider add a pants.util function to manipulate command line.
package_manager_args = self._get_run_script_args()
package_manager_args.append(script_name)
if script_args:
package_manager_args.append('--')
package_manager_args.extend(script_args)
return self.run_command(args=package_manager_args, node_paths=node_paths)
|
def function[run_script, parameter[self, script_name, script_args, node_paths]]:
constant[Returns a command to execute a package.json script.
:param script_name: Name of the script to name. Note that script name 'test'
can be used to run node tests.
:param script_args: Args to be passed to package.json script.
:param node_paths: A list of path that should be included in $PATH when
running the script.
]
variable[package_manager_args] assign[=] call[name[self]._get_run_script_args, parameter[]]
call[name[package_manager_args].append, parameter[name[script_name]]]
if name[script_args] begin[:]
call[name[package_manager_args].append, parameter[constant[--]]]
call[name[package_manager_args].extend, parameter[name[script_args]]]
return[call[name[self].run_command, parameter[]]]
|
keyword[def] identifier[run_script] ( identifier[self] , identifier[script_name] , identifier[script_args] = keyword[None] , identifier[node_paths] = keyword[None] ):
literal[string]
identifier[package_manager_args] = identifier[self] . identifier[_get_run_script_args] ()
identifier[package_manager_args] . identifier[append] ( identifier[script_name] )
keyword[if] identifier[script_args] :
identifier[package_manager_args] . identifier[append] ( literal[string] )
identifier[package_manager_args] . identifier[extend] ( identifier[script_args] )
keyword[return] identifier[self] . identifier[run_command] ( identifier[args] = identifier[package_manager_args] , identifier[node_paths] = identifier[node_paths] )
|
def run_script(self, script_name, script_args=None, node_paths=None):
"""Returns a command to execute a package.json script.
:param script_name: Name of the script to name. Note that script name 'test'
can be used to run node tests.
:param script_args: Args to be passed to package.json script.
:param node_paths: A list of path that should be included in $PATH when
running the script.
"""
# TODO: consider add a pants.util function to manipulate command line.
package_manager_args = self._get_run_script_args()
package_manager_args.append(script_name)
if script_args:
package_manager_args.append('--')
package_manager_args.extend(script_args) # depends on [control=['if'], data=[]]
return self.run_command(args=package_manager_args, node_paths=node_paths)
|
def current_line(self):
'''Get the text of the current source line as a string, with a trailing
newline character'''
if self.is_optimized_out():
return '(frame information optimized out)'
with open(self.filename(), 'r') as f:
all_lines = f.readlines()
# Convert from 1-based current_line_num to 0-based list offset:
return all_lines[self.current_line_num()-1]
|
def function[current_line, parameter[self]]:
constant[Get the text of the current source line as a string, with a trailing
newline character]
if call[name[self].is_optimized_out, parameter[]] begin[:]
return[constant[(frame information optimized out)]]
with call[name[open], parameter[call[name[self].filename, parameter[]], constant[r]]] begin[:]
variable[all_lines] assign[=] call[name[f].readlines, parameter[]]
return[call[name[all_lines]][binary_operation[call[name[self].current_line_num, parameter[]] - constant[1]]]]
|
keyword[def] identifier[current_line] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[is_optimized_out] ():
keyword[return] literal[string]
keyword[with] identifier[open] ( identifier[self] . identifier[filename] (), literal[string] ) keyword[as] identifier[f] :
identifier[all_lines] = identifier[f] . identifier[readlines] ()
keyword[return] identifier[all_lines] [ identifier[self] . identifier[current_line_num] ()- literal[int] ]
|
def current_line(self):
"""Get the text of the current source line as a string, with a trailing
newline character"""
if self.is_optimized_out():
return '(frame information optimized out)' # depends on [control=['if'], data=[]]
with open(self.filename(), 'r') as f:
all_lines = f.readlines()
# Convert from 1-based current_line_num to 0-based list offset:
return all_lines[self.current_line_num() - 1] # depends on [control=['with'], data=['f']]
|
def _send_scp(self, cabinet, frame, board, *args, **kwargs):
"""Determine the best connection to use to send an SCP packet and use
it to transmit.
See the arguments for
:py:meth:`~rig.machine_control.scp_connection.SCPConnection` for
details.
"""
# Find the connection which best matches the specified coordinates,
# preferring direct connections to a board when available.
connection = self.connections.get((cabinet, frame, board), None)
if connection is None:
connection = self.connections.get((cabinet, frame), None)
assert connection is not None, \
"No connection available to ({}, {}, {})".format(cabinet,
frame,
board)
# Determine the size of packet we expect in return, this is usually the
# size that we are informed we should expect by SCAMP/SARK or else is
# the default.
if self._scp_data_length is None:
length = consts.SCP_SVER_RECEIVE_LENGTH_MAX
else:
length = self._scp_data_length
return connection.send_scp(length, 0, 0, board, *args, **kwargs)
|
def function[_send_scp, parameter[self, cabinet, frame, board]]:
constant[Determine the best connection to use to send an SCP packet and use
it to transmit.
See the arguments for
:py:meth:`~rig.machine_control.scp_connection.SCPConnection` for
details.
]
variable[connection] assign[=] call[name[self].connections.get, parameter[tuple[[<ast.Name object at 0x7da1b19700d0>, <ast.Name object at 0x7da1b19701f0>, <ast.Name object at 0x7da1b1971780>]], constant[None]]]
if compare[name[connection] is constant[None]] begin[:]
variable[connection] assign[=] call[name[self].connections.get, parameter[tuple[[<ast.Name object at 0x7da1b19712d0>, <ast.Name object at 0x7da1b1971a20>]], constant[None]]]
assert[compare[name[connection] is_not constant[None]]]
if compare[name[self]._scp_data_length is constant[None]] begin[:]
variable[length] assign[=] name[consts].SCP_SVER_RECEIVE_LENGTH_MAX
return[call[name[connection].send_scp, parameter[name[length], constant[0], constant[0], name[board], <ast.Starred object at 0x7da1b1970a00>]]]
|
keyword[def] identifier[_send_scp] ( identifier[self] , identifier[cabinet] , identifier[frame] , identifier[board] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[connection] = identifier[self] . identifier[connections] . identifier[get] (( identifier[cabinet] , identifier[frame] , identifier[board] ), keyword[None] )
keyword[if] identifier[connection] keyword[is] keyword[None] :
identifier[connection] = identifier[self] . identifier[connections] . identifier[get] (( identifier[cabinet] , identifier[frame] ), keyword[None] )
keyword[assert] identifier[connection] keyword[is] keyword[not] keyword[None] , literal[string] . identifier[format] ( identifier[cabinet] ,
identifier[frame] ,
identifier[board] )
keyword[if] identifier[self] . identifier[_scp_data_length] keyword[is] keyword[None] :
identifier[length] = identifier[consts] . identifier[SCP_SVER_RECEIVE_LENGTH_MAX]
keyword[else] :
identifier[length] = identifier[self] . identifier[_scp_data_length]
keyword[return] identifier[connection] . identifier[send_scp] ( identifier[length] , literal[int] , literal[int] , identifier[board] ,* identifier[args] ,** identifier[kwargs] )
|
def _send_scp(self, cabinet, frame, board, *args, **kwargs):
"""Determine the best connection to use to send an SCP packet and use
it to transmit.
See the arguments for
:py:meth:`~rig.machine_control.scp_connection.SCPConnection` for
details.
"""
# Find the connection which best matches the specified coordinates,
# preferring direct connections to a board when available.
connection = self.connections.get((cabinet, frame, board), None)
if connection is None:
connection = self.connections.get((cabinet, frame), None) # depends on [control=['if'], data=['connection']]
assert connection is not None, 'No connection available to ({}, {}, {})'.format(cabinet, frame, board)
# Determine the size of packet we expect in return, this is usually the
# size that we are informed we should expect by SCAMP/SARK or else is
# the default.
if self._scp_data_length is None:
length = consts.SCP_SVER_RECEIVE_LENGTH_MAX # depends on [control=['if'], data=[]]
else:
length = self._scp_data_length
return connection.send_scp(length, 0, 0, board, *args, **kwargs)
|
def print_row_failures(failing_items, verbose=False, outfile_name=None):
"""
Take output from get_row_failures (DataFrame), and output it to
stdout, an outfile, or both.
"""
if outfile_name:
outfile = open(outfile_name, "w")
outfile.write("\t".join(["name", "row_number", "problem_type",
"problem_col", "error_message"]))
outfile.write("\n")
else:
outfile = None
for ind, row in failing_items.iterrows():
issues = row['issues']
string = "{:10} | row number: {}".format(ind, str(row["num"]))
first_string = "\t".join([str(ind), str(row["num"])])
if verbose:
print(first_string)
#if outfile:
# ofile.write("{}\n".format(string))
for key, issue in list(issues.items()):
issue_type, issue_col = extract_col_name(key)
string = "{:10} | {:10} | {}".format(issue_type, issue_col, issue)
string = "\t".join([issue_type, issue_col, issue])
if verbose:
print(string)
if outfile:
outfile.write(first_string + "\t" + string + "\n")
if outfile:
outfile.close()
|
def function[print_row_failures, parameter[failing_items, verbose, outfile_name]]:
constant[
Take output from get_row_failures (DataFrame), and output it to
stdout, an outfile, or both.
]
if name[outfile_name] begin[:]
variable[outfile] assign[=] call[name[open], parameter[name[outfile_name], constant[w]]]
call[name[outfile].write, parameter[call[constant[ ].join, parameter[list[[<ast.Constant object at 0x7da1b047e950>, <ast.Constant object at 0x7da1b047e620>, <ast.Constant object at 0x7da1b047e3e0>, <ast.Constant object at 0x7da1b047e6e0>, <ast.Constant object at 0x7da1b047e410>]]]]]]
call[name[outfile].write, parameter[constant[
]]]
for taget[tuple[[<ast.Name object at 0x7da1b047eef0>, <ast.Name object at 0x7da1b047f5e0>]]] in starred[call[name[failing_items].iterrows, parameter[]]] begin[:]
variable[issues] assign[=] call[name[row]][constant[issues]]
variable[string] assign[=] call[constant[{:10} | row number: {}].format, parameter[name[ind], call[name[str], parameter[call[name[row]][constant[num]]]]]]
variable[first_string] assign[=] call[constant[ ].join, parameter[list[[<ast.Call object at 0x7da1b042c790>, <ast.Call object at 0x7da1b042cf70>]]]]
if name[verbose] begin[:]
call[name[print], parameter[name[first_string]]]
for taget[tuple[[<ast.Name object at 0x7da1b042e890>, <ast.Name object at 0x7da1b042ea10>]]] in starred[call[name[list], parameter[call[name[issues].items, parameter[]]]]] begin[:]
<ast.Tuple object at 0x7da1b042e3e0> assign[=] call[name[extract_col_name], parameter[name[key]]]
variable[string] assign[=] call[constant[{:10} | {:10} | {}].format, parameter[name[issue_type], name[issue_col], name[issue]]]
variable[string] assign[=] call[constant[ ].join, parameter[list[[<ast.Name object at 0x7da1b04affa0>, <ast.Name object at 0x7da1b042c1c0>, <ast.Name object at 0x7da1b042f280>]]]]
if name[verbose] begin[:]
call[name[print], parameter[name[string]]]
if name[outfile] begin[:]
call[name[outfile].write, parameter[binary_operation[binary_operation[binary_operation[name[first_string] + constant[ ]] + name[string]] + constant[
]]]]
if name[outfile] begin[:]
call[name[outfile].close, parameter[]]
|
keyword[def] identifier[print_row_failures] ( identifier[failing_items] , identifier[verbose] = keyword[False] , identifier[outfile_name] = keyword[None] ):
literal[string]
keyword[if] identifier[outfile_name] :
identifier[outfile] = identifier[open] ( identifier[outfile_name] , literal[string] )
identifier[outfile] . identifier[write] ( literal[string] . identifier[join] ([ literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ]))
identifier[outfile] . identifier[write] ( literal[string] )
keyword[else] :
identifier[outfile] = keyword[None]
keyword[for] identifier[ind] , identifier[row] keyword[in] identifier[failing_items] . identifier[iterrows] ():
identifier[issues] = identifier[row] [ literal[string] ]
identifier[string] = literal[string] . identifier[format] ( identifier[ind] , identifier[str] ( identifier[row] [ literal[string] ]))
identifier[first_string] = literal[string] . identifier[join] ([ identifier[str] ( identifier[ind] ), identifier[str] ( identifier[row] [ literal[string] ])])
keyword[if] identifier[verbose] :
identifier[print] ( identifier[first_string] )
keyword[for] identifier[key] , identifier[issue] keyword[in] identifier[list] ( identifier[issues] . identifier[items] ()):
identifier[issue_type] , identifier[issue_col] = identifier[extract_col_name] ( identifier[key] )
identifier[string] = literal[string] . identifier[format] ( identifier[issue_type] , identifier[issue_col] , identifier[issue] )
identifier[string] = literal[string] . identifier[join] ([ identifier[issue_type] , identifier[issue_col] , identifier[issue] ])
keyword[if] identifier[verbose] :
identifier[print] ( identifier[string] )
keyword[if] identifier[outfile] :
identifier[outfile] . identifier[write] ( identifier[first_string] + literal[string] + identifier[string] + literal[string] )
keyword[if] identifier[outfile] :
identifier[outfile] . identifier[close] ()
|
def print_row_failures(failing_items, verbose=False, outfile_name=None):
"""
Take output from get_row_failures (DataFrame), and output it to
stdout, an outfile, or both.
"""
if outfile_name:
outfile = open(outfile_name, 'w')
outfile.write('\t'.join(['name', 'row_number', 'problem_type', 'problem_col', 'error_message']))
outfile.write('\n') # depends on [control=['if'], data=[]]
else:
outfile = None
for (ind, row) in failing_items.iterrows():
issues = row['issues']
string = '{:10} | row number: {}'.format(ind, str(row['num']))
first_string = '\t'.join([str(ind), str(row['num'])])
if verbose:
print(first_string) # depends on [control=['if'], data=[]]
#if outfile:
# ofile.write("{}\n".format(string))
for (key, issue) in list(issues.items()):
(issue_type, issue_col) = extract_col_name(key)
string = '{:10} | {:10} | {}'.format(issue_type, issue_col, issue)
string = '\t'.join([issue_type, issue_col, issue])
if verbose:
print(string) # depends on [control=['if'], data=[]]
if outfile:
outfile.write(first_string + '\t' + string + '\n') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
if outfile:
outfile.close() # depends on [control=['if'], data=[]]
|
def _is_twave(self, peak_num):
"""
Check whether a segment is a t-wave. Compare the maximum gradient of
the filtered signal segment with that of the previous qrs segment.
Parameters
----------
peak_num : int
The peak number of the mwi signal where the qrs is detected
"""
i = self.peak_inds_i[peak_num]
# Due to initialization parameters, last_qrs_ind may be negative.
# No way to check in this instance.
if self.last_qrs_ind - self.qrs_radius < 0:
return False
# Get half the qrs width of the signal to the left.
# Should this be squared?
sig_segment = normalize((self.sig_f[i - self.qrs_radius:i]
).reshape(-1, 1), axis=0)
last_qrs_segment = self.sig_f[self.last_qrs_ind - self.qrs_radius:
self.last_qrs_ind]
segment_slope = np.diff(sig_segment)
last_qrs_slope = np.diff(last_qrs_segment)
# Should we be using absolute values?
if max(segment_slope) < 0.5*max(abs(last_qrs_slope)):
return True
else:
return False
|
def function[_is_twave, parameter[self, peak_num]]:
constant[
Check whether a segment is a t-wave. Compare the maximum gradient of
the filtered signal segment with that of the previous qrs segment.
Parameters
----------
peak_num : int
The peak number of the mwi signal where the qrs is detected
]
variable[i] assign[=] call[name[self].peak_inds_i][name[peak_num]]
if compare[binary_operation[name[self].last_qrs_ind - name[self].qrs_radius] less[<] constant[0]] begin[:]
return[constant[False]]
variable[sig_segment] assign[=] call[name[normalize], parameter[call[call[name[self].sig_f][<ast.Slice object at 0x7da1b1915450>].reshape, parameter[<ast.UnaryOp object at 0x7da1b1917f10>, constant[1]]]]]
variable[last_qrs_segment] assign[=] call[name[self].sig_f][<ast.Slice object at 0x7da1b18dcb50>]
variable[segment_slope] assign[=] call[name[np].diff, parameter[name[sig_segment]]]
variable[last_qrs_slope] assign[=] call[name[np].diff, parameter[name[last_qrs_segment]]]
if compare[call[name[max], parameter[name[segment_slope]]] less[<] binary_operation[constant[0.5] * call[name[max], parameter[call[name[abs], parameter[name[last_qrs_slope]]]]]]] begin[:]
return[constant[True]]
|
keyword[def] identifier[_is_twave] ( identifier[self] , identifier[peak_num] ):
literal[string]
identifier[i] = identifier[self] . identifier[peak_inds_i] [ identifier[peak_num] ]
keyword[if] identifier[self] . identifier[last_qrs_ind] - identifier[self] . identifier[qrs_radius] < literal[int] :
keyword[return] keyword[False]
identifier[sig_segment] = identifier[normalize] (( identifier[self] . identifier[sig_f] [ identifier[i] - identifier[self] . identifier[qrs_radius] : identifier[i] ]
). identifier[reshape] (- literal[int] , literal[int] ), identifier[axis] = literal[int] )
identifier[last_qrs_segment] = identifier[self] . identifier[sig_f] [ identifier[self] . identifier[last_qrs_ind] - identifier[self] . identifier[qrs_radius] :
identifier[self] . identifier[last_qrs_ind] ]
identifier[segment_slope] = identifier[np] . identifier[diff] ( identifier[sig_segment] )
identifier[last_qrs_slope] = identifier[np] . identifier[diff] ( identifier[last_qrs_segment] )
keyword[if] identifier[max] ( identifier[segment_slope] )< literal[int] * identifier[max] ( identifier[abs] ( identifier[last_qrs_slope] )):
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False]
|
def _is_twave(self, peak_num):
"""
Check whether a segment is a t-wave. Compare the maximum gradient of
the filtered signal segment with that of the previous qrs segment.
Parameters
----------
peak_num : int
The peak number of the mwi signal where the qrs is detected
"""
i = self.peak_inds_i[peak_num]
# Due to initialization parameters, last_qrs_ind may be negative.
# No way to check in this instance.
if self.last_qrs_ind - self.qrs_radius < 0:
return False # depends on [control=['if'], data=[]]
# Get half the qrs width of the signal to the left.
# Should this be squared?
sig_segment = normalize(self.sig_f[i - self.qrs_radius:i].reshape(-1, 1), axis=0)
last_qrs_segment = self.sig_f[self.last_qrs_ind - self.qrs_radius:self.last_qrs_ind]
segment_slope = np.diff(sig_segment)
last_qrs_slope = np.diff(last_qrs_segment)
# Should we be using absolute values?
if max(segment_slope) < 0.5 * max(abs(last_qrs_slope)):
return True # depends on [control=['if'], data=[]]
else:
return False
|
def generate_project_name():
"""Generates a random project name."""
adjectives = [
'aged', 'ancient', 'autumn', 'billowing', 'bitter', 'black', 'blue', 'bold',
'broad', 'broken', 'calm', 'cold', 'cool', 'crimson', 'curly', 'damp',
'dark', 'dawn', 'delicate', 'divine', 'dry', 'empty', 'falling', 'fancy',
'flat', 'floral', 'fragrant', 'frosty', 'gentle', 'green', 'hidden', 'holy',
'icy', 'jolly', 'late', 'lingering', 'little', 'lively', 'long', 'lucky',
'misty', 'morning', 'muddy', 'mute', 'nameless', 'noisy', 'odd', 'old',
'orange', 'patient', 'plain', 'polished', 'proud', 'purple', 'quiet', 'rapid',
'raspy', 'red', 'restless', 'rough', 'round', 'royal', 'shiny', 'shrill',
'shy', 'silent', 'small', 'snowy', 'soft', 'solitary', 'sparkling', 'spring',
'square', 'steep', 'still', 'summer', 'super', 'sweet', 'throbbing', 'tight',
'tiny', 'twilight', 'wandering', 'weathered', 'white', 'wild', 'winter', 'wispy',
'withered', 'yellow', 'young'
]
nouns = [
'art', 'band', 'bar', 'base', 'bird', 'block', 'boat', 'bonus',
'bread', 'breeze', 'brook', 'bush', 'butterfly', 'cake', 'cell', 'cherry',
'cloud', 'credit', 'darkness', 'dawn', 'dew', 'disk', 'dream', 'dust',
'feather', 'field', 'fire', 'firefly', 'flower', 'fog', 'forest', 'frog',
'frost', 'glade', 'glitter', 'grass', 'hall', 'hat', 'haze', 'heart',
'hill', 'king', 'lab', 'lake', 'leaf', 'limit', 'math', 'meadow',
'mode', 'moon', 'morning', 'mountain', 'mouse', 'mud', 'night', 'paper',
'pine', 'poetry', 'pond', 'queen', 'rain', 'recipe', 'resonance', 'rice',
'river', 'salad', 'scene', 'sea', 'shadow', 'shape', 'silence', 'sky',
'smoke', 'snow', 'snowflake', 'sound', 'star', 'sun', 'sun', 'sunset',
'surf', 'term', 'thunder', 'tooth', 'tree', 'truth', 'union', 'unit',
'violet', 'voice', 'water', 'waterfall', 'wave', 'wildflower', 'wind', 'wood'
]
numbers = [str(x) for x in range(10)]
return ' '.join([
random.choice(adjectives).capitalize(),
random.choice(nouns).capitalize(),
random.choice(numbers) + random.choice(numbers),
])
|
def function[generate_project_name, parameter[]]:
constant[Generates a random project name.]
variable[adjectives] assign[=] list[[<ast.Constant object at 0x7da20c6aa440>, <ast.Constant object at 0x7da20c6abe80>, <ast.Constant object at 0x7da20c6abb20>, <ast.Constant object at 0x7da20c6aa410>, <ast.Constant object at 0x7da20c6a9720>, <ast.Constant object at 0x7da20c6a8c70>, <ast.Constant object at 0x7da20c6a9ff0>, <ast.Constant object at 0x7da20c6ab100>, <ast.Constant object at 0x7da20c6ab190>, <ast.Constant object at 0x7da20c6ab4c0>, <ast.Constant object at 0x7da20c6ab8b0>, <ast.Constant object at 0x7da20c6ab280>, <ast.Constant object at 0x7da20c6a8310>, <ast.Constant object at 0x7da20c6aac50>, <ast.Constant object at 0x7da20c6ab970>, <ast.Constant object at 0x7da20c6aa9b0>, <ast.Constant object at 0x7da20c6ab070>, <ast.Constant object at 0x7da20c6a8220>, <ast.Constant object at 0x7da20c6aab00>, <ast.Constant object at 0x7da20c6ab130>, <ast.Constant object at 0x7da20c6aae00>, <ast.Constant object at 0x7da20c6abb80>, <ast.Constant object at 0x7da20c6aa020>, <ast.Constant object at 0x7da20c6abcd0>, <ast.Constant object at 0x7da20c6a8e80>, <ast.Constant object at 0x7da20c6aa740>, <ast.Constant object at 0x7da20c6aa0b0>, <ast.Constant object at 0x7da20c6a93c0>, <ast.Constant object at 0x7da20c6a9a20>, <ast.Constant object at 0x7da20c6a8b80>, <ast.Constant object at 0x7da20c6a9ed0>, <ast.Constant object at 0x7da20c6ab8e0>, <ast.Constant object at 0x7da20c6a94e0>, <ast.Constant object at 0x7da20c6ab5e0>, <ast.Constant object at 0x7da20c6aafe0>, <ast.Constant object at 0x7da20c6ab940>, <ast.Constant object at 0x7da20c6abd90>, <ast.Constant object at 0x7da20c6a90f0>, <ast.Constant object at 0x7da20c6a8610>, <ast.Constant object at 0x7da20c6a9990>, <ast.Constant object at 0x7da20c6aac80>, <ast.Constant object at 0x7da20c6aac20>, <ast.Constant object at 0x7da20c6ab760>, <ast.Constant object at 0x7da20c6aa8f0>, <ast.Constant object at 0x7da20c6a9b40>, <ast.Constant object at 0x7da20c6a9b10>, <ast.Constant object at 0x7da20c6aaa40>, <ast.Constant object at 0x7da20c6abdf0>, <ast.Constant object at 0x7da20c6a96f0>, <ast.Constant object at 0x7da20c6aba00>, <ast.Constant object at 0x7da20c6a8340>, <ast.Constant object at 0x7da20c6aaa70>, <ast.Constant object at 0x7da20c6a87f0>, <ast.Constant object at 0x7da20c6a8fa0>, <ast.Constant object at 0x7da20c6a97e0>, <ast.Constant object at 0x7da20c6a9900>, <ast.Constant object at 0x7da20c6a84c0>, <ast.Constant object at 0x7da20c6abca0>, <ast.Constant object at 0x7da20c6a92a0>, <ast.Constant object at 0x7da20c6ab850>, <ast.Constant object at 0x7da20c6a9d50>, <ast.Constant object at 0x7da20c6aa9e0>, <ast.Constant object at 0x7da20c6a8fd0>, <ast.Constant object at 0x7da20c6aae30>, <ast.Constant object at 0x7da20c6a9840>, <ast.Constant object at 0x7da20c6ab430>, <ast.Constant object at 0x7da20c6aa380>, <ast.Constant object at 0x7da20c6a99f0>, <ast.Constant object at 0x7da20c6a9f90>, <ast.Constant object at 0x7da20c6aa170>, <ast.Constant object at 0x7da20c6abbb0>, <ast.Constant object at 0x7da20c6a91e0>, <ast.Constant object at 0x7da20c6a9570>, <ast.Constant object at 0x7da20c6ab2b0>, <ast.Constant object at 0x7da20c6ab0d0>, <ast.Constant object at 0x7da20c6abf10>, <ast.Constant object at 0x7da20c6ab3a0>, <ast.Constant object at 0x7da20c6a95d0>, <ast.Constant object at 0x7da20c6a9cf0>, <ast.Constant object at 0x7da20c6a8ca0>, <ast.Constant object at 0x7da20c6a9ba0>, <ast.Constant object at 0x7da20c6a9c90>, <ast.Constant object at 0x7da20c6aa800>, <ast.Constant object at 0x7da20c6a9330>, <ast.Constant object at 0x7da20c6ab370>, <ast.Constant object at 0x7da20c6abd00>, <ast.Constant object at 0x7da20c6a8b50>, <ast.Constant object at 0x7da20c6a8dc0>, <ast.Constant object at 0x7da20c6aacb0>, <ast.Constant object at 0x7da20c6a9a80>, <ast.Constant object at 0x7da20c6aba60>]]
variable[nouns] assign[=] list[[<ast.Constant object at 0x7da20c6a8ee0>, <ast.Constant object at 0x7da20c6a86d0>, <ast.Constant object at 0x7da20c6ab040>, <ast.Constant object at 0x7da20c6ab550>, <ast.Constant object at 0x7da20c6a9300>, <ast.Constant object at 0x7da20c6a8d60>, <ast.Constant object at 0x7da20c6a9750>, <ast.Constant object at 0x7da20c6a99c0>, <ast.Constant object at 0x7da20c6a9f00>, <ast.Constant object at 0x7da20c6a9c00>, <ast.Constant object at 0x7da20c6ab6a0>, <ast.Constant object at 0x7da20c6a89d0>, <ast.Constant object at 0x7da20c6a8640>, <ast.Constant object at 0x7da20c6aaf80>, <ast.Constant object at 0x7da20c6aaaa0>, <ast.Constant object at 0x7da20c6a85e0>, <ast.Constant object at 0x7da20c6a9f60>, <ast.Constant object at 0x7da20c6a8790>, <ast.Constant object at 0x7da20c6a9e10>, <ast.Constant object at 0x7da20c6abd30>, <ast.Constant object at 0x7da20c6ab880>, <ast.Constant object at 0x7da20c6aace0>, <ast.Constant object at 0x7da20c6aae60>, <ast.Constant object at 0x7da20c6aa3b0>, <ast.Constant object at 0x7da20c6ab400>, <ast.Constant object at 0x7da20c6aaf20>, <ast.Constant object at 0x7da20c6a80a0>, <ast.Constant object at 0x7da20c6a8580>, <ast.Constant object at 0x7da20c6ab4f0>, <ast.Constant object at 0x7da20c6ab730>, <ast.Constant object at 0x7da20c6a8490>, <ast.Constant object at 0x7da20c6ab580>, <ast.Constant object at 0x7da20c6abeb0>, <ast.Constant object at 0x7da18f00d510>, <ast.Constant object at 0x7da18f00d570>, <ast.Constant object at 0x7da18f00d4b0>, <ast.Constant object at 0x7da18f00f550>, <ast.Constant object at 0x7da18f00e620>, <ast.Constant object at 0x7da18f00c790>, <ast.Constant object at 0x7da18f00ca30>, <ast.Constant object at 0x7da18f00cdc0>, <ast.Constant object at 0x7da18f00fcd0>, <ast.Constant object at 0x7da18f00c610>, <ast.Constant object at 0x7da18f00ea70>, <ast.Constant object at 0x7da18f00f5b0>, <ast.Constant object at 0x7da18f00ded0>, <ast.Constant object at 0x7da18f00c880>, <ast.Constant object at 0x7da18f00e4d0>, <ast.Constant object at 0x7da18f00fbe0>, <ast.Constant object at 0x7da18f00e7d0>, <ast.Constant object at 0x7da18f00d330>, <ast.Constant object at 0x7da18f00ee30>, <ast.Constant object at 0x7da18f00dde0>, <ast.Constant object at 0x7da18f00e500>, <ast.Constant object at 0x7da18f00ccd0>, <ast.Constant object at 0x7da18f00c100>, <ast.Constant object at 0x7da18f00e5f0>, <ast.Constant object at 0x7da18f00c130>, <ast.Constant object at 0x7da18f00c4f0>, <ast.Constant object at 0x7da18f00e470>, <ast.Constant object at 0x7da18f00d5a0>, <ast.Constant object at 0x7da18f00da80>, <ast.Constant object at 0x7da18f00c190>, <ast.Constant object at 0x7da18f00ef50>, <ast.Constant object at 0x7da18f00d870>, <ast.Constant object at 0x7da18f00fdf0>, <ast.Constant object at 0x7da18f00ee60>, <ast.Constant object at 0x7da18f00e710>, <ast.Constant object at 0x7da18f00eef0>, <ast.Constant object at 0x7da18f00ec20>, <ast.Constant object at 0x7da18f00f2e0>, <ast.Constant object at 0x7da18f00d840>, <ast.Constant object at 0x7da18f00d3c0>, <ast.Constant object at 0x7da18f00dba0>, <ast.Constant object at 0x7da18f00e8c0>, <ast.Constant object at 0x7da18f00c8e0>, <ast.Constant object at 0x7da18f00eb30>, <ast.Constant object at 0x7da18f00cf40>, <ast.Constant object at 0x7da18f00eec0>, <ast.Constant object at 0x7da18f00efb0>, <ast.Constant object at 0x7da18f00f6a0>, <ast.Constant object at 0x7da18f00ca90>, <ast.Constant object at 0x7da18f00d660>, <ast.Constant object at 0x7da18f00f370>, <ast.Constant object at 0x7da18f00c040>, <ast.Constant object at 0x7da18f00e4a0>, <ast.Constant object at 0x7da18f00dcf0>, <ast.Constant object at 0x7da18f00f640>, <ast.Constant object at 0x7da18f00d6c0>, <ast.Constant object at 0x7da18f00e830>, <ast.Constant object at 0x7da18f00dab0>, <ast.Constant object at 0x7da18f00ceb0>, <ast.Constant object at 0x7da18f00c640>, <ast.Constant object at 0x7da18f00e8f0>, <ast.Constant object at 0x7da18f00c3d0>, <ast.Constant object at 0x7da18f00e740>]]
variable[numbers] assign[=] <ast.ListComp object at 0x7da18f00c520>
return[call[constant[ ].join, parameter[list[[<ast.Call object at 0x7da18f00cd90>, <ast.Call object at 0x7da18f00d8d0>, <ast.BinOp object at 0x7da18f00f3a0>]]]]]
|
keyword[def] identifier[generate_project_name] ():
literal[string]
identifier[adjectives] =[
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string]
]
identifier[nouns] =[
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string]
]
identifier[numbers] =[ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] )]
keyword[return] literal[string] . identifier[join] ([
identifier[random] . identifier[choice] ( identifier[adjectives] ). identifier[capitalize] (),
identifier[random] . identifier[choice] ( identifier[nouns] ). identifier[capitalize] (),
identifier[random] . identifier[choice] ( identifier[numbers] )+ identifier[random] . identifier[choice] ( identifier[numbers] ),
])
|
def generate_project_name():
"""Generates a random project name."""
adjectives = ['aged', 'ancient', 'autumn', 'billowing', 'bitter', 'black', 'blue', 'bold', 'broad', 'broken', 'calm', 'cold', 'cool', 'crimson', 'curly', 'damp', 'dark', 'dawn', 'delicate', 'divine', 'dry', 'empty', 'falling', 'fancy', 'flat', 'floral', 'fragrant', 'frosty', 'gentle', 'green', 'hidden', 'holy', 'icy', 'jolly', 'late', 'lingering', 'little', 'lively', 'long', 'lucky', 'misty', 'morning', 'muddy', 'mute', 'nameless', 'noisy', 'odd', 'old', 'orange', 'patient', 'plain', 'polished', 'proud', 'purple', 'quiet', 'rapid', 'raspy', 'red', 'restless', 'rough', 'round', 'royal', 'shiny', 'shrill', 'shy', 'silent', 'small', 'snowy', 'soft', 'solitary', 'sparkling', 'spring', 'square', 'steep', 'still', 'summer', 'super', 'sweet', 'throbbing', 'tight', 'tiny', 'twilight', 'wandering', 'weathered', 'white', 'wild', 'winter', 'wispy', 'withered', 'yellow', 'young']
nouns = ['art', 'band', 'bar', 'base', 'bird', 'block', 'boat', 'bonus', 'bread', 'breeze', 'brook', 'bush', 'butterfly', 'cake', 'cell', 'cherry', 'cloud', 'credit', 'darkness', 'dawn', 'dew', 'disk', 'dream', 'dust', 'feather', 'field', 'fire', 'firefly', 'flower', 'fog', 'forest', 'frog', 'frost', 'glade', 'glitter', 'grass', 'hall', 'hat', 'haze', 'heart', 'hill', 'king', 'lab', 'lake', 'leaf', 'limit', 'math', 'meadow', 'mode', 'moon', 'morning', 'mountain', 'mouse', 'mud', 'night', 'paper', 'pine', 'poetry', 'pond', 'queen', 'rain', 'recipe', 'resonance', 'rice', 'river', 'salad', 'scene', 'sea', 'shadow', 'shape', 'silence', 'sky', 'smoke', 'snow', 'snowflake', 'sound', 'star', 'sun', 'sun', 'sunset', 'surf', 'term', 'thunder', 'tooth', 'tree', 'truth', 'union', 'unit', 'violet', 'voice', 'water', 'waterfall', 'wave', 'wildflower', 'wind', 'wood']
numbers = [str(x) for x in range(10)]
return ' '.join([random.choice(adjectives).capitalize(), random.choice(nouns).capitalize(), random.choice(numbers) + random.choice(numbers)])
|
def plot_graph_folium(G, graph_map=None, popup_attribute=None,
tiles='cartodbpositron', zoom=1, fit_bounds=True,
edge_color='#333333', edge_width=5, edge_opacity=1):
"""
Plot a graph on an interactive folium web map.
Note that anything larger than a small city can take a long time to plot and
create a large web map file that is very slow to load as JavaScript.
Parameters
----------
G : networkx multidigraph
graph_map : folium.folium.Map
if not None, plot the graph on this preexisting folium map object
popup_attribute : string
edge attribute to display in a pop-up when an edge is clicked
tiles : string
name of a folium tileset
zoom : int
initial zoom level for the map
fit_bounds : bool
if True, fit the map to the boundaries of the route's edges
edge_color : string
color of the edge lines
edge_width : numeric
width of the edge lines
edge_opacity : numeric
opacity of the edge lines
Returns
-------
graph_map : folium.folium.Map
"""
# check if we were able to import folium successfully
if not folium:
raise ImportError('The folium package must be installed to use this optional feature.')
# create gdf of the graph edges
gdf_edges = graph_to_gdfs(G, nodes=False, fill_edge_geometry=True)
# get graph centroid
x, y = gdf_edges.unary_union.centroid.xy
graph_centroid = (y[0], x[0])
# create the folium web map if one wasn't passed-in
if graph_map is None:
graph_map = folium.Map(location=graph_centroid, zoom_start=zoom, tiles=tiles)
# add each graph edge to the map
for _, row in gdf_edges.iterrows():
pl = make_folium_polyline(edge=row, edge_color=edge_color, edge_width=edge_width,
edge_opacity=edge_opacity, popup_attribute=popup_attribute)
pl.add_to(graph_map)
# if fit_bounds is True, fit the map to the bounds of the route by passing
# list of lat-lng points as [southwest, northeast]
if fit_bounds:
tb = gdf_edges.total_bounds
bounds = [(tb[1], tb[0]), (tb[3], tb[2])]
graph_map.fit_bounds(bounds)
return graph_map
|
def function[plot_graph_folium, parameter[G, graph_map, popup_attribute, tiles, zoom, fit_bounds, edge_color, edge_width, edge_opacity]]:
constant[
Plot a graph on an interactive folium web map.
Note that anything larger than a small city can take a long time to plot and
create a large web map file that is very slow to load as JavaScript.
Parameters
----------
G : networkx multidigraph
graph_map : folium.folium.Map
if not None, plot the graph on this preexisting folium map object
popup_attribute : string
edge attribute to display in a pop-up when an edge is clicked
tiles : string
name of a folium tileset
zoom : int
initial zoom level for the map
fit_bounds : bool
if True, fit the map to the boundaries of the route's edges
edge_color : string
color of the edge lines
edge_width : numeric
width of the edge lines
edge_opacity : numeric
opacity of the edge lines
Returns
-------
graph_map : folium.folium.Map
]
if <ast.UnaryOp object at 0x7da1b1b10850> begin[:]
<ast.Raise object at 0x7da1b1b11840>
variable[gdf_edges] assign[=] call[name[graph_to_gdfs], parameter[name[G]]]
<ast.Tuple object at 0x7da1b1cee440> assign[=] name[gdf_edges].unary_union.centroid.xy
variable[graph_centroid] assign[=] tuple[[<ast.Subscript object at 0x7da1b1cefc10>, <ast.Subscript object at 0x7da1b1ceebc0>]]
if compare[name[graph_map] is constant[None]] begin[:]
variable[graph_map] assign[=] call[name[folium].Map, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b1cefcd0>, <ast.Name object at 0x7da1b1cefdc0>]]] in starred[call[name[gdf_edges].iterrows, parameter[]]] begin[:]
variable[pl] assign[=] call[name[make_folium_polyline], parameter[]]
call[name[pl].add_to, parameter[name[graph_map]]]
if name[fit_bounds] begin[:]
variable[tb] assign[=] name[gdf_edges].total_bounds
variable[bounds] assign[=] list[[<ast.Tuple object at 0x7da1b1ceef80>, <ast.Tuple object at 0x7da1b1cee410>]]
call[name[graph_map].fit_bounds, parameter[name[bounds]]]
return[name[graph_map]]
|
keyword[def] identifier[plot_graph_folium] ( identifier[G] , identifier[graph_map] = keyword[None] , identifier[popup_attribute] = keyword[None] ,
identifier[tiles] = literal[string] , identifier[zoom] = literal[int] , identifier[fit_bounds] = keyword[True] ,
identifier[edge_color] = literal[string] , identifier[edge_width] = literal[int] , identifier[edge_opacity] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[folium] :
keyword[raise] identifier[ImportError] ( literal[string] )
identifier[gdf_edges] = identifier[graph_to_gdfs] ( identifier[G] , identifier[nodes] = keyword[False] , identifier[fill_edge_geometry] = keyword[True] )
identifier[x] , identifier[y] = identifier[gdf_edges] . identifier[unary_union] . identifier[centroid] . identifier[xy]
identifier[graph_centroid] =( identifier[y] [ literal[int] ], identifier[x] [ literal[int] ])
keyword[if] identifier[graph_map] keyword[is] keyword[None] :
identifier[graph_map] = identifier[folium] . identifier[Map] ( identifier[location] = identifier[graph_centroid] , identifier[zoom_start] = identifier[zoom] , identifier[tiles] = identifier[tiles] )
keyword[for] identifier[_] , identifier[row] keyword[in] identifier[gdf_edges] . identifier[iterrows] ():
identifier[pl] = identifier[make_folium_polyline] ( identifier[edge] = identifier[row] , identifier[edge_color] = identifier[edge_color] , identifier[edge_width] = identifier[edge_width] ,
identifier[edge_opacity] = identifier[edge_opacity] , identifier[popup_attribute] = identifier[popup_attribute] )
identifier[pl] . identifier[add_to] ( identifier[graph_map] )
keyword[if] identifier[fit_bounds] :
identifier[tb] = identifier[gdf_edges] . identifier[total_bounds]
identifier[bounds] =[( identifier[tb] [ literal[int] ], identifier[tb] [ literal[int] ]),( identifier[tb] [ literal[int] ], identifier[tb] [ literal[int] ])]
identifier[graph_map] . identifier[fit_bounds] ( identifier[bounds] )
keyword[return] identifier[graph_map]
|
def plot_graph_folium(G, graph_map=None, popup_attribute=None, tiles='cartodbpositron', zoom=1, fit_bounds=True, edge_color='#333333', edge_width=5, edge_opacity=1):
"""
Plot a graph on an interactive folium web map.
Note that anything larger than a small city can take a long time to plot and
create a large web map file that is very slow to load as JavaScript.
Parameters
----------
G : networkx multidigraph
graph_map : folium.folium.Map
if not None, plot the graph on this preexisting folium map object
popup_attribute : string
edge attribute to display in a pop-up when an edge is clicked
tiles : string
name of a folium tileset
zoom : int
initial zoom level for the map
fit_bounds : bool
if True, fit the map to the boundaries of the route's edges
edge_color : string
color of the edge lines
edge_width : numeric
width of the edge lines
edge_opacity : numeric
opacity of the edge lines
Returns
-------
graph_map : folium.folium.Map
"""
# check if we were able to import folium successfully
if not folium:
raise ImportError('The folium package must be installed to use this optional feature.') # depends on [control=['if'], data=[]]
# create gdf of the graph edges
gdf_edges = graph_to_gdfs(G, nodes=False, fill_edge_geometry=True)
# get graph centroid
(x, y) = gdf_edges.unary_union.centroid.xy
graph_centroid = (y[0], x[0])
# create the folium web map if one wasn't passed-in
if graph_map is None:
graph_map = folium.Map(location=graph_centroid, zoom_start=zoom, tiles=tiles) # depends on [control=['if'], data=['graph_map']]
# add each graph edge to the map
for (_, row) in gdf_edges.iterrows():
pl = make_folium_polyline(edge=row, edge_color=edge_color, edge_width=edge_width, edge_opacity=edge_opacity, popup_attribute=popup_attribute)
pl.add_to(graph_map) # depends on [control=['for'], data=[]]
# if fit_bounds is True, fit the map to the bounds of the route by passing
# list of lat-lng points as [southwest, northeast]
if fit_bounds:
tb = gdf_edges.total_bounds
bounds = [(tb[1], tb[0]), (tb[3], tb[2])]
graph_map.fit_bounds(bounds) # depends on [control=['if'], data=[]]
return graph_map
|
def solubility_parameter(self):
r'''Solubility parameter of the chemical at its
current temperature and pressure, in units of [Pa^0.5].
.. math::
\delta = \sqrt{\frac{\Delta H_{vap} - RT}{V_m}}
Calculated based on enthalpy of vaporization and molar volume.
Normally calculated at STP. For uses of this property, see
:obj:`thermo.solubility.solubility_parameter`.
Examples
--------
>>> Chemical('NH3').solubility_parameter
24766.329043856073
'''
return solubility_parameter(T=self.T, Hvapm=self.Hvapm, Vml=self.Vml,
Method=self.solubility_parameter_method,
CASRN=self.CAS)
|
def function[solubility_parameter, parameter[self]]:
constant[Solubility parameter of the chemical at its
current temperature and pressure, in units of [Pa^0.5].
.. math::
\delta = \sqrt{\frac{\Delta H_{vap} - RT}{V_m}}
Calculated based on enthalpy of vaporization and molar volume.
Normally calculated at STP. For uses of this property, see
:obj:`thermo.solubility.solubility_parameter`.
Examples
--------
>>> Chemical('NH3').solubility_parameter
24766.329043856073
]
return[call[name[solubility_parameter], parameter[]]]
|
keyword[def] identifier[solubility_parameter] ( identifier[self] ):
literal[string]
keyword[return] identifier[solubility_parameter] ( identifier[T] = identifier[self] . identifier[T] , identifier[Hvapm] = identifier[self] . identifier[Hvapm] , identifier[Vml] = identifier[self] . identifier[Vml] ,
identifier[Method] = identifier[self] . identifier[solubility_parameter_method] ,
identifier[CASRN] = identifier[self] . identifier[CAS] )
|
def solubility_parameter(self):
"""Solubility parameter of the chemical at its
current temperature and pressure, in units of [Pa^0.5].
.. math::
\\delta = \\sqrt{\\frac{\\Delta H_{vap} - RT}{V_m}}
Calculated based on enthalpy of vaporization and molar volume.
Normally calculated at STP. For uses of this property, see
:obj:`thermo.solubility.solubility_parameter`.
Examples
--------
>>> Chemical('NH3').solubility_parameter
24766.329043856073
"""
return solubility_parameter(T=self.T, Hvapm=self.Hvapm, Vml=self.Vml, Method=self.solubility_parameter_method, CASRN=self.CAS)
|
def CheckTemplates(self, base_dir, version):
"""Verify we have at least one template that matches maj.minor version."""
major_minor = ".".join(version.split(".")[0:2])
templates = glob.glob(
os.path.join(base_dir, "templates/*%s*.zip" % major_minor))
required_templates = set(
[x.replace("maj.minor", major_minor) for x in self.REQUIRED_TEMPLATES])
# Client templates have an extra version digit, e.g. 3.1.0.0
templates_present = set([
re.sub(r"_%s[^_]+_" % major_minor, "_%s_" % major_minor,
os.path.basename(x)) for x in templates
])
difference = required_templates - templates_present
if difference:
raise RuntimeError("Missing templates %s" % difference)
|
def function[CheckTemplates, parameter[self, base_dir, version]]:
constant[Verify we have at least one template that matches maj.minor version.]
variable[major_minor] assign[=] call[constant[.].join, parameter[call[call[name[version].split, parameter[constant[.]]]][<ast.Slice object at 0x7da1b1c0ec20>]]]
variable[templates] assign[=] call[name[glob].glob, parameter[call[name[os].path.join, parameter[name[base_dir], binary_operation[constant[templates/*%s*.zip] <ast.Mod object at 0x7da2590d6920> name[major_minor]]]]]]
variable[required_templates] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da1b1c0ef20>]]
variable[templates_present] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da1b1c0d840>]]
variable[difference] assign[=] binary_operation[name[required_templates] - name[templates_present]]
if name[difference] begin[:]
<ast.Raise object at 0x7da1b1b0e8c0>
|
keyword[def] identifier[CheckTemplates] ( identifier[self] , identifier[base_dir] , identifier[version] ):
literal[string]
identifier[major_minor] = literal[string] . identifier[join] ( identifier[version] . identifier[split] ( literal[string] )[ literal[int] : literal[int] ])
identifier[templates] = identifier[glob] . identifier[glob] (
identifier[os] . identifier[path] . identifier[join] ( identifier[base_dir] , literal[string] % identifier[major_minor] ))
identifier[required_templates] = identifier[set] (
[ identifier[x] . identifier[replace] ( literal[string] , identifier[major_minor] ) keyword[for] identifier[x] keyword[in] identifier[self] . identifier[REQUIRED_TEMPLATES] ])
identifier[templates_present] = identifier[set] ([
identifier[re] . identifier[sub] ( literal[string] % identifier[major_minor] , literal[string] % identifier[major_minor] ,
identifier[os] . identifier[path] . identifier[basename] ( identifier[x] )) keyword[for] identifier[x] keyword[in] identifier[templates]
])
identifier[difference] = identifier[required_templates] - identifier[templates_present]
keyword[if] identifier[difference] :
keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[difference] )
|
def CheckTemplates(self, base_dir, version):
"""Verify we have at least one template that matches maj.minor version."""
major_minor = '.'.join(version.split('.')[0:2])
templates = glob.glob(os.path.join(base_dir, 'templates/*%s*.zip' % major_minor))
required_templates = set([x.replace('maj.minor', major_minor) for x in self.REQUIRED_TEMPLATES])
# Client templates have an extra version digit, e.g. 3.1.0.0
templates_present = set([re.sub('_%s[^_]+_' % major_minor, '_%s_' % major_minor, os.path.basename(x)) for x in templates])
difference = required_templates - templates_present
if difference:
raise RuntimeError('Missing templates %s' % difference) # depends on [control=['if'], data=[]]
|
def init_subclass_by_name(baseclass, short_name, params):
"""
Find the subclass, `kls` of baseclass with class attribute `short_name`
that matches the supplied `short_name`, and then instantiate and return
that class with:
return kls(**params)
This function also tries its best to catch any possible TypeErrors due
to binding of the arguments, and rethrows them as nicely formatted
RuntimeErrors that are suitable for showing to users.
"""
sc = baseclass.__subclasses__()
for kls in sc:
if kls.short_name == short_name or \
(_is_collection(kls.short_name) and short_name in kls.short_name):
try:
return kls(**params)
except TypeError as e:
spec = inspect.getargspec(kls.__init__)
# try to give nice errors to the user if the params failed
# to bind
if 'unexpected' in str(e):
avail = join_quoted(spec.args[1:])
raise RuntimeError(
"%s's %s. Available params for this subclass are: %s."
% (short_name, str(e), avail))
elif 'takes exactly' in str(e):
required = join_quoted(spec.args[1:-len(spec.defaults)])
raise RuntimeError(
"%s's %s. Required params for this subclass are %s."
% (short_name, str(e), required))
elif 'takes at least' in str(e):
required = join_quoted(spec.args[1:-len(spec.defaults)])
optional = join_quoted(spec.args[-len(spec.defaults):])
raise RuntimeError(
"%s's %s. Required params for this subclass are: %s. "
"Optional params are: %s" % (
short_name, str(e), required, optional))
# :(
raise
chain = itertools.chain.from_iterable(
e.short_name if _is_collection(e.short_name) else [e.short_name]
for e in sc)
avail_names = ', '.join(str(n) for n in chain)
raise ValueError('"%s" is not a recognized subclass. available names '
'are: %s' % (short_name, avail_names))
|
def function[init_subclass_by_name, parameter[baseclass, short_name, params]]:
constant[
Find the subclass, `kls` of baseclass with class attribute `short_name`
that matches the supplied `short_name`, and then instantiate and return
that class with:
return kls(**params)
This function also tries its best to catch any possible TypeErrors due
to binding of the arguments, and rethrows them as nicely formatted
RuntimeErrors that are suitable for showing to users.
]
variable[sc] assign[=] call[name[baseclass].__subclasses__, parameter[]]
for taget[name[kls]] in starred[name[sc]] begin[:]
if <ast.BoolOp object at 0x7da1aff4df60> begin[:]
<ast.Try object at 0x7da1aff4e890>
variable[chain] assign[=] call[name[itertools].chain.from_iterable, parameter[<ast.GeneratorExp object at 0x7da2041d80a0>]]
variable[avail_names] assign[=] call[constant[, ].join, parameter[<ast.GeneratorExp object at 0x7da2041db190>]]
<ast.Raise object at 0x7da18dc07a60>
|
keyword[def] identifier[init_subclass_by_name] ( identifier[baseclass] , identifier[short_name] , identifier[params] ):
literal[string]
identifier[sc] = identifier[baseclass] . identifier[__subclasses__] ()
keyword[for] identifier[kls] keyword[in] identifier[sc] :
keyword[if] identifier[kls] . identifier[short_name] == identifier[short_name] keyword[or] ( identifier[_is_collection] ( identifier[kls] . identifier[short_name] ) keyword[and] identifier[short_name] keyword[in] identifier[kls] . identifier[short_name] ):
keyword[try] :
keyword[return] identifier[kls] (** identifier[params] )
keyword[except] identifier[TypeError] keyword[as] identifier[e] :
identifier[spec] = identifier[inspect] . identifier[getargspec] ( identifier[kls] . identifier[__init__] )
keyword[if] literal[string] keyword[in] identifier[str] ( identifier[e] ):
identifier[avail] = identifier[join_quoted] ( identifier[spec] . identifier[args] [ literal[int] :])
keyword[raise] identifier[RuntimeError] (
literal[string]
%( identifier[short_name] , identifier[str] ( identifier[e] ), identifier[avail] ))
keyword[elif] literal[string] keyword[in] identifier[str] ( identifier[e] ):
identifier[required] = identifier[join_quoted] ( identifier[spec] . identifier[args] [ literal[int] :- identifier[len] ( identifier[spec] . identifier[defaults] )])
keyword[raise] identifier[RuntimeError] (
literal[string]
%( identifier[short_name] , identifier[str] ( identifier[e] ), identifier[required] ))
keyword[elif] literal[string] keyword[in] identifier[str] ( identifier[e] ):
identifier[required] = identifier[join_quoted] ( identifier[spec] . identifier[args] [ literal[int] :- identifier[len] ( identifier[spec] . identifier[defaults] )])
identifier[optional] = identifier[join_quoted] ( identifier[spec] . identifier[args] [- identifier[len] ( identifier[spec] . identifier[defaults] ):])
keyword[raise] identifier[RuntimeError] (
literal[string]
literal[string] %(
identifier[short_name] , identifier[str] ( identifier[e] ), identifier[required] , identifier[optional] ))
keyword[raise]
identifier[chain] = identifier[itertools] . identifier[chain] . identifier[from_iterable] (
identifier[e] . identifier[short_name] keyword[if] identifier[_is_collection] ( identifier[e] . identifier[short_name] ) keyword[else] [ identifier[e] . identifier[short_name] ]
keyword[for] identifier[e] keyword[in] identifier[sc] )
identifier[avail_names] = literal[string] . identifier[join] ( identifier[str] ( identifier[n] ) keyword[for] identifier[n] keyword[in] identifier[chain] )
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] %( identifier[short_name] , identifier[avail_names] ))
|
def init_subclass_by_name(baseclass, short_name, params):
"""
Find the subclass, `kls` of baseclass with class attribute `short_name`
that matches the supplied `short_name`, and then instantiate and return
that class with:
return kls(**params)
This function also tries its best to catch any possible TypeErrors due
to binding of the arguments, and rethrows them as nicely formatted
RuntimeErrors that are suitable for showing to users.
"""
sc = baseclass.__subclasses__()
for kls in sc:
if kls.short_name == short_name or (_is_collection(kls.short_name) and short_name in kls.short_name):
try:
return kls(**params) # depends on [control=['try'], data=[]]
except TypeError as e:
spec = inspect.getargspec(kls.__init__)
# try to give nice errors to the user if the params failed
# to bind
if 'unexpected' in str(e):
avail = join_quoted(spec.args[1:])
raise RuntimeError("%s's %s. Available params for this subclass are: %s." % (short_name, str(e), avail)) # depends on [control=['if'], data=[]]
elif 'takes exactly' in str(e):
required = join_quoted(spec.args[1:-len(spec.defaults)])
raise RuntimeError("%s's %s. Required params for this subclass are %s." % (short_name, str(e), required)) # depends on [control=['if'], data=[]]
elif 'takes at least' in str(e):
required = join_quoted(spec.args[1:-len(spec.defaults)])
optional = join_quoted(spec.args[-len(spec.defaults):])
raise RuntimeError("%s's %s. Required params for this subclass are: %s. Optional params are: %s" % (short_name, str(e), required, optional)) # depends on [control=['if'], data=[]]
# :(
raise # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['kls']]
chain = itertools.chain.from_iterable((e.short_name if _is_collection(e.short_name) else [e.short_name] for e in sc))
avail_names = ', '.join((str(n) for n in chain))
raise ValueError('"%s" is not a recognized subclass. available names are: %s' % (short_name, avail_names))
|
def calc_2d_ellipse_properties(cov,nstd=2):
"""Calculate the properties for 2d ellipse given the covariance matrix."""
def eigsorted(cov):
vals, vecs = np.linalg.eigh(cov)
order = vals.argsort()[::-1]
return vals[order], vecs[:,order]
vals, vecs = eigsorted(cov)
width, height = 2 * nstd * np.sqrt(vals[:2])
normal = vecs[:,2] if vecs[2,2] > 0 else -vecs[:,2]
d = np.cross(normal, (0, 0, 1))
M = rotation_matrix(d)
x_trans = np.dot(M,(1,0,0))
cos_val = np.dot(vecs[:,0],x_trans)/np.linalg.norm(vecs[:,0])/np.linalg.norm(x_trans)
theta = np.degrees(np.arccos(np.clip(cos_val, -1, 1))) # if you really want the angle
return { 'width': width, 'height': height, 'angle': theta }, normal
|
def function[calc_2d_ellipse_properties, parameter[cov, nstd]]:
constant[Calculate the properties for 2d ellipse given the covariance matrix.]
def function[eigsorted, parameter[cov]]:
<ast.Tuple object at 0x7da1b13934c0> assign[=] call[name[np].linalg.eigh, parameter[name[cov]]]
variable[order] assign[=] call[call[name[vals].argsort, parameter[]]][<ast.Slice object at 0x7da1b13921d0>]
return[tuple[[<ast.Subscript object at 0x7da1b1392590>, <ast.Subscript object at 0x7da1b1390e50>]]]
<ast.Tuple object at 0x7da18f723100> assign[=] call[name[eigsorted], parameter[name[cov]]]
<ast.Tuple object at 0x7da18f721d80> assign[=] binary_operation[binary_operation[constant[2] * name[nstd]] * call[name[np].sqrt, parameter[call[name[vals]][<ast.Slice object at 0x7da18f7229e0>]]]]
variable[normal] assign[=] <ast.IfExp object at 0x7da1b1391b40>
variable[d] assign[=] call[name[np].cross, parameter[name[normal], tuple[[<ast.Constant object at 0x7da1b12c5390>, <ast.Constant object at 0x7da1b12c7bb0>, <ast.Constant object at 0x7da18f09db70>]]]]
variable[M] assign[=] call[name[rotation_matrix], parameter[name[d]]]
variable[x_trans] assign[=] call[name[np].dot, parameter[name[M], tuple[[<ast.Constant object at 0x7da18f09ded0>, <ast.Constant object at 0x7da18f09cf70>, <ast.Constant object at 0x7da18f09d750>]]]]
variable[cos_val] assign[=] binary_operation[binary_operation[call[name[np].dot, parameter[call[name[vecs]][tuple[[<ast.Slice object at 0x7da18f09caf0>, <ast.Constant object at 0x7da18f09ec20>]]], name[x_trans]]] / call[name[np].linalg.norm, parameter[call[name[vecs]][tuple[[<ast.Slice object at 0x7da18f09ee60>, <ast.Constant object at 0x7da18f09ff10>]]]]]] / call[name[np].linalg.norm, parameter[name[x_trans]]]]
variable[theta] assign[=] call[name[np].degrees, parameter[call[name[np].arccos, parameter[call[name[np].clip, parameter[name[cos_val], <ast.UnaryOp object at 0x7da1b13e8ca0>, constant[1]]]]]]]
return[tuple[[<ast.Dict object at 0x7da2054a7550>, <ast.Name object at 0x7da2054a77c0>]]]
|
keyword[def] identifier[calc_2d_ellipse_properties] ( identifier[cov] , identifier[nstd] = literal[int] ):
literal[string]
keyword[def] identifier[eigsorted] ( identifier[cov] ):
identifier[vals] , identifier[vecs] = identifier[np] . identifier[linalg] . identifier[eigh] ( identifier[cov] )
identifier[order] = identifier[vals] . identifier[argsort] ()[::- literal[int] ]
keyword[return] identifier[vals] [ identifier[order] ], identifier[vecs] [:, identifier[order] ]
identifier[vals] , identifier[vecs] = identifier[eigsorted] ( identifier[cov] )
identifier[width] , identifier[height] = literal[int] * identifier[nstd] * identifier[np] . identifier[sqrt] ( identifier[vals] [: literal[int] ])
identifier[normal] = identifier[vecs] [:, literal[int] ] keyword[if] identifier[vecs] [ literal[int] , literal[int] ]> literal[int] keyword[else] - identifier[vecs] [:, literal[int] ]
identifier[d] = identifier[np] . identifier[cross] ( identifier[normal] ,( literal[int] , literal[int] , literal[int] ))
identifier[M] = identifier[rotation_matrix] ( identifier[d] )
identifier[x_trans] = identifier[np] . identifier[dot] ( identifier[M] ,( literal[int] , literal[int] , literal[int] ))
identifier[cos_val] = identifier[np] . identifier[dot] ( identifier[vecs] [:, literal[int] ], identifier[x_trans] )/ identifier[np] . identifier[linalg] . identifier[norm] ( identifier[vecs] [:, literal[int] ])/ identifier[np] . identifier[linalg] . identifier[norm] ( identifier[x_trans] )
identifier[theta] = identifier[np] . identifier[degrees] ( identifier[np] . identifier[arccos] ( identifier[np] . identifier[clip] ( identifier[cos_val] ,- literal[int] , literal[int] )))
keyword[return] { literal[string] : identifier[width] , literal[string] : identifier[height] , literal[string] : identifier[theta] }, identifier[normal]
|
def calc_2d_ellipse_properties(cov, nstd=2):
"""Calculate the properties for 2d ellipse given the covariance matrix."""
def eigsorted(cov):
(vals, vecs) = np.linalg.eigh(cov)
order = vals.argsort()[::-1]
return (vals[order], vecs[:, order])
(vals, vecs) = eigsorted(cov)
(width, height) = 2 * nstd * np.sqrt(vals[:2])
normal = vecs[:, 2] if vecs[2, 2] > 0 else -vecs[:, 2]
d = np.cross(normal, (0, 0, 1))
M = rotation_matrix(d)
x_trans = np.dot(M, (1, 0, 0))
cos_val = np.dot(vecs[:, 0], x_trans) / np.linalg.norm(vecs[:, 0]) / np.linalg.norm(x_trans)
theta = np.degrees(np.arccos(np.clip(cos_val, -1, 1))) # if you really want the angle
return ({'width': width, 'height': height, 'angle': theta}, normal)
|
def run_synthetic_SGLD():
"""Run synthetic SGLD"""
theta1 = 0
theta2 = 1
sigma1 = numpy.sqrt(10)
sigma2 = 1
sigmax = numpy.sqrt(2)
X = load_synthetic(theta1=theta1, theta2=theta2, sigmax=sigmax, num=100)
minibatch_size = 1
total_iter_num = 1000000
lr_scheduler = SGLDScheduler(begin_rate=0.01, end_rate=0.0001, total_iter_num=total_iter_num,
factor=0.55)
optimizer = mx.optimizer.create('sgld',
learning_rate=None,
rescale_grad=1.0,
lr_scheduler=lr_scheduler,
wd=0)
updater = mx.optimizer.get_updater(optimizer)
theta = mx.random.normal(0, 1, (2,), mx.cpu())
grad = nd.empty((2,), mx.cpu())
samples = numpy.zeros((2, total_iter_num))
start = time.time()
for i in range(total_iter_num):
if (i + 1) % 100000 == 0:
end = time.time()
print("Iter:%d, Time spent: %f" % (i + 1, end - start))
start = time.time()
ind = numpy.random.randint(0, X.shape[0])
synthetic_grad(X[ind], theta, sigma1, sigma2, sigmax,
rescale_grad=X.shape[0] / float(minibatch_size), grad=grad)
updater('theta', grad, theta)
samples[:, i] = theta.asnumpy()
plt.hist2d(samples[0, :], samples[1, :], (200, 200), cmap=plt.cm.jet)
plt.colorbar()
plt.show()
|
def function[run_synthetic_SGLD, parameter[]]:
constant[Run synthetic SGLD]
variable[theta1] assign[=] constant[0]
variable[theta2] assign[=] constant[1]
variable[sigma1] assign[=] call[name[numpy].sqrt, parameter[constant[10]]]
variable[sigma2] assign[=] constant[1]
variable[sigmax] assign[=] call[name[numpy].sqrt, parameter[constant[2]]]
variable[X] assign[=] call[name[load_synthetic], parameter[]]
variable[minibatch_size] assign[=] constant[1]
variable[total_iter_num] assign[=] constant[1000000]
variable[lr_scheduler] assign[=] call[name[SGLDScheduler], parameter[]]
variable[optimizer] assign[=] call[name[mx].optimizer.create, parameter[constant[sgld]]]
variable[updater] assign[=] call[name[mx].optimizer.get_updater, parameter[name[optimizer]]]
variable[theta] assign[=] call[name[mx].random.normal, parameter[constant[0], constant[1], tuple[[<ast.Constant object at 0x7da1b20eee90>]], call[name[mx].cpu, parameter[]]]]
variable[grad] assign[=] call[name[nd].empty, parameter[tuple[[<ast.Constant object at 0x7da1b20eecb0>]], call[name[mx].cpu, parameter[]]]]
variable[samples] assign[=] call[name[numpy].zeros, parameter[tuple[[<ast.Constant object at 0x7da1b20eead0>, <ast.Name object at 0x7da1b20eeaa0>]]]]
variable[start] assign[=] call[name[time].time, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[name[total_iter_num]]]] begin[:]
if compare[binary_operation[binary_operation[name[i] + constant[1]] <ast.Mod object at 0x7da2590d6920> constant[100000]] equal[==] constant[0]] begin[:]
variable[end] assign[=] call[name[time].time, parameter[]]
call[name[print], parameter[binary_operation[constant[Iter:%d, Time spent: %f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b1e041f0>, <ast.BinOp object at 0x7da1b1e04370>]]]]]
variable[start] assign[=] call[name[time].time, parameter[]]
variable[ind] assign[=] call[name[numpy].random.randint, parameter[constant[0], call[name[X].shape][constant[0]]]]
call[name[synthetic_grad], parameter[call[name[X]][name[ind]], name[theta], name[sigma1], name[sigma2], name[sigmax]]]
call[name[updater], parameter[constant[theta], name[grad], name[theta]]]
call[name[samples]][tuple[[<ast.Slice object at 0x7da1b1e06140>, <ast.Name object at 0x7da1b1e14e50>]]] assign[=] call[name[theta].asnumpy, parameter[]]
call[name[plt].hist2d, parameter[call[name[samples]][tuple[[<ast.Constant object at 0x7da2054a5180>, <ast.Slice object at 0x7da2054a6020>]]], call[name[samples]][tuple[[<ast.Constant object at 0x7da2054a7c40>, <ast.Slice object at 0x7da2054a6ec0>]]], tuple[[<ast.Constant object at 0x7da2054a6440>, <ast.Constant object at 0x7da2054a7d30>]]]]
call[name[plt].colorbar, parameter[]]
call[name[plt].show, parameter[]]
|
keyword[def] identifier[run_synthetic_SGLD] ():
literal[string]
identifier[theta1] = literal[int]
identifier[theta2] = literal[int]
identifier[sigma1] = identifier[numpy] . identifier[sqrt] ( literal[int] )
identifier[sigma2] = literal[int]
identifier[sigmax] = identifier[numpy] . identifier[sqrt] ( literal[int] )
identifier[X] = identifier[load_synthetic] ( identifier[theta1] = identifier[theta1] , identifier[theta2] = identifier[theta2] , identifier[sigmax] = identifier[sigmax] , identifier[num] = literal[int] )
identifier[minibatch_size] = literal[int]
identifier[total_iter_num] = literal[int]
identifier[lr_scheduler] = identifier[SGLDScheduler] ( identifier[begin_rate] = literal[int] , identifier[end_rate] = literal[int] , identifier[total_iter_num] = identifier[total_iter_num] ,
identifier[factor] = literal[int] )
identifier[optimizer] = identifier[mx] . identifier[optimizer] . identifier[create] ( literal[string] ,
identifier[learning_rate] = keyword[None] ,
identifier[rescale_grad] = literal[int] ,
identifier[lr_scheduler] = identifier[lr_scheduler] ,
identifier[wd] = literal[int] )
identifier[updater] = identifier[mx] . identifier[optimizer] . identifier[get_updater] ( identifier[optimizer] )
identifier[theta] = identifier[mx] . identifier[random] . identifier[normal] ( literal[int] , literal[int] ,( literal[int] ,), identifier[mx] . identifier[cpu] ())
identifier[grad] = identifier[nd] . identifier[empty] (( literal[int] ,), identifier[mx] . identifier[cpu] ())
identifier[samples] = identifier[numpy] . identifier[zeros] (( literal[int] , identifier[total_iter_num] ))
identifier[start] = identifier[time] . identifier[time] ()
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[total_iter_num] ):
keyword[if] ( identifier[i] + literal[int] )% literal[int] == literal[int] :
identifier[end] = identifier[time] . identifier[time] ()
identifier[print] ( literal[string] %( identifier[i] + literal[int] , identifier[end] - identifier[start] ))
identifier[start] = identifier[time] . identifier[time] ()
identifier[ind] = identifier[numpy] . identifier[random] . identifier[randint] ( literal[int] , identifier[X] . identifier[shape] [ literal[int] ])
identifier[synthetic_grad] ( identifier[X] [ identifier[ind] ], identifier[theta] , identifier[sigma1] , identifier[sigma2] , identifier[sigmax] ,
identifier[rescale_grad] = identifier[X] . identifier[shape] [ literal[int] ]/ identifier[float] ( identifier[minibatch_size] ), identifier[grad] = identifier[grad] )
identifier[updater] ( literal[string] , identifier[grad] , identifier[theta] )
identifier[samples] [:, identifier[i] ]= identifier[theta] . identifier[asnumpy] ()
identifier[plt] . identifier[hist2d] ( identifier[samples] [ literal[int] ,:], identifier[samples] [ literal[int] ,:],( literal[int] , literal[int] ), identifier[cmap] = identifier[plt] . identifier[cm] . identifier[jet] )
identifier[plt] . identifier[colorbar] ()
identifier[plt] . identifier[show] ()
|
def run_synthetic_SGLD():
"""Run synthetic SGLD"""
theta1 = 0
theta2 = 1
sigma1 = numpy.sqrt(10)
sigma2 = 1
sigmax = numpy.sqrt(2)
X = load_synthetic(theta1=theta1, theta2=theta2, sigmax=sigmax, num=100)
minibatch_size = 1
total_iter_num = 1000000
lr_scheduler = SGLDScheduler(begin_rate=0.01, end_rate=0.0001, total_iter_num=total_iter_num, factor=0.55)
optimizer = mx.optimizer.create('sgld', learning_rate=None, rescale_grad=1.0, lr_scheduler=lr_scheduler, wd=0)
updater = mx.optimizer.get_updater(optimizer)
theta = mx.random.normal(0, 1, (2,), mx.cpu())
grad = nd.empty((2,), mx.cpu())
samples = numpy.zeros((2, total_iter_num))
start = time.time()
for i in range(total_iter_num):
if (i + 1) % 100000 == 0:
end = time.time()
print('Iter:%d, Time spent: %f' % (i + 1, end - start))
start = time.time() # depends on [control=['if'], data=[]]
ind = numpy.random.randint(0, X.shape[0])
synthetic_grad(X[ind], theta, sigma1, sigma2, sigmax, rescale_grad=X.shape[0] / float(minibatch_size), grad=grad)
updater('theta', grad, theta)
samples[:, i] = theta.asnumpy() # depends on [control=['for'], data=['i']]
plt.hist2d(samples[0, :], samples[1, :], (200, 200), cmap=plt.cm.jet)
plt.colorbar()
plt.show()
|
def _refresh_mine_cache(wrapped):
'''
Decorator to trigger a refresh of salt mine data.
'''
@functools.wraps(wrapped)
def wrapper(*args, **kwargs):
'''
refresh salt mine on exit.
'''
returned = wrapped(*args, **__utils__['args.clean_kwargs'](**kwargs))
if _check_update_mine():
__salt__['mine.send'](
'docker.ps', verbose=True, all=True, host=True)
return returned
return wrapper
|
def function[_refresh_mine_cache, parameter[wrapped]]:
constant[
Decorator to trigger a refresh of salt mine data.
]
def function[wrapper, parameter[]]:
constant[
refresh salt mine on exit.
]
variable[returned] assign[=] call[name[wrapped], parameter[<ast.Starred object at 0x7da1b1f7a380>]]
if call[name[_check_update_mine], parameter[]] begin[:]
call[call[name[__salt__]][constant[mine.send]], parameter[constant[docker.ps]]]
return[name[returned]]
return[name[wrapper]]
|
keyword[def] identifier[_refresh_mine_cache] ( identifier[wrapped] ):
literal[string]
@ identifier[functools] . identifier[wraps] ( identifier[wrapped] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[returned] = identifier[wrapped] (* identifier[args] ,** identifier[__utils__] [ literal[string] ](** identifier[kwargs] ))
keyword[if] identifier[_check_update_mine] ():
identifier[__salt__] [ literal[string] ](
literal[string] , identifier[verbose] = keyword[True] , identifier[all] = keyword[True] , identifier[host] = keyword[True] )
keyword[return] identifier[returned]
keyword[return] identifier[wrapper]
|
def _refresh_mine_cache(wrapped):
"""
Decorator to trigger a refresh of salt mine data.
"""
@functools.wraps(wrapped)
def wrapper(*args, **kwargs):
"""
refresh salt mine on exit.
"""
returned = wrapped(*args, **__utils__['args.clean_kwargs'](**kwargs))
if _check_update_mine():
__salt__['mine.send']('docker.ps', verbose=True, all=True, host=True) # depends on [control=['if'], data=[]]
return returned
return wrapper
|
def unlink_parameter(self, param):
"""
:param param: param object to remove from being a parameter of this parameterized object.
"""
if not param in self.parameters:
try:
raise HierarchyError("{} does not belong to this object {}, remove parameters directly from their respective parents".format(param._short(), self.name))
except AttributeError:
raise HierarchyError("{} does not seem to be a parameter, remove parameters directly from their respective parents".format(str(param)))
start = sum([p.size for p in self.parameters[:param._parent_index_]])
self.size -= param.size
del self.parameters[param._parent_index_]
self._remove_parameter_name(param)
param._disconnect_parent()
param.remove_observer(self, self._pass_through_notify_observers)
for name, iop in self._index_operations.items():
iop.shift_left(start, param.size)
self._connect_parameters()
self._notify_parent_change()
parent = self._parent_
while parent is not None:
parent.size -= param.size
parent = parent._parent_
self._highest_parent_._connect_parameters()
self._highest_parent_._connect_fixes()
self._highest_parent_._notify_parent_change()
|
def function[unlink_parameter, parameter[self, param]]:
constant[
:param param: param object to remove from being a parameter of this parameterized object.
]
if <ast.UnaryOp object at 0x7da1b0f58730> begin[:]
<ast.Try object at 0x7da1b0f5b1f0>
variable[start] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da1b0d11ba0>]]
<ast.AugAssign object at 0x7da1b0d10e50>
<ast.Delete object at 0x7da1b0d12a10>
call[name[self]._remove_parameter_name, parameter[name[param]]]
call[name[param]._disconnect_parent, parameter[]]
call[name[param].remove_observer, parameter[name[self], name[self]._pass_through_notify_observers]]
for taget[tuple[[<ast.Name object at 0x7da1b0d102b0>, <ast.Name object at 0x7da1b0d13070>]]] in starred[call[name[self]._index_operations.items, parameter[]]] begin[:]
call[name[iop].shift_left, parameter[name[start], name[param].size]]
call[name[self]._connect_parameters, parameter[]]
call[name[self]._notify_parent_change, parameter[]]
variable[parent] assign[=] name[self]._parent_
while compare[name[parent] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b0d12c50>
variable[parent] assign[=] name[parent]._parent_
call[name[self]._highest_parent_._connect_parameters, parameter[]]
call[name[self]._highest_parent_._connect_fixes, parameter[]]
call[name[self]._highest_parent_._notify_parent_change, parameter[]]
|
keyword[def] identifier[unlink_parameter] ( identifier[self] , identifier[param] ):
literal[string]
keyword[if] keyword[not] identifier[param] keyword[in] identifier[self] . identifier[parameters] :
keyword[try] :
keyword[raise] identifier[HierarchyError] ( literal[string] . identifier[format] ( identifier[param] . identifier[_short] (), identifier[self] . identifier[name] ))
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[HierarchyError] ( literal[string] . identifier[format] ( identifier[str] ( identifier[param] )))
identifier[start] = identifier[sum] ([ identifier[p] . identifier[size] keyword[for] identifier[p] keyword[in] identifier[self] . identifier[parameters] [: identifier[param] . identifier[_parent_index_] ]])
identifier[self] . identifier[size] -= identifier[param] . identifier[size]
keyword[del] identifier[self] . identifier[parameters] [ identifier[param] . identifier[_parent_index_] ]
identifier[self] . identifier[_remove_parameter_name] ( identifier[param] )
identifier[param] . identifier[_disconnect_parent] ()
identifier[param] . identifier[remove_observer] ( identifier[self] , identifier[self] . identifier[_pass_through_notify_observers] )
keyword[for] identifier[name] , identifier[iop] keyword[in] identifier[self] . identifier[_index_operations] . identifier[items] ():
identifier[iop] . identifier[shift_left] ( identifier[start] , identifier[param] . identifier[size] )
identifier[self] . identifier[_connect_parameters] ()
identifier[self] . identifier[_notify_parent_change] ()
identifier[parent] = identifier[self] . identifier[_parent_]
keyword[while] identifier[parent] keyword[is] keyword[not] keyword[None] :
identifier[parent] . identifier[size] -= identifier[param] . identifier[size]
identifier[parent] = identifier[parent] . identifier[_parent_]
identifier[self] . identifier[_highest_parent_] . identifier[_connect_parameters] ()
identifier[self] . identifier[_highest_parent_] . identifier[_connect_fixes] ()
identifier[self] . identifier[_highest_parent_] . identifier[_notify_parent_change] ()
|
def unlink_parameter(self, param):
"""
:param param: param object to remove from being a parameter of this parameterized object.
"""
if not param in self.parameters:
try:
raise HierarchyError('{} does not belong to this object {}, remove parameters directly from their respective parents'.format(param._short(), self.name)) # depends on [control=['try'], data=[]]
except AttributeError:
raise HierarchyError('{} does not seem to be a parameter, remove parameters directly from their respective parents'.format(str(param))) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
start = sum([p.size for p in self.parameters[:param._parent_index_]])
self.size -= param.size
del self.parameters[param._parent_index_]
self._remove_parameter_name(param)
param._disconnect_parent()
param.remove_observer(self, self._pass_through_notify_observers)
for (name, iop) in self._index_operations.items():
iop.shift_left(start, param.size) # depends on [control=['for'], data=[]]
self._connect_parameters()
self._notify_parent_change()
parent = self._parent_
while parent is not None:
parent.size -= param.size
parent = parent._parent_ # depends on [control=['while'], data=['parent']]
self._highest_parent_._connect_parameters()
self._highest_parent_._connect_fixes()
self._highest_parent_._notify_parent_change()
|
def p_typeDeclarationRHS(self, p):
"""typeDeclarationRHS : Syntax
| TEXTUAL_CONVENTION DisplayPart STATUS Status DESCRIPTION Text ReferPart SYNTAX Syntax
| choiceClause"""
if p[1]:
if p[1] == 'TEXTUAL-CONVENTION':
p[0] = ('typeDeclarationRHS', p[2], # display
p[4], # status
(p[5], p[6]), # description
p[7], # reference
p[9]) # syntax
else:
p[0] = ('typeDeclarationRHS', p[1])
|
def function[p_typeDeclarationRHS, parameter[self, p]]:
constant[typeDeclarationRHS : Syntax
| TEXTUAL_CONVENTION DisplayPart STATUS Status DESCRIPTION Text ReferPart SYNTAX Syntax
| choiceClause]
if call[name[p]][constant[1]] begin[:]
if compare[call[name[p]][constant[1]] equal[==] constant[TEXTUAL-CONVENTION]] begin[:]
call[name[p]][constant[0]] assign[=] tuple[[<ast.Constant object at 0x7da1b016fa00>, <ast.Subscript object at 0x7da1b016c850>, <ast.Subscript object at 0x7da1b016c070>, <ast.Tuple object at 0x7da1b016d8a0>, <ast.Subscript object at 0x7da1b016ca30>, <ast.Subscript object at 0x7da1b016c2e0>]]
|
keyword[def] identifier[p_typeDeclarationRHS] ( identifier[self] , identifier[p] ):
literal[string]
keyword[if] identifier[p] [ literal[int] ]:
keyword[if] identifier[p] [ literal[int] ]== literal[string] :
identifier[p] [ literal[int] ]=( literal[string] , identifier[p] [ literal[int] ],
identifier[p] [ literal[int] ],
( identifier[p] [ literal[int] ], identifier[p] [ literal[int] ]),
identifier[p] [ literal[int] ],
identifier[p] [ literal[int] ])
keyword[else] :
identifier[p] [ literal[int] ]=( literal[string] , identifier[p] [ literal[int] ])
|
def p_typeDeclarationRHS(self, p):
"""typeDeclarationRHS : Syntax
| TEXTUAL_CONVENTION DisplayPart STATUS Status DESCRIPTION Text ReferPart SYNTAX Syntax
| choiceClause"""
if p[1]:
if p[1] == 'TEXTUAL-CONVENTION': # display
# status
# description
# reference
p[0] = ('typeDeclarationRHS', p[2], p[4], (p[5], p[6]), p[7], p[9]) # syntax # depends on [control=['if'], data=[]]
else:
p[0] = ('typeDeclarationRHS', p[1]) # depends on [control=['if'], data=[]]
|
def receiver(self, func=None, json=False):
"""
Registers a receiver function
"""
self.receivers.append((func, json))
|
def function[receiver, parameter[self, func, json]]:
constant[
Registers a receiver function
]
call[name[self].receivers.append, parameter[tuple[[<ast.Name object at 0x7da2044c2080>, <ast.Name object at 0x7da2044c3cd0>]]]]
|
keyword[def] identifier[receiver] ( identifier[self] , identifier[func] = keyword[None] , identifier[json] = keyword[False] ):
literal[string]
identifier[self] . identifier[receivers] . identifier[append] (( identifier[func] , identifier[json] ))
|
def receiver(self, func=None, json=False):
"""
Registers a receiver function
"""
self.receivers.append((func, json))
|
def check_valid(self, values, flag=None):
"""Check if the data contains any non-valid status information
Parameters
----------
values: pycbc.types.Array
Array of status information
flag: str, optional
Override the default valid mask with a user defined mask.
Returns
-------
status: boolean
Returns True if all of the status information if valid,
False if any is not.
"""
if self.valid_on_zero:
valid = values.numpy() == 0
else:
if flag is None:
flag = self.valid_mask
valid = numpy.bitwise_and(values.numpy(), flag) == flag
return bool(numpy.all(valid))
|
def function[check_valid, parameter[self, values, flag]]:
constant[Check if the data contains any non-valid status information
Parameters
----------
values: pycbc.types.Array
Array of status information
flag: str, optional
Override the default valid mask with a user defined mask.
Returns
-------
status: boolean
Returns True if all of the status information if valid,
False if any is not.
]
if name[self].valid_on_zero begin[:]
variable[valid] assign[=] compare[call[name[values].numpy, parameter[]] equal[==] constant[0]]
return[call[name[bool], parameter[call[name[numpy].all, parameter[name[valid]]]]]]
|
keyword[def] identifier[check_valid] ( identifier[self] , identifier[values] , identifier[flag] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[valid_on_zero] :
identifier[valid] = identifier[values] . identifier[numpy] ()== literal[int]
keyword[else] :
keyword[if] identifier[flag] keyword[is] keyword[None] :
identifier[flag] = identifier[self] . identifier[valid_mask]
identifier[valid] = identifier[numpy] . identifier[bitwise_and] ( identifier[values] . identifier[numpy] (), identifier[flag] )== identifier[flag]
keyword[return] identifier[bool] ( identifier[numpy] . identifier[all] ( identifier[valid] ))
|
def check_valid(self, values, flag=None):
"""Check if the data contains any non-valid status information
Parameters
----------
values: pycbc.types.Array
Array of status information
flag: str, optional
Override the default valid mask with a user defined mask.
Returns
-------
status: boolean
Returns True if all of the status information if valid,
False if any is not.
"""
if self.valid_on_zero:
valid = values.numpy() == 0 # depends on [control=['if'], data=[]]
else:
if flag is None:
flag = self.valid_mask # depends on [control=['if'], data=['flag']]
valid = numpy.bitwise_and(values.numpy(), flag) == flag
return bool(numpy.all(valid))
|
def set_color(self, rgb):
"""Set dimmer color.
"""
target = ','.join([str(c) for c in rgb])
self.set_service_value(
self.color_service,
'ColorRGB',
'newColorRGBTarget',
target)
rgbi = self.get_color_index(['R', 'G', 'B'])
if rgbi is None:
return
target = ('0=0,1=0,' +
str(rgbi[0]) + '=' + str(rgb[0]) + ',' +
str(rgbi[1]) + '=' + str(rgb[1]) + ',' +
str(rgbi[2]) + '=' + str(rgb[2]))
self.set_cache_complex_value("CurrentColor", target)
|
def function[set_color, parameter[self, rgb]]:
constant[Set dimmer color.
]
variable[target] assign[=] call[constant[,].join, parameter[<ast.ListComp object at 0x7da18fe91750>]]
call[name[self].set_service_value, parameter[name[self].color_service, constant[ColorRGB], constant[newColorRGBTarget], name[target]]]
variable[rgbi] assign[=] call[name[self].get_color_index, parameter[list[[<ast.Constant object at 0x7da18fe90d60>, <ast.Constant object at 0x7da18fe91210>, <ast.Constant object at 0x7da18fe93a30>]]]]
if compare[name[rgbi] is constant[None]] begin[:]
return[None]
variable[target] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[0=0,1=0,] + call[name[str], parameter[call[name[rgbi]][constant[0]]]]] + constant[=]] + call[name[str], parameter[call[name[rgb]][constant[0]]]]] + constant[,]] + call[name[str], parameter[call[name[rgbi]][constant[1]]]]] + constant[=]] + call[name[str], parameter[call[name[rgb]][constant[1]]]]] + constant[,]] + call[name[str], parameter[call[name[rgbi]][constant[2]]]]] + constant[=]] + call[name[str], parameter[call[name[rgb]][constant[2]]]]]
call[name[self].set_cache_complex_value, parameter[constant[CurrentColor], name[target]]]
|
keyword[def] identifier[set_color] ( identifier[self] , identifier[rgb] ):
literal[string]
identifier[target] = literal[string] . identifier[join] ([ identifier[str] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[rgb] ])
identifier[self] . identifier[set_service_value] (
identifier[self] . identifier[color_service] ,
literal[string] ,
literal[string] ,
identifier[target] )
identifier[rgbi] = identifier[self] . identifier[get_color_index] ([ literal[string] , literal[string] , literal[string] ])
keyword[if] identifier[rgbi] keyword[is] keyword[None] :
keyword[return]
identifier[target] =( literal[string] +
identifier[str] ( identifier[rgbi] [ literal[int] ])+ literal[string] + identifier[str] ( identifier[rgb] [ literal[int] ])+ literal[string] +
identifier[str] ( identifier[rgbi] [ literal[int] ])+ literal[string] + identifier[str] ( identifier[rgb] [ literal[int] ])+ literal[string] +
identifier[str] ( identifier[rgbi] [ literal[int] ])+ literal[string] + identifier[str] ( identifier[rgb] [ literal[int] ]))
identifier[self] . identifier[set_cache_complex_value] ( literal[string] , identifier[target] )
|
def set_color(self, rgb):
"""Set dimmer color.
"""
target = ','.join([str(c) for c in rgb])
self.set_service_value(self.color_service, 'ColorRGB', 'newColorRGBTarget', target)
rgbi = self.get_color_index(['R', 'G', 'B'])
if rgbi is None:
return # depends on [control=['if'], data=[]]
target = '0=0,1=0,' + str(rgbi[0]) + '=' + str(rgb[0]) + ',' + str(rgbi[1]) + '=' + str(rgb[1]) + ',' + str(rgbi[2]) + '=' + str(rgb[2])
self.set_cache_complex_value('CurrentColor', target)
|
def remove_challenge_for_url(url):
""" Removes the cached challenge for the specified URL.
:param url: the URL for which to remove the cached challenge """
if not url:
raise ValueError('URL cannot be empty')
url = parse.urlparse(url)
_lock.acquire()
del _cache[url.netloc]
_lock.release()
|
def function[remove_challenge_for_url, parameter[url]]:
constant[ Removes the cached challenge for the specified URL.
:param url: the URL for which to remove the cached challenge ]
if <ast.UnaryOp object at 0x7da1b2346590> begin[:]
<ast.Raise object at 0x7da1b2344a60>
variable[url] assign[=] call[name[parse].urlparse, parameter[name[url]]]
call[name[_lock].acquire, parameter[]]
<ast.Delete object at 0x7da2054a6290>
call[name[_lock].release, parameter[]]
|
keyword[def] identifier[remove_challenge_for_url] ( identifier[url] ):
literal[string]
keyword[if] keyword[not] identifier[url] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[url] = identifier[parse] . identifier[urlparse] ( identifier[url] )
identifier[_lock] . identifier[acquire] ()
keyword[del] identifier[_cache] [ identifier[url] . identifier[netloc] ]
identifier[_lock] . identifier[release] ()
|
def remove_challenge_for_url(url):
""" Removes the cached challenge for the specified URL.
:param url: the URL for which to remove the cached challenge """
if not url:
raise ValueError('URL cannot be empty') # depends on [control=['if'], data=[]]
url = parse.urlparse(url)
_lock.acquire()
del _cache[url.netloc]
_lock.release()
|
def _get_post_data(cls, args):
'''Return the post data.'''
if args.post_data:
return args.post_data
elif args.post_file:
return args.post_file.read()
|
def function[_get_post_data, parameter[cls, args]]:
constant[Return the post data.]
if name[args].post_data begin[:]
return[name[args].post_data]
|
keyword[def] identifier[_get_post_data] ( identifier[cls] , identifier[args] ):
literal[string]
keyword[if] identifier[args] . identifier[post_data] :
keyword[return] identifier[args] . identifier[post_data]
keyword[elif] identifier[args] . identifier[post_file] :
keyword[return] identifier[args] . identifier[post_file] . identifier[read] ()
|
def _get_post_data(cls, args):
"""Return the post data."""
if args.post_data:
return args.post_data # depends on [control=['if'], data=[]]
elif args.post_file:
return args.post_file.read() # depends on [control=['if'], data=[]]
|
def has_source_contents(self, src_id):
"""Checks if some sources exist."""
return bool(rustcall(_lib.lsm_view_has_source_contents,
self._get_ptr(), src_id))
|
def function[has_source_contents, parameter[self, src_id]]:
constant[Checks if some sources exist.]
return[call[name[bool], parameter[call[name[rustcall], parameter[name[_lib].lsm_view_has_source_contents, call[name[self]._get_ptr, parameter[]], name[src_id]]]]]]
|
keyword[def] identifier[has_source_contents] ( identifier[self] , identifier[src_id] ):
literal[string]
keyword[return] identifier[bool] ( identifier[rustcall] ( identifier[_lib] . identifier[lsm_view_has_source_contents] ,
identifier[self] . identifier[_get_ptr] (), identifier[src_id] ))
|
def has_source_contents(self, src_id):
"""Checks if some sources exist."""
return bool(rustcall(_lib.lsm_view_has_source_contents, self._get_ptr(), src_id))
|
def _get_blockade_id_from_cwd(self, cwd=None):
'''Generate a new blockade ID based on the CWD'''
if not cwd:
cwd = os.getcwd()
# this follows a similar pattern as docker-compose uses
parent_dir = os.path.abspath(cwd)
basename = os.path.basename(parent_dir).lower()
blockade_id = re.sub(r"[^a-z0-9]", "", basename)
if not blockade_id: # if we can't get a valid name from CWD, use "default"
blockade_id = "default"
return blockade_id
|
def function[_get_blockade_id_from_cwd, parameter[self, cwd]]:
constant[Generate a new blockade ID based on the CWD]
if <ast.UnaryOp object at 0x7da1b0192ec0> begin[:]
variable[cwd] assign[=] call[name[os].getcwd, parameter[]]
variable[parent_dir] assign[=] call[name[os].path.abspath, parameter[name[cwd]]]
variable[basename] assign[=] call[call[name[os].path.basename, parameter[name[parent_dir]]].lower, parameter[]]
variable[blockade_id] assign[=] call[name[re].sub, parameter[constant[[^a-z0-9]], constant[], name[basename]]]
if <ast.UnaryOp object at 0x7da1b0192080> begin[:]
variable[blockade_id] assign[=] constant[default]
return[name[blockade_id]]
|
keyword[def] identifier[_get_blockade_id_from_cwd] ( identifier[self] , identifier[cwd] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[cwd] :
identifier[cwd] = identifier[os] . identifier[getcwd] ()
identifier[parent_dir] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[cwd] )
identifier[basename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[parent_dir] ). identifier[lower] ()
identifier[blockade_id] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[basename] )
keyword[if] keyword[not] identifier[blockade_id] :
identifier[blockade_id] = literal[string]
keyword[return] identifier[blockade_id]
|
def _get_blockade_id_from_cwd(self, cwd=None):
"""Generate a new blockade ID based on the CWD"""
if not cwd:
cwd = os.getcwd() # depends on [control=['if'], data=[]]
# this follows a similar pattern as docker-compose uses
parent_dir = os.path.abspath(cwd)
basename = os.path.basename(parent_dir).lower()
blockade_id = re.sub('[^a-z0-9]', '', basename)
if not blockade_id: # if we can't get a valid name from CWD, use "default"
blockade_id = 'default' # depends on [control=['if'], data=[]]
return blockade_id
|
def authenticate_through_netrc(path=None):
"""
Return the tuple user / password given a path for the .netrc file.
Raises CredentialsError if no valid netrc file is found.
"""
errors = []
netrc_machine = 'coursera-dl'
paths = [path] if path else get_config_paths("netrc")
for path in paths:
try:
logging.debug('Trying netrc file %s', path)
auths = netrc.netrc(path).authenticators(netrc_machine)
except (IOError, netrc.NetrcParseError) as e:
errors.append(e)
else:
if auths is None:
errors.append('Didn\'t find any credentials for ' +
netrc_machine)
else:
return auths[0], auths[2]
error_messages = '\n'.join(str(e) for e in errors)
raise CredentialsError(
'Did not find valid netrc file:\n' + error_messages +
'\nPlease run this command: chmod og-rw ~/.netrc')
|
def function[authenticate_through_netrc, parameter[path]]:
constant[
Return the tuple user / password given a path for the .netrc file.
Raises CredentialsError if no valid netrc file is found.
]
variable[errors] assign[=] list[[]]
variable[netrc_machine] assign[=] constant[coursera-dl]
variable[paths] assign[=] <ast.IfExp object at 0x7da204620f70>
for taget[name[path]] in starred[name[paths]] begin[:]
<ast.Try object at 0x7da204621390>
variable[error_messages] assign[=] call[constant[
].join, parameter[<ast.GeneratorExp object at 0x7da2046236a0>]]
<ast.Raise object at 0x7da2046210f0>
|
keyword[def] identifier[authenticate_through_netrc] ( identifier[path] = keyword[None] ):
literal[string]
identifier[errors] =[]
identifier[netrc_machine] = literal[string]
identifier[paths] =[ identifier[path] ] keyword[if] identifier[path] keyword[else] identifier[get_config_paths] ( literal[string] )
keyword[for] identifier[path] keyword[in] identifier[paths] :
keyword[try] :
identifier[logging] . identifier[debug] ( literal[string] , identifier[path] )
identifier[auths] = identifier[netrc] . identifier[netrc] ( identifier[path] ). identifier[authenticators] ( identifier[netrc_machine] )
keyword[except] ( identifier[IOError] , identifier[netrc] . identifier[NetrcParseError] ) keyword[as] identifier[e] :
identifier[errors] . identifier[append] ( identifier[e] )
keyword[else] :
keyword[if] identifier[auths] keyword[is] keyword[None] :
identifier[errors] . identifier[append] ( literal[string] +
identifier[netrc_machine] )
keyword[else] :
keyword[return] identifier[auths] [ literal[int] ], identifier[auths] [ literal[int] ]
identifier[error_messages] = literal[string] . identifier[join] ( identifier[str] ( identifier[e] ) keyword[for] identifier[e] keyword[in] identifier[errors] )
keyword[raise] identifier[CredentialsError] (
literal[string] + identifier[error_messages] +
literal[string] )
|
def authenticate_through_netrc(path=None):
"""
Return the tuple user / password given a path for the .netrc file.
Raises CredentialsError if no valid netrc file is found.
"""
errors = []
netrc_machine = 'coursera-dl'
paths = [path] if path else get_config_paths('netrc')
for path in paths:
try:
logging.debug('Trying netrc file %s', path)
auths = netrc.netrc(path).authenticators(netrc_machine) # depends on [control=['try'], data=[]]
except (IOError, netrc.NetrcParseError) as e:
errors.append(e) # depends on [control=['except'], data=['e']]
else:
if auths is None:
errors.append("Didn't find any credentials for " + netrc_machine) # depends on [control=['if'], data=[]]
else:
return (auths[0], auths[2]) # depends on [control=['for'], data=['path']]
error_messages = '\n'.join((str(e) for e in errors))
raise CredentialsError('Did not find valid netrc file:\n' + error_messages + '\nPlease run this command: chmod og-rw ~/.netrc')
|
def basescript(line):
'''
>>> import pprint
>>> input_line = '{"level": "warning", "timestamp": "2018-02-07T06:37:00.297610Z", "event": "exited via keyboard interrupt", "type": "log", "id": "20180207T063700_4d03fe800bd111e89ecb96000007bc65", "_": {"ln": 58, "file": "/usr/local/lib/python2.7/dist-packages/basescript/basescript.py", "name": "basescript.basescript", "fn": "start"}}'
>>> output_line1 = basescript(input_line)
>>> pprint.pprint(output_line1)
{'data': {u'_': {u'file': u'/usr/local/lib/python2.7/dist-packages/basescript/basescript.py',
u'fn': u'start',
u'ln': 58,
u'name': u'basescript.basescript'},
u'event': u'exited via keyboard interrupt',
u'id': u'20180207T063700_4d03fe800bd111e89ecb96000007bc65',
u'level': u'warning',
u'timestamp': u'2018-02-07T06:37:00.297610Z',
u'type': u'log'},
'event': u'exited via keyboard interrupt',
'id': u'20180207T063700_4d03fe800bd111e89ecb96000007bc65',
'level': u'warning',
'timestamp': u'2018-02-07T06:37:00.297610Z',
'type': u'log'}
'''
log = json.loads(line)
return dict(
timestamp=log['timestamp'],
data=log,
id=log['id'],
type=log['type'],
level=log['level'],
event=log['event']
)
|
def function[basescript, parameter[line]]:
constant[
>>> import pprint
>>> input_line = '{"level": "warning", "timestamp": "2018-02-07T06:37:00.297610Z", "event": "exited via keyboard interrupt", "type": "log", "id": "20180207T063700_4d03fe800bd111e89ecb96000007bc65", "_": {"ln": 58, "file": "/usr/local/lib/python2.7/dist-packages/basescript/basescript.py", "name": "basescript.basescript", "fn": "start"}}'
>>> output_line1 = basescript(input_line)
>>> pprint.pprint(output_line1)
{'data': {u'_': {u'file': u'/usr/local/lib/python2.7/dist-packages/basescript/basescript.py',
u'fn': u'start',
u'ln': 58,
u'name': u'basescript.basescript'},
u'event': u'exited via keyboard interrupt',
u'id': u'20180207T063700_4d03fe800bd111e89ecb96000007bc65',
u'level': u'warning',
u'timestamp': u'2018-02-07T06:37:00.297610Z',
u'type': u'log'},
'event': u'exited via keyboard interrupt',
'id': u'20180207T063700_4d03fe800bd111e89ecb96000007bc65',
'level': u'warning',
'timestamp': u'2018-02-07T06:37:00.297610Z',
'type': u'log'}
]
variable[log] assign[=] call[name[json].loads, parameter[name[line]]]
return[call[name[dict], parameter[]]]
|
keyword[def] identifier[basescript] ( identifier[line] ):
literal[string]
identifier[log] = identifier[json] . identifier[loads] ( identifier[line] )
keyword[return] identifier[dict] (
identifier[timestamp] = identifier[log] [ literal[string] ],
identifier[data] = identifier[log] ,
identifier[id] = identifier[log] [ literal[string] ],
identifier[type] = identifier[log] [ literal[string] ],
identifier[level] = identifier[log] [ literal[string] ],
identifier[event] = identifier[log] [ literal[string] ]
)
|
def basescript(line):
"""
>>> import pprint
>>> input_line = '{"level": "warning", "timestamp": "2018-02-07T06:37:00.297610Z", "event": "exited via keyboard interrupt", "type": "log", "id": "20180207T063700_4d03fe800bd111e89ecb96000007bc65", "_": {"ln": 58, "file": "/usr/local/lib/python2.7/dist-packages/basescript/basescript.py", "name": "basescript.basescript", "fn": "start"}}'
>>> output_line1 = basescript(input_line)
>>> pprint.pprint(output_line1)
{'data': {u'_': {u'file': u'/usr/local/lib/python2.7/dist-packages/basescript/basescript.py',
u'fn': u'start',
u'ln': 58,
u'name': u'basescript.basescript'},
u'event': u'exited via keyboard interrupt',
u'id': u'20180207T063700_4d03fe800bd111e89ecb96000007bc65',
u'level': u'warning',
u'timestamp': u'2018-02-07T06:37:00.297610Z',
u'type': u'log'},
'event': u'exited via keyboard interrupt',
'id': u'20180207T063700_4d03fe800bd111e89ecb96000007bc65',
'level': u'warning',
'timestamp': u'2018-02-07T06:37:00.297610Z',
'type': u'log'}
"""
log = json.loads(line)
return dict(timestamp=log['timestamp'], data=log, id=log['id'], type=log['type'], level=log['level'], event=log['event'])
|
def prettify(unicode_text):
"""Return a pretty-printed version of a unicode XML string.
Useful for debugging.
Args:
unicode_text (str): A text representation of XML (unicode,
*not* utf-8).
Returns:
str: A pretty-printed version of the input.
"""
import xml.dom.minidom
reparsed = xml.dom.minidom.parseString(unicode_text.encode('utf-8'))
return reparsed.toprettyxml(indent=" ", newl="\n")
|
def function[prettify, parameter[unicode_text]]:
constant[Return a pretty-printed version of a unicode XML string.
Useful for debugging.
Args:
unicode_text (str): A text representation of XML (unicode,
*not* utf-8).
Returns:
str: A pretty-printed version of the input.
]
import module[xml.dom.minidom]
variable[reparsed] assign[=] call[name[xml].dom.minidom.parseString, parameter[call[name[unicode_text].encode, parameter[constant[utf-8]]]]]
return[call[name[reparsed].toprettyxml, parameter[]]]
|
keyword[def] identifier[prettify] ( identifier[unicode_text] ):
literal[string]
keyword[import] identifier[xml] . identifier[dom] . identifier[minidom]
identifier[reparsed] = identifier[xml] . identifier[dom] . identifier[minidom] . identifier[parseString] ( identifier[unicode_text] . identifier[encode] ( literal[string] ))
keyword[return] identifier[reparsed] . identifier[toprettyxml] ( identifier[indent] = literal[string] , identifier[newl] = literal[string] )
|
def prettify(unicode_text):
"""Return a pretty-printed version of a unicode XML string.
Useful for debugging.
Args:
unicode_text (str): A text representation of XML (unicode,
*not* utf-8).
Returns:
str: A pretty-printed version of the input.
"""
import xml.dom.minidom
reparsed = xml.dom.minidom.parseString(unicode_text.encode('utf-8'))
return reparsed.toprettyxml(indent=' ', newl='\n')
|
def create_instance(self, **kwargs):
"""Creates a new virtual server instance.
.. warning::
This will add charges to your account
Example::
new_vsi = {
'domain': u'test01.labs.sftlyr.ws',
'hostname': u'minion05',
'datacenter': u'hkg02',
'flavor': 'BL1_1X2X100'
'dedicated': False,
'private': False,
'os_code' : u'UBUNTU_LATEST',
'hourly': True,
'ssh_keys': [1234],
'disks': ('100','25'),
'local_disk': True,
'tags': 'test, pleaseCancel',
'public_security_groups': [12, 15]
}
vsi = mgr.create_instance(**new_vsi)
# vsi will have the newly created vsi details if done properly.
print vsi
:param int cpus: The number of virtual CPUs to include in the instance.
:param int memory: The amount of RAM to order.
:param bool hourly: Flag to indicate if this server should be billed hourly (default) or monthly.
:param string hostname: The hostname to use for the new server.
:param string domain: The domain to use for the new server.
:param bool local_disk: Flag to indicate if this should be a local disk (default) or a SAN disk.
:param string datacenter: The short name of the data center in which the VS should reside.
:param string os_code: The operating system to use. Cannot be specified if image_id is specified.
:param int image_id: The GUID of the image to load onto the server. Cannot be specified if os_code is specified.
:param bool dedicated: Flag to indicate if this should be housed on adedicated or shared host (default).
This will incur a fee on your account.
:param int public_vlan: The ID of the public VLAN on which you want this VS placed.
:param list public_security_groups: The list of security group IDs to apply to the public interface
:param list private_security_groups: The list of security group IDs to apply to the private interface
:param int private_vlan: The ID of the private VLAN on which you want this VS placed.
:param list disks: A list of disk capacities for this server.
:param string post_uri: The URI of the post-install script to run after reload
:param bool private: If true, the VS will be provisioned only with access to the private network.
Defaults to false
:param list ssh_keys: The SSH keys to add to the root user
:param int nic_speed: The port speed to set
:param string tags: tags to set on the VS as a comma separated list
:param string flavor: The key name of the public virtual server flavor being ordered.
:param int host_id: The host id of a dedicated host to provision a dedicated host virtual server on.
"""
tags = kwargs.pop('tags', None)
inst = self.guest.createObject(self._generate_create_dict(**kwargs))
if tags is not None:
self.set_tags(tags, guest_id=inst['id'])
return inst
|
def function[create_instance, parameter[self]]:
constant[Creates a new virtual server instance.
.. warning::
This will add charges to your account
Example::
new_vsi = {
'domain': u'test01.labs.sftlyr.ws',
'hostname': u'minion05',
'datacenter': u'hkg02',
'flavor': 'BL1_1X2X100'
'dedicated': False,
'private': False,
'os_code' : u'UBUNTU_LATEST',
'hourly': True,
'ssh_keys': [1234],
'disks': ('100','25'),
'local_disk': True,
'tags': 'test, pleaseCancel',
'public_security_groups': [12, 15]
}
vsi = mgr.create_instance(**new_vsi)
# vsi will have the newly created vsi details if done properly.
print vsi
:param int cpus: The number of virtual CPUs to include in the instance.
:param int memory: The amount of RAM to order.
:param bool hourly: Flag to indicate if this server should be billed hourly (default) or monthly.
:param string hostname: The hostname to use for the new server.
:param string domain: The domain to use for the new server.
:param bool local_disk: Flag to indicate if this should be a local disk (default) or a SAN disk.
:param string datacenter: The short name of the data center in which the VS should reside.
:param string os_code: The operating system to use. Cannot be specified if image_id is specified.
:param int image_id: The GUID of the image to load onto the server. Cannot be specified if os_code is specified.
:param bool dedicated: Flag to indicate if this should be housed on adedicated or shared host (default).
This will incur a fee on your account.
:param int public_vlan: The ID of the public VLAN on which you want this VS placed.
:param list public_security_groups: The list of security group IDs to apply to the public interface
:param list private_security_groups: The list of security group IDs to apply to the private interface
:param int private_vlan: The ID of the private VLAN on which you want this VS placed.
:param list disks: A list of disk capacities for this server.
:param string post_uri: The URI of the post-install script to run after reload
:param bool private: If true, the VS will be provisioned only with access to the private network.
Defaults to false
:param list ssh_keys: The SSH keys to add to the root user
:param int nic_speed: The port speed to set
:param string tags: tags to set on the VS as a comma separated list
:param string flavor: The key name of the public virtual server flavor being ordered.
:param int host_id: The host id of a dedicated host to provision a dedicated host virtual server on.
]
variable[tags] assign[=] call[name[kwargs].pop, parameter[constant[tags], constant[None]]]
variable[inst] assign[=] call[name[self].guest.createObject, parameter[call[name[self]._generate_create_dict, parameter[]]]]
if compare[name[tags] is_not constant[None]] begin[:]
call[name[self].set_tags, parameter[name[tags]]]
return[name[inst]]
|
keyword[def] identifier[create_instance] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[tags] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
identifier[inst] = identifier[self] . identifier[guest] . identifier[createObject] ( identifier[self] . identifier[_generate_create_dict] (** identifier[kwargs] ))
keyword[if] identifier[tags] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[set_tags] ( identifier[tags] , identifier[guest_id] = identifier[inst] [ literal[string] ])
keyword[return] identifier[inst]
|
def create_instance(self, **kwargs):
"""Creates a new virtual server instance.
.. warning::
This will add charges to your account
Example::
new_vsi = {
'domain': u'test01.labs.sftlyr.ws',
'hostname': u'minion05',
'datacenter': u'hkg02',
'flavor': 'BL1_1X2X100'
'dedicated': False,
'private': False,
'os_code' : u'UBUNTU_LATEST',
'hourly': True,
'ssh_keys': [1234],
'disks': ('100','25'),
'local_disk': True,
'tags': 'test, pleaseCancel',
'public_security_groups': [12, 15]
}
vsi = mgr.create_instance(**new_vsi)
# vsi will have the newly created vsi details if done properly.
print vsi
:param int cpus: The number of virtual CPUs to include in the instance.
:param int memory: The amount of RAM to order.
:param bool hourly: Flag to indicate if this server should be billed hourly (default) or monthly.
:param string hostname: The hostname to use for the new server.
:param string domain: The domain to use for the new server.
:param bool local_disk: Flag to indicate if this should be a local disk (default) or a SAN disk.
:param string datacenter: The short name of the data center in which the VS should reside.
:param string os_code: The operating system to use. Cannot be specified if image_id is specified.
:param int image_id: The GUID of the image to load onto the server. Cannot be specified if os_code is specified.
:param bool dedicated: Flag to indicate if this should be housed on adedicated or shared host (default).
This will incur a fee on your account.
:param int public_vlan: The ID of the public VLAN on which you want this VS placed.
:param list public_security_groups: The list of security group IDs to apply to the public interface
:param list private_security_groups: The list of security group IDs to apply to the private interface
:param int private_vlan: The ID of the private VLAN on which you want this VS placed.
:param list disks: A list of disk capacities for this server.
:param string post_uri: The URI of the post-install script to run after reload
:param bool private: If true, the VS will be provisioned only with access to the private network.
Defaults to false
:param list ssh_keys: The SSH keys to add to the root user
:param int nic_speed: The port speed to set
:param string tags: tags to set on the VS as a comma separated list
:param string flavor: The key name of the public virtual server flavor being ordered.
:param int host_id: The host id of a dedicated host to provision a dedicated host virtual server on.
"""
tags = kwargs.pop('tags', None)
inst = self.guest.createObject(self._generate_create_dict(**kwargs))
if tags is not None:
self.set_tags(tags, guest_id=inst['id']) # depends on [control=['if'], data=['tags']]
return inst
|
def parse_modes(modes, targets=None, noargs=''):
"""Parse channel modes:
.. code-block:: python
>>> parse_modes('+c-n', noargs='cn')
[('+', 'c', None), ('-', 'n', None)]
>>> parse_modes('+c-v', ['gawel'], noargs='c')
[('+', 'c', None), ('-', 'v', 'gawel')]
"""
if not targets:
targets = []
cleaned = []
for mode in modes:
if mode in '-+':
char = mode
continue
target = targets.pop(0) if mode not in noargs else None
cleaned.append((char, mode, target))
return cleaned
|
def function[parse_modes, parameter[modes, targets, noargs]]:
constant[Parse channel modes:
.. code-block:: python
>>> parse_modes('+c-n', noargs='cn')
[('+', 'c', None), ('-', 'n', None)]
>>> parse_modes('+c-v', ['gawel'], noargs='c')
[('+', 'c', None), ('-', 'v', 'gawel')]
]
if <ast.UnaryOp object at 0x7da1b069c0d0> begin[:]
variable[targets] assign[=] list[[]]
variable[cleaned] assign[=] list[[]]
for taget[name[mode]] in starred[name[modes]] begin[:]
if compare[name[mode] in constant[-+]] begin[:]
variable[char] assign[=] name[mode]
continue
variable[target] assign[=] <ast.IfExp object at 0x7da1b069fcd0>
call[name[cleaned].append, parameter[tuple[[<ast.Name object at 0x7da1b069d810>, <ast.Name object at 0x7da1b069d4e0>, <ast.Name object at 0x7da1b069d840>]]]]
return[name[cleaned]]
|
keyword[def] identifier[parse_modes] ( identifier[modes] , identifier[targets] = keyword[None] , identifier[noargs] = literal[string] ):
literal[string]
keyword[if] keyword[not] identifier[targets] :
identifier[targets] =[]
identifier[cleaned] =[]
keyword[for] identifier[mode] keyword[in] identifier[modes] :
keyword[if] identifier[mode] keyword[in] literal[string] :
identifier[char] = identifier[mode]
keyword[continue]
identifier[target] = identifier[targets] . identifier[pop] ( literal[int] ) keyword[if] identifier[mode] keyword[not] keyword[in] identifier[noargs] keyword[else] keyword[None]
identifier[cleaned] . identifier[append] (( identifier[char] , identifier[mode] , identifier[target] ))
keyword[return] identifier[cleaned]
|
def parse_modes(modes, targets=None, noargs=''):
"""Parse channel modes:
.. code-block:: python
>>> parse_modes('+c-n', noargs='cn')
[('+', 'c', None), ('-', 'n', None)]
>>> parse_modes('+c-v', ['gawel'], noargs='c')
[('+', 'c', None), ('-', 'v', 'gawel')]
"""
if not targets:
targets = [] # depends on [control=['if'], data=[]]
cleaned = []
for mode in modes:
if mode in '-+':
char = mode
continue # depends on [control=['if'], data=['mode']]
target = targets.pop(0) if mode not in noargs else None
cleaned.append((char, mode, target)) # depends on [control=['for'], data=['mode']]
return cleaned
|
def mean_by_window(self, indices, window):
"""
Average series across multiple windows specified by their centers.
Parameters
----------
indices : array-like
List of times specifying window centers
window : int
Window size
"""
masks = self._makewindows(indices, window)
newindex = arange(0, len(masks[0]))
return self.map(lambda x: mean([x[m] for m in masks], axis=0), index=newindex)
|
def function[mean_by_window, parameter[self, indices, window]]:
constant[
Average series across multiple windows specified by their centers.
Parameters
----------
indices : array-like
List of times specifying window centers
window : int
Window size
]
variable[masks] assign[=] call[name[self]._makewindows, parameter[name[indices], name[window]]]
variable[newindex] assign[=] call[name[arange], parameter[constant[0], call[name[len], parameter[call[name[masks]][constant[0]]]]]]
return[call[name[self].map, parameter[<ast.Lambda object at 0x7da18f811b10>]]]
|
keyword[def] identifier[mean_by_window] ( identifier[self] , identifier[indices] , identifier[window] ):
literal[string]
identifier[masks] = identifier[self] . identifier[_makewindows] ( identifier[indices] , identifier[window] )
identifier[newindex] = identifier[arange] ( literal[int] , identifier[len] ( identifier[masks] [ literal[int] ]))
keyword[return] identifier[self] . identifier[map] ( keyword[lambda] identifier[x] : identifier[mean] ([ identifier[x] [ identifier[m] ] keyword[for] identifier[m] keyword[in] identifier[masks] ], identifier[axis] = literal[int] ), identifier[index] = identifier[newindex] )
|
def mean_by_window(self, indices, window):
"""
Average series across multiple windows specified by their centers.
Parameters
----------
indices : array-like
List of times specifying window centers
window : int
Window size
"""
masks = self._makewindows(indices, window)
newindex = arange(0, len(masks[0]))
return self.map(lambda x: mean([x[m] for m in masks], axis=0), index=newindex)
|
def newline(self, copy_margin=True):
"""
Insert a line ending at the current position.
"""
if copy_margin:
self.insert_text('\n' + self.document.leading_whitespace_in_current_line)
else:
self.insert_text('\n')
|
def function[newline, parameter[self, copy_margin]]:
constant[
Insert a line ending at the current position.
]
if name[copy_margin] begin[:]
call[name[self].insert_text, parameter[binary_operation[constant[
] + name[self].document.leading_whitespace_in_current_line]]]
|
keyword[def] identifier[newline] ( identifier[self] , identifier[copy_margin] = keyword[True] ):
literal[string]
keyword[if] identifier[copy_margin] :
identifier[self] . identifier[insert_text] ( literal[string] + identifier[self] . identifier[document] . identifier[leading_whitespace_in_current_line] )
keyword[else] :
identifier[self] . identifier[insert_text] ( literal[string] )
|
def newline(self, copy_margin=True):
"""
Insert a line ending at the current position.
"""
if copy_margin:
self.insert_text('\n' + self.document.leading_whitespace_in_current_line) # depends on [control=['if'], data=[]]
else:
self.insert_text('\n')
|
def assign_unassigned_members(self, group_category_id, sync=None):
"""
Assign unassigned members.
Assign all unassigned members as evenly as possible among the existing
student groups.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - group_category_id
"""ID"""
path["group_category_id"] = group_category_id
# OPTIONAL - sync
"""The assigning is done asynchronously by default. If you would like to
override this and have the assigning done synchronously, set this value
to true."""
if sync is not None:
data["sync"] = sync
self.logger.debug("POST /api/v1/group_categories/{group_category_id}/assign_unassigned_members with query params: {params} and form data: {data}".format(params=params, data=data, **path))
return self.generic_request("POST", "/api/v1/group_categories/{group_category_id}/assign_unassigned_members".format(**path), data=data, params=params, single_item=True)
|
def function[assign_unassigned_members, parameter[self, group_category_id, sync]]:
constant[
Assign unassigned members.
Assign all unassigned members as evenly as possible among the existing
student groups.
]
variable[path] assign[=] dictionary[[], []]
variable[data] assign[=] dictionary[[], []]
variable[params] assign[=] dictionary[[], []]
constant[ID]
call[name[path]][constant[group_category_id]] assign[=] name[group_category_id]
constant[The assigning is done asynchronously by default. If you would like to
override this and have the assigning done synchronously, set this value
to true.]
if compare[name[sync] is_not constant[None]] begin[:]
call[name[data]][constant[sync]] assign[=] name[sync]
call[name[self].logger.debug, parameter[call[constant[POST /api/v1/group_categories/{group_category_id}/assign_unassigned_members with query params: {params} and form data: {data}].format, parameter[]]]]
return[call[name[self].generic_request, parameter[constant[POST], call[constant[/api/v1/group_categories/{group_category_id}/assign_unassigned_members].format, parameter[]]]]]
|
keyword[def] identifier[assign_unassigned_members] ( identifier[self] , identifier[group_category_id] , identifier[sync] = keyword[None] ):
literal[string]
identifier[path] ={}
identifier[data] ={}
identifier[params] ={}
literal[string]
identifier[path] [ literal[string] ]= identifier[group_category_id]
literal[string]
keyword[if] identifier[sync] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[sync]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[params] = identifier[params] , identifier[data] = identifier[data] ,** identifier[path] ))
keyword[return] identifier[self] . identifier[generic_request] ( literal[string] , literal[string] . identifier[format] (** identifier[path] ), identifier[data] = identifier[data] , identifier[params] = identifier[params] , identifier[single_item] = keyword[True] )
|
def assign_unassigned_members(self, group_category_id, sync=None):
"""
Assign unassigned members.
Assign all unassigned members as evenly as possible among the existing
student groups.
"""
path = {}
data = {}
params = {} # REQUIRED - PATH - group_category_id
'ID'
path['group_category_id'] = group_category_id # OPTIONAL - sync
'The assigning is done asynchronously by default. If you would like to\n override this and have the assigning done synchronously, set this value\n to true.'
if sync is not None:
data['sync'] = sync # depends on [control=['if'], data=['sync']]
self.logger.debug('POST /api/v1/group_categories/{group_category_id}/assign_unassigned_members with query params: {params} and form data: {data}'.format(params=params, data=data, **path))
return self.generic_request('POST', '/api/v1/group_categories/{group_category_id}/assign_unassigned_members'.format(**path), data=data, params=params, single_item=True)
|
def _subs(self, substitutions, default, simplify):
"""
Return an expression where all subterms equal to a key expression are
substituted by the corresponding value expression using a mapping of:
{expr->expr to substitute.}
"""
# track the new list of unchanged args or replaced args through
# a substitution
new_arguments = []
changed_something = False
# shortcut for basic logic True or False
if self is self.TRUE or self is self.FALSE:
return self
# if the expression has no elements, e.g. is empty, do not apply
# substitions
if not self.args:
return default
# iterate the subexpressions: either plain symbols or a subexpressions
for arg in self.args:
# collect substitutions for exact matches
# break as soon as we have a match
for expr, substitution in substitutions.items():
if arg == expr:
new_arguments.append(substitution)
changed_something = True
break
# this will execute only if we did not break out of the
# loop, e.g. if we did not change anything and did not
# collect any substitutions
else:
# recursively call _subs on each arg to see if we get a
# substituted arg
new_arg = arg._subs(substitutions, default, simplify)
if new_arg is None:
# if we did not collect a substitution for this arg,
# keep the arg as-is, it is not replaced by anything
new_arguments.append(arg)
else:
# otherwise, we add the substitution for this arg instead
new_arguments.append(new_arg)
changed_something = True
if not changed_something:
return
# here we did some substitution: we return a new expression
# built from the new_arguments
newexpr = self.__class__(*new_arguments)
return newexpr.simplify() if simplify else newexpr
|
def function[_subs, parameter[self, substitutions, default, simplify]]:
constant[
Return an expression where all subterms equal to a key expression are
substituted by the corresponding value expression using a mapping of:
{expr->expr to substitute.}
]
variable[new_arguments] assign[=] list[[]]
variable[changed_something] assign[=] constant[False]
if <ast.BoolOp object at 0x7da1b28506d0> begin[:]
return[name[self]]
if <ast.UnaryOp object at 0x7da1b2852da0> begin[:]
return[name[default]]
for taget[name[arg]] in starred[name[self].args] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b2852530>, <ast.Name object at 0x7da1b2853400>]]] in starred[call[name[substitutions].items, parameter[]]] begin[:]
if compare[name[arg] equal[==] name[expr]] begin[:]
call[name[new_arguments].append, parameter[name[substitution]]]
variable[changed_something] assign[=] constant[True]
break
if <ast.UnaryOp object at 0x7da2041d8fd0> begin[:]
return[None]
variable[newexpr] assign[=] call[name[self].__class__, parameter[<ast.Starred object at 0x7da2041db850>]]
return[<ast.IfExp object at 0x7da2041db370>]
|
keyword[def] identifier[_subs] ( identifier[self] , identifier[substitutions] , identifier[default] , identifier[simplify] ):
literal[string]
identifier[new_arguments] =[]
identifier[changed_something] = keyword[False]
keyword[if] identifier[self] keyword[is] identifier[self] . identifier[TRUE] keyword[or] identifier[self] keyword[is] identifier[self] . identifier[FALSE] :
keyword[return] identifier[self]
keyword[if] keyword[not] identifier[self] . identifier[args] :
keyword[return] identifier[default]
keyword[for] identifier[arg] keyword[in] identifier[self] . identifier[args] :
keyword[for] identifier[expr] , identifier[substitution] keyword[in] identifier[substitutions] . identifier[items] ():
keyword[if] identifier[arg] == identifier[expr] :
identifier[new_arguments] . identifier[append] ( identifier[substitution] )
identifier[changed_something] = keyword[True]
keyword[break]
keyword[else] :
identifier[new_arg] = identifier[arg] . identifier[_subs] ( identifier[substitutions] , identifier[default] , identifier[simplify] )
keyword[if] identifier[new_arg] keyword[is] keyword[None] :
identifier[new_arguments] . identifier[append] ( identifier[arg] )
keyword[else] :
identifier[new_arguments] . identifier[append] ( identifier[new_arg] )
identifier[changed_something] = keyword[True]
keyword[if] keyword[not] identifier[changed_something] :
keyword[return]
identifier[newexpr] = identifier[self] . identifier[__class__] (* identifier[new_arguments] )
keyword[return] identifier[newexpr] . identifier[simplify] () keyword[if] identifier[simplify] keyword[else] identifier[newexpr]
|
def _subs(self, substitutions, default, simplify):
"""
Return an expression where all subterms equal to a key expression are
substituted by the corresponding value expression using a mapping of:
{expr->expr to substitute.}
"""
# track the new list of unchanged args or replaced args through
# a substitution
new_arguments = []
changed_something = False
# shortcut for basic logic True or False
if self is self.TRUE or self is self.FALSE:
return self # depends on [control=['if'], data=[]]
# if the expression has no elements, e.g. is empty, do not apply
# substitions
if not self.args:
return default # depends on [control=['if'], data=[]]
# iterate the subexpressions: either plain symbols or a subexpressions
for arg in self.args:
# collect substitutions for exact matches
# break as soon as we have a match
for (expr, substitution) in substitutions.items():
if arg == expr:
new_arguments.append(substitution)
changed_something = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
else:
# this will execute only if we did not break out of the
# loop, e.g. if we did not change anything and did not
# collect any substitutions
# recursively call _subs on each arg to see if we get a
# substituted arg
new_arg = arg._subs(substitutions, default, simplify)
if new_arg is None:
# if we did not collect a substitution for this arg,
# keep the arg as-is, it is not replaced by anything
new_arguments.append(arg) # depends on [control=['if'], data=[]]
else:
# otherwise, we add the substitution for this arg instead
new_arguments.append(new_arg)
changed_something = True # depends on [control=['for'], data=['arg']]
if not changed_something:
return # depends on [control=['if'], data=[]]
# here we did some substitution: we return a new expression
# built from the new_arguments
newexpr = self.__class__(*new_arguments)
return newexpr.simplify() if simplify else newexpr
|
def init(cwd,
bare=False,
template=None,
separate_git_dir=None,
shared=None,
opts='',
git_opts='',
user=None,
password=None,
ignore_retcode=False,
output_encoding=None):
'''
Interface to `git-init(1)`_
cwd
The path to the directory to be initialized
bare : False
If ``True``, init a bare repository
.. versionadded:: 2015.8.0
template
Set this argument to specify an alternate `template directory`_
.. versionadded:: 2015.8.0
separate_git_dir
Set this argument to specify an alternate ``$GIT_DIR``
.. versionadded:: 2015.8.0
shared
Set sharing permissions on git repo. See `git-init(1)`_ for more
details.
.. versionadded:: 2015.8.0
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
git_opts
Any additional options to add to git command itself (not the ``init``
subcommand), in a single string. This is useful for passing ``-c`` to
run git with temporary changes to the git configuration.
.. versionadded:: 2017.7.0
.. note::
This is only supported in git 1.7.2 and newer.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
password
Windows only. Required when specifying ``user``. This parameter will be
ignored on non-Windows platforms.
.. versionadded:: 2016.3.4
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
output_encoding
Use this option to specify which encoding to use to decode the output
from any git commands which are run. This should not be needed in most
cases.
.. note::
This should only be needed if the files in the repository were
created with filenames using an encoding other than UTF-8 to handle
Unicode characters.
.. versionadded:: 2018.3.1
.. _`git-init(1)`: http://git-scm.com/docs/git-init
.. _`template directory`: http://git-scm.com/docs/git-init#_template_directory
CLI Examples:
.. code-block:: bash
salt myminion git.init /path/to/repo
# Init a bare repo (before 2015.8.0)
salt myminion git.init /path/to/bare/repo.git opts='--bare'
# Init a bare repo (2015.8.0 and later)
salt myminion git.init /path/to/bare/repo.git bare=True
'''
cwd = _expand_path(cwd, user)
command = ['git'] + _format_git_opts(git_opts)
command.append('init')
if bare:
command.append('--bare')
if template is not None:
command.append('--template={0}'.format(template))
if separate_git_dir is not None:
command.append('--separate-git-dir={0}'.format(separate_git_dir))
if shared is not None:
if isinstance(shared, six.integer_types) \
and not isinstance(shared, bool):
shared = '0' + six.text_type(shared)
elif not isinstance(shared, six.string_types):
# Using lower here because booleans would be capitalized when
# converted to a string.
shared = six.text_type(shared).lower()
command.append('--shared={0}'.format(shared))
command.extend(_format_opts(opts))
command.append(cwd)
return _git_run(command,
user=user,
password=password,
ignore_retcode=ignore_retcode,
output_encoding=output_encoding)['stdout']
|
def function[init, parameter[cwd, bare, template, separate_git_dir, shared, opts, git_opts, user, password, ignore_retcode, output_encoding]]:
constant[
Interface to `git-init(1)`_
cwd
The path to the directory to be initialized
bare : False
If ``True``, init a bare repository
.. versionadded:: 2015.8.0
template
Set this argument to specify an alternate `template directory`_
.. versionadded:: 2015.8.0
separate_git_dir
Set this argument to specify an alternate ``$GIT_DIR``
.. versionadded:: 2015.8.0
shared
Set sharing permissions on git repo. See `git-init(1)`_ for more
details.
.. versionadded:: 2015.8.0
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
git_opts
Any additional options to add to git command itself (not the ``init``
subcommand), in a single string. This is useful for passing ``-c`` to
run git with temporary changes to the git configuration.
.. versionadded:: 2017.7.0
.. note::
This is only supported in git 1.7.2 and newer.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
password
Windows only. Required when specifying ``user``. This parameter will be
ignored on non-Windows platforms.
.. versionadded:: 2016.3.4
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
output_encoding
Use this option to specify which encoding to use to decode the output
from any git commands which are run. This should not be needed in most
cases.
.. note::
This should only be needed if the files in the repository were
created with filenames using an encoding other than UTF-8 to handle
Unicode characters.
.. versionadded:: 2018.3.1
.. _`git-init(1)`: http://git-scm.com/docs/git-init
.. _`template directory`: http://git-scm.com/docs/git-init#_template_directory
CLI Examples:
.. code-block:: bash
salt myminion git.init /path/to/repo
# Init a bare repo (before 2015.8.0)
salt myminion git.init /path/to/bare/repo.git opts='--bare'
# Init a bare repo (2015.8.0 and later)
salt myminion git.init /path/to/bare/repo.git bare=True
]
variable[cwd] assign[=] call[name[_expand_path], parameter[name[cwd], name[user]]]
variable[command] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b26ac520>]] + call[name[_format_git_opts], parameter[name[git_opts]]]]
call[name[command].append, parameter[constant[init]]]
if name[bare] begin[:]
call[name[command].append, parameter[constant[--bare]]]
if compare[name[template] is_not constant[None]] begin[:]
call[name[command].append, parameter[call[constant[--template={0}].format, parameter[name[template]]]]]
if compare[name[separate_git_dir] is_not constant[None]] begin[:]
call[name[command].append, parameter[call[constant[--separate-git-dir={0}].format, parameter[name[separate_git_dir]]]]]
if compare[name[shared] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da1b1c20370> begin[:]
variable[shared] assign[=] binary_operation[constant[0] + call[name[six].text_type, parameter[name[shared]]]]
call[name[command].append, parameter[call[constant[--shared={0}].format, parameter[name[shared]]]]]
call[name[command].extend, parameter[call[name[_format_opts], parameter[name[opts]]]]]
call[name[command].append, parameter[name[cwd]]]
return[call[call[name[_git_run], parameter[name[command]]]][constant[stdout]]]
|
keyword[def] identifier[init] ( identifier[cwd] ,
identifier[bare] = keyword[False] ,
identifier[template] = keyword[None] ,
identifier[separate_git_dir] = keyword[None] ,
identifier[shared] = keyword[None] ,
identifier[opts] = literal[string] ,
identifier[git_opts] = literal[string] ,
identifier[user] = keyword[None] ,
identifier[password] = keyword[None] ,
identifier[ignore_retcode] = keyword[False] ,
identifier[output_encoding] = keyword[None] ):
literal[string]
identifier[cwd] = identifier[_expand_path] ( identifier[cwd] , identifier[user] )
identifier[command] =[ literal[string] ]+ identifier[_format_git_opts] ( identifier[git_opts] )
identifier[command] . identifier[append] ( literal[string] )
keyword[if] identifier[bare] :
identifier[command] . identifier[append] ( literal[string] )
keyword[if] identifier[template] keyword[is] keyword[not] keyword[None] :
identifier[command] . identifier[append] ( literal[string] . identifier[format] ( identifier[template] ))
keyword[if] identifier[separate_git_dir] keyword[is] keyword[not] keyword[None] :
identifier[command] . identifier[append] ( literal[string] . identifier[format] ( identifier[separate_git_dir] ))
keyword[if] identifier[shared] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[shared] , identifier[six] . identifier[integer_types] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[shared] , identifier[bool] ):
identifier[shared] = literal[string] + identifier[six] . identifier[text_type] ( identifier[shared] )
keyword[elif] keyword[not] identifier[isinstance] ( identifier[shared] , identifier[six] . identifier[string_types] ):
identifier[shared] = identifier[six] . identifier[text_type] ( identifier[shared] ). identifier[lower] ()
identifier[command] . identifier[append] ( literal[string] . identifier[format] ( identifier[shared] ))
identifier[command] . identifier[extend] ( identifier[_format_opts] ( identifier[opts] ))
identifier[command] . identifier[append] ( identifier[cwd] )
keyword[return] identifier[_git_run] ( identifier[command] ,
identifier[user] = identifier[user] ,
identifier[password] = identifier[password] ,
identifier[ignore_retcode] = identifier[ignore_retcode] ,
identifier[output_encoding] = identifier[output_encoding] )[ literal[string] ]
|
def init(cwd, bare=False, template=None, separate_git_dir=None, shared=None, opts='', git_opts='', user=None, password=None, ignore_retcode=False, output_encoding=None):
"""
Interface to `git-init(1)`_
cwd
The path to the directory to be initialized
bare : False
If ``True``, init a bare repository
.. versionadded:: 2015.8.0
template
Set this argument to specify an alternate `template directory`_
.. versionadded:: 2015.8.0
separate_git_dir
Set this argument to specify an alternate ``$GIT_DIR``
.. versionadded:: 2015.8.0
shared
Set sharing permissions on git repo. See `git-init(1)`_ for more
details.
.. versionadded:: 2015.8.0
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
git_opts
Any additional options to add to git command itself (not the ``init``
subcommand), in a single string. This is useful for passing ``-c`` to
run git with temporary changes to the git configuration.
.. versionadded:: 2017.7.0
.. note::
This is only supported in git 1.7.2 and newer.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
password
Windows only. Required when specifying ``user``. This parameter will be
ignored on non-Windows platforms.
.. versionadded:: 2016.3.4
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
output_encoding
Use this option to specify which encoding to use to decode the output
from any git commands which are run. This should not be needed in most
cases.
.. note::
This should only be needed if the files in the repository were
created with filenames using an encoding other than UTF-8 to handle
Unicode characters.
.. versionadded:: 2018.3.1
.. _`git-init(1)`: http://git-scm.com/docs/git-init
.. _`template directory`: http://git-scm.com/docs/git-init#_template_directory
CLI Examples:
.. code-block:: bash
salt myminion git.init /path/to/repo
# Init a bare repo (before 2015.8.0)
salt myminion git.init /path/to/bare/repo.git opts='--bare'
# Init a bare repo (2015.8.0 and later)
salt myminion git.init /path/to/bare/repo.git bare=True
"""
cwd = _expand_path(cwd, user)
command = ['git'] + _format_git_opts(git_opts)
command.append('init')
if bare:
command.append('--bare') # depends on [control=['if'], data=[]]
if template is not None:
command.append('--template={0}'.format(template)) # depends on [control=['if'], data=['template']]
if separate_git_dir is not None:
command.append('--separate-git-dir={0}'.format(separate_git_dir)) # depends on [control=['if'], data=['separate_git_dir']]
if shared is not None:
if isinstance(shared, six.integer_types) and (not isinstance(shared, bool)):
shared = '0' + six.text_type(shared) # depends on [control=['if'], data=[]]
elif not isinstance(shared, six.string_types):
# Using lower here because booleans would be capitalized when
# converted to a string.
shared = six.text_type(shared).lower() # depends on [control=['if'], data=[]]
command.append('--shared={0}'.format(shared)) # depends on [control=['if'], data=['shared']]
command.extend(_format_opts(opts))
command.append(cwd)
return _git_run(command, user=user, password=password, ignore_retcode=ignore_retcode, output_encoding=output_encoding)['stdout']
|
def assure_entity(fnc):
"""
Converts an entityID passed as the entity to a CloudMonitorEntity object.
"""
@wraps(fnc)
def _wrapped(self, entity, *args, **kwargs):
if not isinstance(entity, CloudMonitorEntity):
# Must be the ID
entity = self._entity_manager.get(entity)
return fnc(self, entity, *args, **kwargs)
return _wrapped
|
def function[assure_entity, parameter[fnc]]:
constant[
Converts an entityID passed as the entity to a CloudMonitorEntity object.
]
def function[_wrapped, parameter[self, entity]]:
if <ast.UnaryOp object at 0x7da1b056c2e0> begin[:]
variable[entity] assign[=] call[name[self]._entity_manager.get, parameter[name[entity]]]
return[call[name[fnc], parameter[name[self], name[entity], <ast.Starred object at 0x7da1b056e3b0>]]]
return[name[_wrapped]]
|
keyword[def] identifier[assure_entity] ( identifier[fnc] ):
literal[string]
@ identifier[wraps] ( identifier[fnc] )
keyword[def] identifier[_wrapped] ( identifier[self] , identifier[entity] ,* identifier[args] ,** identifier[kwargs] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[entity] , identifier[CloudMonitorEntity] ):
identifier[entity] = identifier[self] . identifier[_entity_manager] . identifier[get] ( identifier[entity] )
keyword[return] identifier[fnc] ( identifier[self] , identifier[entity] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[_wrapped]
|
def assure_entity(fnc):
"""
Converts an entityID passed as the entity to a CloudMonitorEntity object.
"""
@wraps(fnc)
def _wrapped(self, entity, *args, **kwargs):
if not isinstance(entity, CloudMonitorEntity):
# Must be the ID
entity = self._entity_manager.get(entity) # depends on [control=['if'], data=[]]
return fnc(self, entity, *args, **kwargs)
return _wrapped
|
def set_symbol(self, symbol):
"""(symbol, bondorder) -> set the bondsymbol
of the molecule"""
raise "Deprecated"
self.symbol, self.bondtype, bondorder, self.equiv_class = \
BONDLOOKUP[symbol]
if self.bondtype == 4:
self.aromatic = 1
else:
self.aromatic = 0
|
def function[set_symbol, parameter[self, symbol]]:
constant[(symbol, bondorder) -> set the bondsymbol
of the molecule]
<ast.Raise object at 0x7da18ede5cf0>
<ast.Tuple object at 0x7da18ede72e0> assign[=] call[name[BONDLOOKUP]][name[symbol]]
if compare[name[self].bondtype equal[==] constant[4]] begin[:]
name[self].aromatic assign[=] constant[1]
|
keyword[def] identifier[set_symbol] ( identifier[self] , identifier[symbol] ):
literal[string]
keyword[raise] literal[string]
identifier[self] . identifier[symbol] , identifier[self] . identifier[bondtype] , identifier[bondorder] , identifier[self] . identifier[equiv_class] = identifier[BONDLOOKUP] [ identifier[symbol] ]
keyword[if] identifier[self] . identifier[bondtype] == literal[int] :
identifier[self] . identifier[aromatic] = literal[int]
keyword[else] :
identifier[self] . identifier[aromatic] = literal[int]
|
def set_symbol(self, symbol):
"""(symbol, bondorder) -> set the bondsymbol
of the molecule"""
raise 'Deprecated'
(self.symbol, self.bondtype, bondorder, self.equiv_class) = BONDLOOKUP[symbol]
if self.bondtype == 4:
self.aromatic = 1 # depends on [control=['if'], data=[]]
else:
self.aromatic = 0
|
def set_monthly(self, interval, *, day_of_month=None, days_of_week=None,
index=None, **kwargs):
""" Set to repeat every month on specified days for every x no. of days
:param int interval: no. of days to repeat at
:param int day_of_month: repeat day of a month
:param list[str] days_of_week: list of days of the week to repeat
:param index: index
:keyword date start: Start date of repetition (kwargs)
:keyword date end: End date of repetition (kwargs)
:keyword int occurrences: no of occurrences (kwargs)
"""
if not day_of_month and not days_of_week:
raise ValueError('Must provide day_of_month or days_of_week values')
if day_of_month and days_of_week:
raise ValueError('Must provide only one of the two options')
self.set_daily(interval, **kwargs)
if day_of_month:
self.__day_of_month = day_of_month
elif days_of_week:
self.__days_of_week = set(days_of_week)
if index:
self.__index = index
|
def function[set_monthly, parameter[self, interval]]:
constant[ Set to repeat every month on specified days for every x no. of days
:param int interval: no. of days to repeat at
:param int day_of_month: repeat day of a month
:param list[str] days_of_week: list of days of the week to repeat
:param index: index
:keyword date start: Start date of repetition (kwargs)
:keyword date end: End date of repetition (kwargs)
:keyword int occurrences: no of occurrences (kwargs)
]
if <ast.BoolOp object at 0x7da1b1b2b310> begin[:]
<ast.Raise object at 0x7da1b1b286d0>
if <ast.BoolOp object at 0x7da1b1b0ff40> begin[:]
<ast.Raise object at 0x7da1b1b0fe80>
call[name[self].set_daily, parameter[name[interval]]]
if name[day_of_month] begin[:]
name[self].__day_of_month assign[=] name[day_of_month]
|
keyword[def] identifier[set_monthly] ( identifier[self] , identifier[interval] ,*, identifier[day_of_month] = keyword[None] , identifier[days_of_week] = keyword[None] ,
identifier[index] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[day_of_month] keyword[and] keyword[not] identifier[days_of_week] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[day_of_month] keyword[and] identifier[days_of_week] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[set_daily] ( identifier[interval] ,** identifier[kwargs] )
keyword[if] identifier[day_of_month] :
identifier[self] . identifier[__day_of_month] = identifier[day_of_month]
keyword[elif] identifier[days_of_week] :
identifier[self] . identifier[__days_of_week] = identifier[set] ( identifier[days_of_week] )
keyword[if] identifier[index] :
identifier[self] . identifier[__index] = identifier[index]
|
def set_monthly(self, interval, *, day_of_month=None, days_of_week=None, index=None, **kwargs):
""" Set to repeat every month on specified days for every x no. of days
:param int interval: no. of days to repeat at
:param int day_of_month: repeat day of a month
:param list[str] days_of_week: list of days of the week to repeat
:param index: index
:keyword date start: Start date of repetition (kwargs)
:keyword date end: End date of repetition (kwargs)
:keyword int occurrences: no of occurrences (kwargs)
"""
if not day_of_month and (not days_of_week):
raise ValueError('Must provide day_of_month or days_of_week values') # depends on [control=['if'], data=[]]
if day_of_month and days_of_week:
raise ValueError('Must provide only one of the two options') # depends on [control=['if'], data=[]]
self.set_daily(interval, **kwargs)
if day_of_month:
self.__day_of_month = day_of_month # depends on [control=['if'], data=[]]
elif days_of_week:
self.__days_of_week = set(days_of_week)
if index:
self.__index = index # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
|
def table_extract(self, table_name, destination, format='CSV', compress=True,
field_delimiter=',', print_header=True):
"""Exports the table to GCS.
Args:
table_name: the name of the table as a tuple of components.
destination: the destination URI(s). Can be a single URI or a list.
format: the format to use for the exported data; one of CSV, NEWLINE_DELIMITED_JSON or AVRO.
Defaults to CSV.
compress: whether to compress the data on export. Compression is not supported for
AVRO format. Defaults to False.
field_delimiter: for CSV exports, the field delimiter to use. Defaults to ','
print_header: for CSV exports, whether to include an initial header line. Default true.
Returns:
A parsed result object.
Raises:
Exception if there is an error performing the operation.
"""
url = Api._ENDPOINT + (Api._JOBS_PATH % (table_name.project_id, ''))
if isinstance(destination, basestring):
destination = [destination]
data = {
# 'projectId': table_name.project_id, # Code sample shows this but it is not in job
# reference spec. Filed as b/19235843
'kind': 'bigquery#job',
'configuration': {
'extract': {
'sourceTable': {
'projectId': table_name.project_id,
'datasetId': table_name.dataset_id,
'tableId': table_name.table_id,
},
'compression': 'GZIP' if compress else 'NONE',
'fieldDelimiter': field_delimiter,
'printHeader': print_header,
'destinationUris': destination,
'destinationFormat': format,
}
}
}
return datalab.utils.Http.request(url, data=data, credentials=self._credentials)
|
def function[table_extract, parameter[self, table_name, destination, format, compress, field_delimiter, print_header]]:
constant[Exports the table to GCS.
Args:
table_name: the name of the table as a tuple of components.
destination: the destination URI(s). Can be a single URI or a list.
format: the format to use for the exported data; one of CSV, NEWLINE_DELIMITED_JSON or AVRO.
Defaults to CSV.
compress: whether to compress the data on export. Compression is not supported for
AVRO format. Defaults to False.
field_delimiter: for CSV exports, the field delimiter to use. Defaults to ','
print_header: for CSV exports, whether to include an initial header line. Default true.
Returns:
A parsed result object.
Raises:
Exception if there is an error performing the operation.
]
variable[url] assign[=] binary_operation[name[Api]._ENDPOINT + binary_operation[name[Api]._JOBS_PATH <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18f00e4a0>, <ast.Constant object at 0x7da18f00c130>]]]]
if call[name[isinstance], parameter[name[destination], name[basestring]]] begin[:]
variable[destination] assign[=] list[[<ast.Name object at 0x7da18f00f6a0>]]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18f00f6d0>, <ast.Constant object at 0x7da18f00fac0>], [<ast.Constant object at 0x7da18f00ee90>, <ast.Dict object at 0x7da18f00ecb0>]]
return[call[name[datalab].utils.Http.request, parameter[name[url]]]]
|
keyword[def] identifier[table_extract] ( identifier[self] , identifier[table_name] , identifier[destination] , identifier[format] = literal[string] , identifier[compress] = keyword[True] ,
identifier[field_delimiter] = literal[string] , identifier[print_header] = keyword[True] ):
literal[string]
identifier[url] = identifier[Api] . identifier[_ENDPOINT] +( identifier[Api] . identifier[_JOBS_PATH] %( identifier[table_name] . identifier[project_id] , literal[string] ))
keyword[if] identifier[isinstance] ( identifier[destination] , identifier[basestring] ):
identifier[destination] =[ identifier[destination] ]
identifier[data] ={
literal[string] : literal[string] ,
literal[string] :{
literal[string] :{
literal[string] :{
literal[string] : identifier[table_name] . identifier[project_id] ,
literal[string] : identifier[table_name] . identifier[dataset_id] ,
literal[string] : identifier[table_name] . identifier[table_id] ,
},
literal[string] : literal[string] keyword[if] identifier[compress] keyword[else] literal[string] ,
literal[string] : identifier[field_delimiter] ,
literal[string] : identifier[print_header] ,
literal[string] : identifier[destination] ,
literal[string] : identifier[format] ,
}
}
}
keyword[return] identifier[datalab] . identifier[utils] . identifier[Http] . identifier[request] ( identifier[url] , identifier[data] = identifier[data] , identifier[credentials] = identifier[self] . identifier[_credentials] )
|
def table_extract(self, table_name, destination, format='CSV', compress=True, field_delimiter=',', print_header=True):
"""Exports the table to GCS.
Args:
table_name: the name of the table as a tuple of components.
destination: the destination URI(s). Can be a single URI or a list.
format: the format to use for the exported data; one of CSV, NEWLINE_DELIMITED_JSON or AVRO.
Defaults to CSV.
compress: whether to compress the data on export. Compression is not supported for
AVRO format. Defaults to False.
field_delimiter: for CSV exports, the field delimiter to use. Defaults to ','
print_header: for CSV exports, whether to include an initial header line. Default true.
Returns:
A parsed result object.
Raises:
Exception if there is an error performing the operation.
"""
url = Api._ENDPOINT + Api._JOBS_PATH % (table_name.project_id, '')
if isinstance(destination, basestring):
destination = [destination] # depends on [control=['if'], data=[]]
# 'projectId': table_name.project_id, # Code sample shows this but it is not in job
# reference spec. Filed as b/19235843
data = {'kind': 'bigquery#job', 'configuration': {'extract': {'sourceTable': {'projectId': table_name.project_id, 'datasetId': table_name.dataset_id, 'tableId': table_name.table_id}, 'compression': 'GZIP' if compress else 'NONE', 'fieldDelimiter': field_delimiter, 'printHeader': print_header, 'destinationUris': destination, 'destinationFormat': format}}}
return datalab.utils.Http.request(url, data=data, credentials=self._credentials)
|
def restore_row(self, row, schema):
"""Restore row from BigQuery
"""
for index, field in enumerate(schema.fields):
if field.type == 'datetime':
row[index] = parse(row[index])
if field.type == 'date':
row[index] = parse(row[index]).date()
if field.type == 'time':
row[index] = parse(row[index]).time()
return schema.cast_row(row)
|
def function[restore_row, parameter[self, row, schema]]:
constant[Restore row from BigQuery
]
for taget[tuple[[<ast.Name object at 0x7da1b262a8c0>, <ast.Name object at 0x7da1b262ac80>]]] in starred[call[name[enumerate], parameter[name[schema].fields]]] begin[:]
if compare[name[field].type equal[==] constant[datetime]] begin[:]
call[name[row]][name[index]] assign[=] call[name[parse], parameter[call[name[row]][name[index]]]]
if compare[name[field].type equal[==] constant[date]] begin[:]
call[name[row]][name[index]] assign[=] call[call[name[parse], parameter[call[name[row]][name[index]]]].date, parameter[]]
if compare[name[field].type equal[==] constant[time]] begin[:]
call[name[row]][name[index]] assign[=] call[call[name[parse], parameter[call[name[row]][name[index]]]].time, parameter[]]
return[call[name[schema].cast_row, parameter[name[row]]]]
|
keyword[def] identifier[restore_row] ( identifier[self] , identifier[row] , identifier[schema] ):
literal[string]
keyword[for] identifier[index] , identifier[field] keyword[in] identifier[enumerate] ( identifier[schema] . identifier[fields] ):
keyword[if] identifier[field] . identifier[type] == literal[string] :
identifier[row] [ identifier[index] ]= identifier[parse] ( identifier[row] [ identifier[index] ])
keyword[if] identifier[field] . identifier[type] == literal[string] :
identifier[row] [ identifier[index] ]= identifier[parse] ( identifier[row] [ identifier[index] ]). identifier[date] ()
keyword[if] identifier[field] . identifier[type] == literal[string] :
identifier[row] [ identifier[index] ]= identifier[parse] ( identifier[row] [ identifier[index] ]). identifier[time] ()
keyword[return] identifier[schema] . identifier[cast_row] ( identifier[row] )
|
def restore_row(self, row, schema):
"""Restore row from BigQuery
"""
for (index, field) in enumerate(schema.fields):
if field.type == 'datetime':
row[index] = parse(row[index]) # depends on [control=['if'], data=[]]
if field.type == 'date':
row[index] = parse(row[index]).date() # depends on [control=['if'], data=[]]
if field.type == 'time':
row[index] = parse(row[index]).time() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return schema.cast_row(row)
|
def get_language_details(self, language):
"""Get user's status about a language."""
for lang in self.user_data.languages:
if language == lang['language_string']:
return lang
return {}
|
def function[get_language_details, parameter[self, language]]:
constant[Get user's status about a language.]
for taget[name[lang]] in starred[name[self].user_data.languages] begin[:]
if compare[name[language] equal[==] call[name[lang]][constant[language_string]]] begin[:]
return[name[lang]]
return[dictionary[[], []]]
|
keyword[def] identifier[get_language_details] ( identifier[self] , identifier[language] ):
literal[string]
keyword[for] identifier[lang] keyword[in] identifier[self] . identifier[user_data] . identifier[languages] :
keyword[if] identifier[language] == identifier[lang] [ literal[string] ]:
keyword[return] identifier[lang]
keyword[return] {}
|
def get_language_details(self, language):
"""Get user's status about a language."""
for lang in self.user_data.languages:
if language == lang['language_string']:
return lang # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['lang']]
return {}
|
def make_union(*transformers, **kwargs):
"""Construct a FeatureUnion from the given transformers.
This is a shorthand for the FeatureUnion constructor; it does not require,
and does not permit, naming the transformers. Instead, they will be given
names automatically based on their types. It also does not allow weighting.
Parameters
----------
*transformers : list of estimators
n_jobs : int, optional
Number of jobs to run in parallel (default 1).
Returns
-------
f : FeatureUnion
Examples
--------
>>> from sklearn.decomposition import PCA, TruncatedSVD
>>> from sklearn.pipeline import make_union
>>> make_union(PCA(), TruncatedSVD()) # doctest: +NORMALIZE_WHITESPACE
FeatureUnion(n_jobs=1,
transformer_list=[('pca',
PCA(copy=True, iterated_power='auto',
n_components=None, random_state=None,
svd_solver='auto', tol=0.0, whiten=False)),
('truncatedsvd',
TruncatedSVD(algorithm='randomized',
n_components=2, n_iter=5,
random_state=None, tol=0.0))],
transformer_weights=None)
"""
n_jobs = kwargs.pop('n_jobs', 1)
concatenate = kwargs.pop('concatenate', True)
if kwargs:
# We do not currently support `transformer_weights` as we may want to
# change its type spec in make_union
raise TypeError('Unknown keyword arguments: "{}"'
.format(list(kwargs.keys())[0]))
return FeatureUnion(_name_estimators(transformers), n_jobs= n_jobs, concatenate= concatenate)
|
def function[make_union, parameter[]]:
constant[Construct a FeatureUnion from the given transformers.
This is a shorthand for the FeatureUnion constructor; it does not require,
and does not permit, naming the transformers. Instead, they will be given
names automatically based on their types. It also does not allow weighting.
Parameters
----------
*transformers : list of estimators
n_jobs : int, optional
Number of jobs to run in parallel (default 1).
Returns
-------
f : FeatureUnion
Examples
--------
>>> from sklearn.decomposition import PCA, TruncatedSVD
>>> from sklearn.pipeline import make_union
>>> make_union(PCA(), TruncatedSVD()) # doctest: +NORMALIZE_WHITESPACE
FeatureUnion(n_jobs=1,
transformer_list=[('pca',
PCA(copy=True, iterated_power='auto',
n_components=None, random_state=None,
svd_solver='auto', tol=0.0, whiten=False)),
('truncatedsvd',
TruncatedSVD(algorithm='randomized',
n_components=2, n_iter=5,
random_state=None, tol=0.0))],
transformer_weights=None)
]
variable[n_jobs] assign[=] call[name[kwargs].pop, parameter[constant[n_jobs], constant[1]]]
variable[concatenate] assign[=] call[name[kwargs].pop, parameter[constant[concatenate], constant[True]]]
if name[kwargs] begin[:]
<ast.Raise object at 0x7da1b1172740>
return[call[name[FeatureUnion], parameter[call[name[_name_estimators], parameter[name[transformers]]]]]]
|
keyword[def] identifier[make_union] (* identifier[transformers] ,** identifier[kwargs] ):
literal[string]
identifier[n_jobs] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[int] )
identifier[concatenate] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[True] )
keyword[if] identifier[kwargs] :
keyword[raise] identifier[TypeError] ( literal[string]
. identifier[format] ( identifier[list] ( identifier[kwargs] . identifier[keys] ())[ literal[int] ]))
keyword[return] identifier[FeatureUnion] ( identifier[_name_estimators] ( identifier[transformers] ), identifier[n_jobs] = identifier[n_jobs] , identifier[concatenate] = identifier[concatenate] )
|
def make_union(*transformers, **kwargs):
"""Construct a FeatureUnion from the given transformers.
This is a shorthand for the FeatureUnion constructor; it does not require,
and does not permit, naming the transformers. Instead, they will be given
names automatically based on their types. It also does not allow weighting.
Parameters
----------
*transformers : list of estimators
n_jobs : int, optional
Number of jobs to run in parallel (default 1).
Returns
-------
f : FeatureUnion
Examples
--------
>>> from sklearn.decomposition import PCA, TruncatedSVD
>>> from sklearn.pipeline import make_union
>>> make_union(PCA(), TruncatedSVD()) # doctest: +NORMALIZE_WHITESPACE
FeatureUnion(n_jobs=1,
transformer_list=[('pca',
PCA(copy=True, iterated_power='auto',
n_components=None, random_state=None,
svd_solver='auto', tol=0.0, whiten=False)),
('truncatedsvd',
TruncatedSVD(algorithm='randomized',
n_components=2, n_iter=5,
random_state=None, tol=0.0))],
transformer_weights=None)
"""
n_jobs = kwargs.pop('n_jobs', 1)
concatenate = kwargs.pop('concatenate', True)
if kwargs:
# We do not currently support `transformer_weights` as we may want to
# change its type spec in make_union
raise TypeError('Unknown keyword arguments: "{}"'.format(list(kwargs.keys())[0])) # depends on [control=['if'], data=[]]
return FeatureUnion(_name_estimators(transformers), n_jobs=n_jobs, concatenate=concatenate)
|
def delete_admin(self, account_id, user_id, role):
"""
Remove an account admin role from a user.
https://canvas.instructure.com/doc/api/admins.html#method.admins.destroy
"""
url = ADMINS_API.format(account_id) + "/{}?role={}".format(
user_id, quote(role))
response = self._delete_resource(url)
return True
|
def function[delete_admin, parameter[self, account_id, user_id, role]]:
constant[
Remove an account admin role from a user.
https://canvas.instructure.com/doc/api/admins.html#method.admins.destroy
]
variable[url] assign[=] binary_operation[call[name[ADMINS_API].format, parameter[name[account_id]]] + call[constant[/{}?role={}].format, parameter[name[user_id], call[name[quote], parameter[name[role]]]]]]
variable[response] assign[=] call[name[self]._delete_resource, parameter[name[url]]]
return[constant[True]]
|
keyword[def] identifier[delete_admin] ( identifier[self] , identifier[account_id] , identifier[user_id] , identifier[role] ):
literal[string]
identifier[url] = identifier[ADMINS_API] . identifier[format] ( identifier[account_id] )+ literal[string] . identifier[format] (
identifier[user_id] , identifier[quote] ( identifier[role] ))
identifier[response] = identifier[self] . identifier[_delete_resource] ( identifier[url] )
keyword[return] keyword[True]
|
def delete_admin(self, account_id, user_id, role):
"""
Remove an account admin role from a user.
https://canvas.instructure.com/doc/api/admins.html#method.admins.destroy
"""
url = ADMINS_API.format(account_id) + '/{}?role={}'.format(user_id, quote(role))
response = self._delete_resource(url)
return True
|
def stat(self, path):
""" safely gets the Znode's Stat """
try:
stat = self.exists(str(path))
except (NoNodeError, NoAuthError):
stat = None
return stat
|
def function[stat, parameter[self, path]]:
constant[ safely gets the Znode's Stat ]
<ast.Try object at 0x7da18f00ecb0>
return[name[stat]]
|
keyword[def] identifier[stat] ( identifier[self] , identifier[path] ):
literal[string]
keyword[try] :
identifier[stat] = identifier[self] . identifier[exists] ( identifier[str] ( identifier[path] ))
keyword[except] ( identifier[NoNodeError] , identifier[NoAuthError] ):
identifier[stat] = keyword[None]
keyword[return] identifier[stat]
|
def stat(self, path):
""" safely gets the Znode's Stat """
try:
stat = self.exists(str(path)) # depends on [control=['try'], data=[]]
except (NoNodeError, NoAuthError):
stat = None # depends on [control=['except'], data=[]]
return stat
|
def sqs_create_queue(queue_name, options=None, client=None):
"""
This creates an SQS queue.
Parameters
----------
queue_name : str
The name of the queue to create.
options : dict or None
A dict of options indicate extra attributes the queue should have.
See the SQS docs for details. If None, no custom attributes will be
attached to the queue.
client : boto3.Client or None
If None, this function will instantiate a new `boto3.Client` object to
use in its operations. Alternatively, pass in an existing `boto3.Client`
instance to re-use it here.
Returns
-------
dict
This returns a dict of the form::
{'url': SQS URL of the queue,
'name': name of the queue}
"""
if not client:
client = boto3.client('sqs')
try:
if isinstance(options, dict):
resp = client.create_queue(QueueName=queue_name, Attributes=options)
else:
resp = client.create_queue(QueueName=queue_name)
if resp is not None:
return {'url':resp['QueueUrl'],
'name':queue_name}
else:
LOGERROR('could not create the specified queue: %s with options: %s'
% (queue_name, options))
return None
except Exception as e:
LOGEXCEPTION('could not create the specified queue: %s with options: %s'
% (queue_name, options))
return None
|
def function[sqs_create_queue, parameter[queue_name, options, client]]:
constant[
This creates an SQS queue.
Parameters
----------
queue_name : str
The name of the queue to create.
options : dict or None
A dict of options indicate extra attributes the queue should have.
See the SQS docs for details. If None, no custom attributes will be
attached to the queue.
client : boto3.Client or None
If None, this function will instantiate a new `boto3.Client` object to
use in its operations. Alternatively, pass in an existing `boto3.Client`
instance to re-use it here.
Returns
-------
dict
This returns a dict of the form::
{'url': SQS URL of the queue,
'name': name of the queue}
]
if <ast.UnaryOp object at 0x7da1b0036320> begin[:]
variable[client] assign[=] call[name[boto3].client, parameter[constant[sqs]]]
<ast.Try object at 0x7da1b0036da0>
|
keyword[def] identifier[sqs_create_queue] ( identifier[queue_name] , identifier[options] = keyword[None] , identifier[client] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[client] :
identifier[client] = identifier[boto3] . identifier[client] ( literal[string] )
keyword[try] :
keyword[if] identifier[isinstance] ( identifier[options] , identifier[dict] ):
identifier[resp] = identifier[client] . identifier[create_queue] ( identifier[QueueName] = identifier[queue_name] , identifier[Attributes] = identifier[options] )
keyword[else] :
identifier[resp] = identifier[client] . identifier[create_queue] ( identifier[QueueName] = identifier[queue_name] )
keyword[if] identifier[resp] keyword[is] keyword[not] keyword[None] :
keyword[return] { literal[string] : identifier[resp] [ literal[string] ],
literal[string] : identifier[queue_name] }
keyword[else] :
identifier[LOGERROR] ( literal[string]
%( identifier[queue_name] , identifier[options] ))
keyword[return] keyword[None]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[LOGEXCEPTION] ( literal[string]
%( identifier[queue_name] , identifier[options] ))
keyword[return] keyword[None]
|
def sqs_create_queue(queue_name, options=None, client=None):
"""
This creates an SQS queue.
Parameters
----------
queue_name : str
The name of the queue to create.
options : dict or None
A dict of options indicate extra attributes the queue should have.
See the SQS docs for details. If None, no custom attributes will be
attached to the queue.
client : boto3.Client or None
If None, this function will instantiate a new `boto3.Client` object to
use in its operations. Alternatively, pass in an existing `boto3.Client`
instance to re-use it here.
Returns
-------
dict
This returns a dict of the form::
{'url': SQS URL of the queue,
'name': name of the queue}
"""
if not client:
client = boto3.client('sqs') # depends on [control=['if'], data=[]]
try:
if isinstance(options, dict):
resp = client.create_queue(QueueName=queue_name, Attributes=options) # depends on [control=['if'], data=[]]
else:
resp = client.create_queue(QueueName=queue_name)
if resp is not None:
return {'url': resp['QueueUrl'], 'name': queue_name} # depends on [control=['if'], data=['resp']]
else:
LOGERROR('could not create the specified queue: %s with options: %s' % (queue_name, options))
return None # depends on [control=['try'], data=[]]
except Exception as e:
LOGEXCEPTION('could not create the specified queue: %s with options: %s' % (queue_name, options))
return None # depends on [control=['except'], data=[]]
|
def _update(self):
'''
Calculate heading and distance from dx and dy
'''
try:
theta_radians = math.atan(float(self.dy)/self.dx)
except ZeroDivisionError:
if self.dy > 0: theta_radians = 0.5*math.pi
elif self.dy < 0: theta_radians = 1.5*math.pi
self.magnitude = self.dy
else:
self.magnitude = 1./(math.cos(theta_radians))*self.dx
theta = math.degrees(theta_radians)
self.heading = self._angle_or_heading(theta)
|
def function[_update, parameter[self]]:
constant[
Calculate heading and distance from dx and dy
]
<ast.Try object at 0x7da18f09df90>
variable[theta] assign[=] call[name[math].degrees, parameter[name[theta_radians]]]
name[self].heading assign[=] call[name[self]._angle_or_heading, parameter[name[theta]]]
|
keyword[def] identifier[_update] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[theta_radians] = identifier[math] . identifier[atan] ( identifier[float] ( identifier[self] . identifier[dy] )/ identifier[self] . identifier[dx] )
keyword[except] identifier[ZeroDivisionError] :
keyword[if] identifier[self] . identifier[dy] > literal[int] : identifier[theta_radians] = literal[int] * identifier[math] . identifier[pi]
keyword[elif] identifier[self] . identifier[dy] < literal[int] : identifier[theta_radians] = literal[int] * identifier[math] . identifier[pi]
identifier[self] . identifier[magnitude] = identifier[self] . identifier[dy]
keyword[else] :
identifier[self] . identifier[magnitude] = literal[int] /( identifier[math] . identifier[cos] ( identifier[theta_radians] ))* identifier[self] . identifier[dx]
identifier[theta] = identifier[math] . identifier[degrees] ( identifier[theta_radians] )
identifier[self] . identifier[heading] = identifier[self] . identifier[_angle_or_heading] ( identifier[theta] )
|
def _update(self):
"""
Calculate heading and distance from dx and dy
"""
try:
theta_radians = math.atan(float(self.dy) / self.dx) # depends on [control=['try'], data=[]]
except ZeroDivisionError:
if self.dy > 0:
theta_radians = 0.5 * math.pi # depends on [control=['if'], data=[]]
elif self.dy < 0:
theta_radians = 1.5 * math.pi # depends on [control=['if'], data=[]]
self.magnitude = self.dy # depends on [control=['except'], data=[]]
else:
self.magnitude = 1.0 / math.cos(theta_radians) * self.dx
theta = math.degrees(theta_radians)
self.heading = self._angle_or_heading(theta)
|
def overlapped_convolution(bin_template, bin_image,
tollerance=0.5, splits=(4, 2)):
"""
As each of these images are hold only binary values, and RFFT2 works on
float64 greyscale values, we can make the convolution more efficient by
breaking the image up into :splits: sectons. Each one of these sections
then has its greyscale value adjusted and then stacked.
We then apply the convolution to this 'stack' of images, and adjust the
resultant position matches.
"""
th, tw = bin_template.shape
ih, iw = bin_image.shape
hs, ws = splits
h = ih // hs
w = iw // ws
count = numpy.count_nonzero(bin_template)
assert count > 0
assert h >= th
assert w >= tw
yoffset = [(i * h, ((i + 1) * h) + (th - 1)) for i in range(hs)]
xoffset = [(i * w, ((i + 1) * w) + (tw - 1)) for i in range(ws)]
# image_stacks is Origin (x,y), array, z (height in stack)
image_stacks = [((x1, y1), bin_image[y1:y2, x1:x2], float((count + 1) ** (num)))
for num, (x1, x2, y1, y2) in
enumerate((x1, x2, y1, y2) for (x1, x2)
in xoffset for (y1, y2) in yoffset)]
pad_h = max(i.shape[0] for _, i, _ in image_stacks)
pad_w = max(i.shape[1] for _, i, _ in image_stacks)
# rfft metrics must be an even size - why ... maths?
pad_w += pad_w % 2
pad_h += pad_h % 2
overlapped_image = sum_2d_images(pad_bin_image_to_shape(i, (pad_h, pad_w))
* num for _, i, num in image_stacks)
#print "Overlap splits %r, Image Size (%d,%d),
#Overlapped Size (%d,%d)" % (splits,iw,ih,pad_w,pad_h)
# Calculate the convolution of the FFT's of the overlapped image & template
convolution_freqs = (rfft2(overlapped_image) *
rfft2(bin_template[::-1, ::-1],
overlapped_image.shape))
# Reverse the FFT to find the result overlapped image
convolution_image = irfft2(convolution_freqs)
# At this point, the maximum point in convolution_image should be the
# bottom right (why?) of the area of greatest match
results = set()
for (x, y), _, num in image_stacks[::-1]:
test = convolution_image / num
filtered = ((test >= (count - tollerance)) &
(test <= (count + tollerance)))
match_points = numpy.transpose(numpy.nonzero(filtered)) # bottom right
for (fy, fx) in match_points:
if fx < (tw - 1) or fy < (th - 1):
continue
results.add((x + fx - (tw - 1), y + fy - (th - 1)))
convolution_image %= num
return list(results)
|
def function[overlapped_convolution, parameter[bin_template, bin_image, tollerance, splits]]:
constant[
As each of these images are hold only binary values, and RFFT2 works on
float64 greyscale values, we can make the convolution more efficient by
breaking the image up into :splits: sectons. Each one of these sections
then has its greyscale value adjusted and then stacked.
We then apply the convolution to this 'stack' of images, and adjust the
resultant position matches.
]
<ast.Tuple object at 0x7da1b271c490> assign[=] name[bin_template].shape
<ast.Tuple object at 0x7da1b271d090> assign[=] name[bin_image].shape
<ast.Tuple object at 0x7da1b271da20> assign[=] name[splits]
variable[h] assign[=] binary_operation[name[ih] <ast.FloorDiv object at 0x7da2590d6bc0> name[hs]]
variable[w] assign[=] binary_operation[name[iw] <ast.FloorDiv object at 0x7da2590d6bc0> name[ws]]
variable[count] assign[=] call[name[numpy].count_nonzero, parameter[name[bin_template]]]
assert[compare[name[count] greater[>] constant[0]]]
assert[compare[name[h] greater_or_equal[>=] name[th]]]
assert[compare[name[w] greater_or_equal[>=] name[tw]]]
variable[yoffset] assign[=] <ast.ListComp object at 0x7da1b2715f90>
variable[xoffset] assign[=] <ast.ListComp object at 0x7da1b27172e0>
variable[image_stacks] assign[=] <ast.ListComp object at 0x7da1b2715cc0>
variable[pad_h] assign[=] call[name[max], parameter[<ast.GeneratorExp object at 0x7da1b27a6bf0>]]
variable[pad_w] assign[=] call[name[max], parameter[<ast.GeneratorExp object at 0x7da1b27a6ec0>]]
<ast.AugAssign object at 0x7da1b27a6380>
<ast.AugAssign object at 0x7da1b27a6560>
variable[overlapped_image] assign[=] call[name[sum_2d_images], parameter[<ast.GeneratorExp object at 0x7da1b27a6680>]]
variable[convolution_freqs] assign[=] binary_operation[call[name[rfft2], parameter[name[overlapped_image]]] * call[name[rfft2], parameter[call[name[bin_template]][tuple[[<ast.Slice object at 0x7da1b27a5c90>, <ast.Slice object at 0x7da1b27a5d50>]]], name[overlapped_image].shape]]]
variable[convolution_image] assign[=] call[name[irfft2], parameter[name[convolution_freqs]]]
variable[results] assign[=] call[name[set], parameter[]]
for taget[tuple[[<ast.Tuple object at 0x7da1b27a4e50>, <ast.Name object at 0x7da1b27a4a60>, <ast.Name object at 0x7da1b27a4a00>]]] in starred[call[name[image_stacks]][<ast.Slice object at 0x7da1b27a4df0>]] begin[:]
variable[test] assign[=] binary_operation[name[convolution_image] / name[num]]
variable[filtered] assign[=] binary_operation[compare[name[test] greater_or_equal[>=] binary_operation[name[count] - name[tollerance]]] <ast.BitAnd object at 0x7da2590d6b60> compare[name[test] less_or_equal[<=] binary_operation[name[count] + name[tollerance]]]]
variable[match_points] assign[=] call[name[numpy].transpose, parameter[call[name[numpy].nonzero, parameter[name[filtered]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b27a4550>, <ast.Name object at 0x7da1b27a4580>]]] in starred[name[match_points]] begin[:]
if <ast.BoolOp object at 0x7da1b27a4490> begin[:]
continue
call[name[results].add, parameter[tuple[[<ast.BinOp object at 0x7da1b27a58a0>, <ast.BinOp object at 0x7da1b27a5750>]]]]
<ast.AugAssign object at 0x7da1b27a4910>
return[call[name[list], parameter[name[results]]]]
|
keyword[def] identifier[overlapped_convolution] ( identifier[bin_template] , identifier[bin_image] ,
identifier[tollerance] = literal[int] , identifier[splits] =( literal[int] , literal[int] )):
literal[string]
identifier[th] , identifier[tw] = identifier[bin_template] . identifier[shape]
identifier[ih] , identifier[iw] = identifier[bin_image] . identifier[shape]
identifier[hs] , identifier[ws] = identifier[splits]
identifier[h] = identifier[ih] // identifier[hs]
identifier[w] = identifier[iw] // identifier[ws]
identifier[count] = identifier[numpy] . identifier[count_nonzero] ( identifier[bin_template] )
keyword[assert] identifier[count] > literal[int]
keyword[assert] identifier[h] >= identifier[th]
keyword[assert] identifier[w] >= identifier[tw]
identifier[yoffset] =[( identifier[i] * identifier[h] ,(( identifier[i] + literal[int] )* identifier[h] )+( identifier[th] - literal[int] )) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[hs] )]
identifier[xoffset] =[( identifier[i] * identifier[w] ,(( identifier[i] + literal[int] )* identifier[w] )+( identifier[tw] - literal[int] )) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[ws] )]
identifier[image_stacks] =[(( identifier[x1] , identifier[y1] ), identifier[bin_image] [ identifier[y1] : identifier[y2] , identifier[x1] : identifier[x2] ], identifier[float] (( identifier[count] + literal[int] )**( identifier[num] )))
keyword[for] identifier[num] ,( identifier[x1] , identifier[x2] , identifier[y1] , identifier[y2] ) keyword[in]
identifier[enumerate] (( identifier[x1] , identifier[x2] , identifier[y1] , identifier[y2] ) keyword[for] ( identifier[x1] , identifier[x2] )
keyword[in] identifier[xoffset] keyword[for] ( identifier[y1] , identifier[y2] ) keyword[in] identifier[yoffset] )]
identifier[pad_h] = identifier[max] ( identifier[i] . identifier[shape] [ literal[int] ] keyword[for] identifier[_] , identifier[i] , identifier[_] keyword[in] identifier[image_stacks] )
identifier[pad_w] = identifier[max] ( identifier[i] . identifier[shape] [ literal[int] ] keyword[for] identifier[_] , identifier[i] , identifier[_] keyword[in] identifier[image_stacks] )
identifier[pad_w] += identifier[pad_w] % literal[int]
identifier[pad_h] += identifier[pad_h] % literal[int]
identifier[overlapped_image] = identifier[sum_2d_images] ( identifier[pad_bin_image_to_shape] ( identifier[i] ,( identifier[pad_h] , identifier[pad_w] ))
* identifier[num] keyword[for] identifier[_] , identifier[i] , identifier[num] keyword[in] identifier[image_stacks] )
identifier[convolution_freqs] =( identifier[rfft2] ( identifier[overlapped_image] )*
identifier[rfft2] ( identifier[bin_template] [::- literal[int] ,::- literal[int] ],
identifier[overlapped_image] . identifier[shape] ))
identifier[convolution_image] = identifier[irfft2] ( identifier[convolution_freqs] )
identifier[results] = identifier[set] ()
keyword[for] ( identifier[x] , identifier[y] ), identifier[_] , identifier[num] keyword[in] identifier[image_stacks] [::- literal[int] ]:
identifier[test] = identifier[convolution_image] / identifier[num]
identifier[filtered] =(( identifier[test] >=( identifier[count] - identifier[tollerance] ))&
( identifier[test] <=( identifier[count] + identifier[tollerance] )))
identifier[match_points] = identifier[numpy] . identifier[transpose] ( identifier[numpy] . identifier[nonzero] ( identifier[filtered] ))
keyword[for] ( identifier[fy] , identifier[fx] ) keyword[in] identifier[match_points] :
keyword[if] identifier[fx] <( identifier[tw] - literal[int] ) keyword[or] identifier[fy] <( identifier[th] - literal[int] ):
keyword[continue]
identifier[results] . identifier[add] (( identifier[x] + identifier[fx] -( identifier[tw] - literal[int] ), identifier[y] + identifier[fy] -( identifier[th] - literal[int] )))
identifier[convolution_image] %= identifier[num]
keyword[return] identifier[list] ( identifier[results] )
|
def overlapped_convolution(bin_template, bin_image, tollerance=0.5, splits=(4, 2)):
"""
As each of these images are hold only binary values, and RFFT2 works on
float64 greyscale values, we can make the convolution more efficient by
breaking the image up into :splits: sectons. Each one of these sections
then has its greyscale value adjusted and then stacked.
We then apply the convolution to this 'stack' of images, and adjust the
resultant position matches.
"""
(th, tw) = bin_template.shape
(ih, iw) = bin_image.shape
(hs, ws) = splits
h = ih // hs
w = iw // ws
count = numpy.count_nonzero(bin_template)
assert count > 0
assert h >= th
assert w >= tw
yoffset = [(i * h, (i + 1) * h + (th - 1)) for i in range(hs)]
xoffset = [(i * w, (i + 1) * w + (tw - 1)) for i in range(ws)] # image_stacks is Origin (x,y), array, z (height in stack)
image_stacks = [((x1, y1), bin_image[y1:y2, x1:x2], float((count + 1) ** num)) for (num, (x1, x2, y1, y2)) in enumerate(((x1, x2, y1, y2) for (x1, x2) in xoffset for (y1, y2) in yoffset))]
pad_h = max((i.shape[0] for (_, i, _) in image_stacks))
pad_w = max((i.shape[1] for (_, i, _) in image_stacks)) # rfft metrics must be an even size - why ... maths?
pad_w += pad_w % 2
pad_h += pad_h % 2
overlapped_image = sum_2d_images((pad_bin_image_to_shape(i, (pad_h, pad_w)) * num for (_, i, num) in image_stacks)) #print "Overlap splits %r, Image Size (%d,%d),
#Overlapped Size (%d,%d)" % (splits,iw,ih,pad_w,pad_h)
# Calculate the convolution of the FFT's of the overlapped image & template
convolution_freqs = rfft2(overlapped_image) * rfft2(bin_template[::-1, ::-1], overlapped_image.shape) # Reverse the FFT to find the result overlapped image
convolution_image = irfft2(convolution_freqs) # At this point, the maximum point in convolution_image should be the
# bottom right (why?) of the area of greatest match
results = set()
for ((x, y), _, num) in image_stacks[::-1]:
test = convolution_image / num
filtered = (test >= count - tollerance) & (test <= count + tollerance)
match_points = numpy.transpose(numpy.nonzero(filtered)) # bottom right
for (fy, fx) in match_points:
if fx < tw - 1 or fy < th - 1:
continue # depends on [control=['if'], data=[]]
results.add((x + fx - (tw - 1), y + fy - (th - 1))) # depends on [control=['for'], data=[]]
convolution_image %= num # depends on [control=['for'], data=[]]
return list(results)
|
def debug(message, *args, **kwargs):
"""
debug output goes to stderr so you can still redirect the stdout to a file
or another program. Controlled by the JUT_DEBUG environment variable being
present
"""
if 'end' in kwargs:
end = kwargs['end']
else:
end = '\n'
if DEBUG:
if len(args) == 0:
sys.stderr.write(message)
else:
sys.stderr.write(message % args)
sys.stderr.write(end)
sys.stderr.flush()
|
def function[debug, parameter[message]]:
constant[
debug output goes to stderr so you can still redirect the stdout to a file
or another program. Controlled by the JUT_DEBUG environment variable being
present
]
if compare[constant[end] in name[kwargs]] begin[:]
variable[end] assign[=] call[name[kwargs]][constant[end]]
if name[DEBUG] begin[:]
if compare[call[name[len], parameter[name[args]]] equal[==] constant[0]] begin[:]
call[name[sys].stderr.write, parameter[name[message]]]
call[name[sys].stderr.write, parameter[name[end]]]
call[name[sys].stderr.flush, parameter[]]
|
keyword[def] identifier[debug] ( identifier[message] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[end] = identifier[kwargs] [ literal[string] ]
keyword[else] :
identifier[end] = literal[string]
keyword[if] identifier[DEBUG] :
keyword[if] identifier[len] ( identifier[args] )== literal[int] :
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[message] )
keyword[else] :
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[message] % identifier[args] )
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[end] )
identifier[sys] . identifier[stderr] . identifier[flush] ()
|
def debug(message, *args, **kwargs):
"""
debug output goes to stderr so you can still redirect the stdout to a file
or another program. Controlled by the JUT_DEBUG environment variable being
present
"""
if 'end' in kwargs:
end = kwargs['end'] # depends on [control=['if'], data=['kwargs']]
else:
end = '\n'
if DEBUG:
if len(args) == 0:
sys.stderr.write(message) # depends on [control=['if'], data=[]]
else:
sys.stderr.write(message % args)
sys.stderr.write(end)
sys.stderr.flush() # depends on [control=['if'], data=[]]
|
def process_new_post(self, bulk_mode, api_post, posts, author, post_categories, post_tags, post_media_attachments):
"""
Instantiate a new Post object using data from the WP API.
Related fields -- author, categories, tags, and attachments should be processed in advance
:param bulk_mode: If True, minimize db operations by bulk creating post objects
:param api_post: the API data for the Post
:param posts: the potentially growing list of Posts that we are processing in this run
:param author: the Author object for this Post
:param post_categories: the list of Category objects that should be linked to this Post
:param post_tags: the list of Tags objects that should be linked to this Post
:param post_media_attachments: the list of Media objects that should be attached to this Post
:return: None
"""
post = Post(site_id=self.site_id,
wp_id=api_post["ID"],
author=author,
post_date=api_post["date"],
modified=api_post["modified"],
title=api_post["title"],
url=api_post["URL"],
short_url=api_post["short_URL"],
content=api_post["content"],
excerpt=api_post["excerpt"],
slug=api_post["slug"],
guid=api_post["guid"],
status=api_post["status"],
sticky=api_post["sticky"],
password=api_post["password"],
parent=api_post["parent"],
post_type=api_post["type"],
likes_enabled=api_post["likes_enabled"],
sharing_enabled=api_post["sharing_enabled"],
like_count=api_post["like_count"],
global_ID=api_post["global_ID"],
featured_image=api_post["featured_image"],
format=api_post["format"],
menu_order=api_post["menu_order"],
metadata=api_post["metadata"],
post_thumbnail=api_post["post_thumbnail"])
posts.append(post)
# if we're not in bulk mode, go ahead and create the post in the db now
# otherwise this happens after all API posts are processed
if not bulk_mode:
self.bulk_create_posts(posts, post_categories, post_tags, post_media_attachments)
|
def function[process_new_post, parameter[self, bulk_mode, api_post, posts, author, post_categories, post_tags, post_media_attachments]]:
constant[
Instantiate a new Post object using data from the WP API.
Related fields -- author, categories, tags, and attachments should be processed in advance
:param bulk_mode: If True, minimize db operations by bulk creating post objects
:param api_post: the API data for the Post
:param posts: the potentially growing list of Posts that we are processing in this run
:param author: the Author object for this Post
:param post_categories: the list of Category objects that should be linked to this Post
:param post_tags: the list of Tags objects that should be linked to this Post
:param post_media_attachments: the list of Media objects that should be attached to this Post
:return: None
]
variable[post] assign[=] call[name[Post], parameter[]]
call[name[posts].append, parameter[name[post]]]
if <ast.UnaryOp object at 0x7da1b2255000> begin[:]
call[name[self].bulk_create_posts, parameter[name[posts], name[post_categories], name[post_tags], name[post_media_attachments]]]
|
keyword[def] identifier[process_new_post] ( identifier[self] , identifier[bulk_mode] , identifier[api_post] , identifier[posts] , identifier[author] , identifier[post_categories] , identifier[post_tags] , identifier[post_media_attachments] ):
literal[string]
identifier[post] = identifier[Post] ( identifier[site_id] = identifier[self] . identifier[site_id] ,
identifier[wp_id] = identifier[api_post] [ literal[string] ],
identifier[author] = identifier[author] ,
identifier[post_date] = identifier[api_post] [ literal[string] ],
identifier[modified] = identifier[api_post] [ literal[string] ],
identifier[title] = identifier[api_post] [ literal[string] ],
identifier[url] = identifier[api_post] [ literal[string] ],
identifier[short_url] = identifier[api_post] [ literal[string] ],
identifier[content] = identifier[api_post] [ literal[string] ],
identifier[excerpt] = identifier[api_post] [ literal[string] ],
identifier[slug] = identifier[api_post] [ literal[string] ],
identifier[guid] = identifier[api_post] [ literal[string] ],
identifier[status] = identifier[api_post] [ literal[string] ],
identifier[sticky] = identifier[api_post] [ literal[string] ],
identifier[password] = identifier[api_post] [ literal[string] ],
identifier[parent] = identifier[api_post] [ literal[string] ],
identifier[post_type] = identifier[api_post] [ literal[string] ],
identifier[likes_enabled] = identifier[api_post] [ literal[string] ],
identifier[sharing_enabled] = identifier[api_post] [ literal[string] ],
identifier[like_count] = identifier[api_post] [ literal[string] ],
identifier[global_ID] = identifier[api_post] [ literal[string] ],
identifier[featured_image] = identifier[api_post] [ literal[string] ],
identifier[format] = identifier[api_post] [ literal[string] ],
identifier[menu_order] = identifier[api_post] [ literal[string] ],
identifier[metadata] = identifier[api_post] [ literal[string] ],
identifier[post_thumbnail] = identifier[api_post] [ literal[string] ])
identifier[posts] . identifier[append] ( identifier[post] )
keyword[if] keyword[not] identifier[bulk_mode] :
identifier[self] . identifier[bulk_create_posts] ( identifier[posts] , identifier[post_categories] , identifier[post_tags] , identifier[post_media_attachments] )
|
def process_new_post(self, bulk_mode, api_post, posts, author, post_categories, post_tags, post_media_attachments):
"""
Instantiate a new Post object using data from the WP API.
Related fields -- author, categories, tags, and attachments should be processed in advance
:param bulk_mode: If True, minimize db operations by bulk creating post objects
:param api_post: the API data for the Post
:param posts: the potentially growing list of Posts that we are processing in this run
:param author: the Author object for this Post
:param post_categories: the list of Category objects that should be linked to this Post
:param post_tags: the list of Tags objects that should be linked to this Post
:param post_media_attachments: the list of Media objects that should be attached to this Post
:return: None
"""
post = Post(site_id=self.site_id, wp_id=api_post['ID'], author=author, post_date=api_post['date'], modified=api_post['modified'], title=api_post['title'], url=api_post['URL'], short_url=api_post['short_URL'], content=api_post['content'], excerpt=api_post['excerpt'], slug=api_post['slug'], guid=api_post['guid'], status=api_post['status'], sticky=api_post['sticky'], password=api_post['password'], parent=api_post['parent'], post_type=api_post['type'], likes_enabled=api_post['likes_enabled'], sharing_enabled=api_post['sharing_enabled'], like_count=api_post['like_count'], global_ID=api_post['global_ID'], featured_image=api_post['featured_image'], format=api_post['format'], menu_order=api_post['menu_order'], metadata=api_post['metadata'], post_thumbnail=api_post['post_thumbnail'])
posts.append(post)
# if we're not in bulk mode, go ahead and create the post in the db now
# otherwise this happens after all API posts are processed
if not bulk_mode:
self.bulk_create_posts(posts, post_categories, post_tags, post_media_attachments) # depends on [control=['if'], data=[]]
|
def iter_stack_frames(frames=None):
"""
Given an optional list of frames (defaults to current stack),
iterates over all frames that do not contain the ``__traceback_hide__``
local variable.
"""
if not frames:
frames = inspect.stack()[1:]
for frame, lineno in ((f[0], f[2]) for f in reversed(frames)):
f_locals = getattr(frame, 'f_locals', {})
if not _getitem_from_frame(f_locals, '__traceback_hide__'):
yield frame, lineno
|
def function[iter_stack_frames, parameter[frames]]:
constant[
Given an optional list of frames (defaults to current stack),
iterates over all frames that do not contain the ``__traceback_hide__``
local variable.
]
if <ast.UnaryOp object at 0x7da1b1726e90> begin[:]
variable[frames] assign[=] call[call[name[inspect].stack, parameter[]]][<ast.Slice object at 0x7da1b1725ea0>]
for taget[tuple[[<ast.Name object at 0x7da1b1724040>, <ast.Name object at 0x7da1b1724850>]]] in starred[<ast.GeneratorExp object at 0x7da1b1724790>] begin[:]
variable[f_locals] assign[=] call[name[getattr], parameter[name[frame], constant[f_locals], dictionary[[], []]]]
if <ast.UnaryOp object at 0x7da1b17266e0> begin[:]
<ast.Yield object at 0x7da1b1724880>
|
keyword[def] identifier[iter_stack_frames] ( identifier[frames] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[frames] :
identifier[frames] = identifier[inspect] . identifier[stack] ()[ literal[int] :]
keyword[for] identifier[frame] , identifier[lineno] keyword[in] (( identifier[f] [ literal[int] ], identifier[f] [ literal[int] ]) keyword[for] identifier[f] keyword[in] identifier[reversed] ( identifier[frames] )):
identifier[f_locals] = identifier[getattr] ( identifier[frame] , literal[string] ,{})
keyword[if] keyword[not] identifier[_getitem_from_frame] ( identifier[f_locals] , literal[string] ):
keyword[yield] identifier[frame] , identifier[lineno]
|
def iter_stack_frames(frames=None):
"""
Given an optional list of frames (defaults to current stack),
iterates over all frames that do not contain the ``__traceback_hide__``
local variable.
"""
if not frames:
frames = inspect.stack()[1:] # depends on [control=['if'], data=[]]
for (frame, lineno) in ((f[0], f[2]) for f in reversed(frames)):
f_locals = getattr(frame, 'f_locals', {})
if not _getitem_from_frame(f_locals, '__traceback_hide__'):
yield (frame, lineno) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
|
def list(self, ignore_patterns):
"""
List all files in all app storages.
"""
for storage in six.itervalues(self.storages):
if storage.exists(''): # check if storage location exists
for path in utils.get_files(storage, ignore_patterns):
yield path, storage
|
def function[list, parameter[self, ignore_patterns]]:
constant[
List all files in all app storages.
]
for taget[name[storage]] in starred[call[name[six].itervalues, parameter[name[self].storages]]] begin[:]
if call[name[storage].exists, parameter[constant[]]] begin[:]
for taget[name[path]] in starred[call[name[utils].get_files, parameter[name[storage], name[ignore_patterns]]]] begin[:]
<ast.Yield object at 0x7da1afe1af80>
|
keyword[def] identifier[list] ( identifier[self] , identifier[ignore_patterns] ):
literal[string]
keyword[for] identifier[storage] keyword[in] identifier[six] . identifier[itervalues] ( identifier[self] . identifier[storages] ):
keyword[if] identifier[storage] . identifier[exists] ( literal[string] ):
keyword[for] identifier[path] keyword[in] identifier[utils] . identifier[get_files] ( identifier[storage] , identifier[ignore_patterns] ):
keyword[yield] identifier[path] , identifier[storage]
|
def list(self, ignore_patterns):
"""
List all files in all app storages.
"""
for storage in six.itervalues(self.storages):
if storage.exists(''): # check if storage location exists
for path in utils.get_files(storage, ignore_patterns):
yield (path, storage) # depends on [control=['for'], data=['path']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['storage']]
|
def register_data(self, typename, value):
"""Registers a data product, raising if a product was already registered.
:API: public
:param typename: The type of product to register a value for.
:param value: The data product to register under `typename`.
:returns: The registered `value`.
:raises: :class:`ProductError` if a value for the given product `typename` is already
registered.
"""
if typename in self.data_products:
raise ProductError('Already have a product registered for {}, cannot over-write with {}'
.format(typename, value))
return self.safe_create_data(typename, lambda: value)
|
def function[register_data, parameter[self, typename, value]]:
constant[Registers a data product, raising if a product was already registered.
:API: public
:param typename: The type of product to register a value for.
:param value: The data product to register under `typename`.
:returns: The registered `value`.
:raises: :class:`ProductError` if a value for the given product `typename` is already
registered.
]
if compare[name[typename] in name[self].data_products] begin[:]
<ast.Raise object at 0x7da1b1e6b580>
return[call[name[self].safe_create_data, parameter[name[typename], <ast.Lambda object at 0x7da1b1e6a800>]]]
|
keyword[def] identifier[register_data] ( identifier[self] , identifier[typename] , identifier[value] ):
literal[string]
keyword[if] identifier[typename] keyword[in] identifier[self] . identifier[data_products] :
keyword[raise] identifier[ProductError] ( literal[string]
. identifier[format] ( identifier[typename] , identifier[value] ))
keyword[return] identifier[self] . identifier[safe_create_data] ( identifier[typename] , keyword[lambda] : identifier[value] )
|
def register_data(self, typename, value):
"""Registers a data product, raising if a product was already registered.
:API: public
:param typename: The type of product to register a value for.
:param value: The data product to register under `typename`.
:returns: The registered `value`.
:raises: :class:`ProductError` if a value for the given product `typename` is already
registered.
"""
if typename in self.data_products:
raise ProductError('Already have a product registered for {}, cannot over-write with {}'.format(typename, value)) # depends on [control=['if'], data=['typename']]
return self.safe_create_data(typename, lambda : value)
|
def login(request):
"""View to check the persona assertion and remember the user"""
email = verify_login(request)
request.response.headers.extend(remember(request, email))
return {'redirect': request.POST.get('came_from', '/'), 'success': True}
|
def function[login, parameter[request]]:
constant[View to check the persona assertion and remember the user]
variable[email] assign[=] call[name[verify_login], parameter[name[request]]]
call[name[request].response.headers.extend, parameter[call[name[remember], parameter[name[request], name[email]]]]]
return[dictionary[[<ast.Constant object at 0x7da18bcc8850>, <ast.Constant object at 0x7da18bcc9270>], [<ast.Call object at 0x7da18bcc87c0>, <ast.Constant object at 0x7da1b0926710>]]]
|
keyword[def] identifier[login] ( identifier[request] ):
literal[string]
identifier[email] = identifier[verify_login] ( identifier[request] )
identifier[request] . identifier[response] . identifier[headers] . identifier[extend] ( identifier[remember] ( identifier[request] , identifier[email] ))
keyword[return] { literal[string] : identifier[request] . identifier[POST] . identifier[get] ( literal[string] , literal[string] ), literal[string] : keyword[True] }
|
def login(request):
"""View to check the persona assertion and remember the user"""
email = verify_login(request)
request.response.headers.extend(remember(request, email))
return {'redirect': request.POST.get('came_from', '/'), 'success': True}
|
def write_graph(self, filename):
"""
Write raw graph data which can be post-processed using graphviz.
"""
f = open(filename, 'w')
f.write(self._get_graphviz_data())
f.close()
|
def function[write_graph, parameter[self, filename]]:
constant[
Write raw graph data which can be post-processed using graphviz.
]
variable[f] assign[=] call[name[open], parameter[name[filename], constant[w]]]
call[name[f].write, parameter[call[name[self]._get_graphviz_data, parameter[]]]]
call[name[f].close, parameter[]]
|
keyword[def] identifier[write_graph] ( identifier[self] , identifier[filename] ):
literal[string]
identifier[f] = identifier[open] ( identifier[filename] , literal[string] )
identifier[f] . identifier[write] ( identifier[self] . identifier[_get_graphviz_data] ())
identifier[f] . identifier[close] ()
|
def write_graph(self, filename):
"""
Write raw graph data which can be post-processed using graphviz.
"""
f = open(filename, 'w')
f.write(self._get_graphviz_data())
f.close()
|
def from_dict(input_dict):
"""
Instantiate an object of a derived class using the information
in input_dict (built by the to_dict method of the derived class).
More specifically, after reading the derived class from input_dict,
it calls the method _build_from_input_dict of the derived class.
Note: This method should not be overrided in the derived class. In case
it is needed, please override _build_from_input_dict instate.
:param dict input_dict: Dictionary with all the information needed to
instantiate the object.
"""
import copy
input_dict = copy.deepcopy(input_dict)
normalizer_class = input_dict.pop('class')
import GPy
normalizer_class = eval(normalizer_class)
return normalizer_class._build_from_input_dict(normalizer_class, input_dict)
|
def function[from_dict, parameter[input_dict]]:
constant[
Instantiate an object of a derived class using the information
in input_dict (built by the to_dict method of the derived class).
More specifically, after reading the derived class from input_dict,
it calls the method _build_from_input_dict of the derived class.
Note: This method should not be overrided in the derived class. In case
it is needed, please override _build_from_input_dict instate.
:param dict input_dict: Dictionary with all the information needed to
instantiate the object.
]
import module[copy]
variable[input_dict] assign[=] call[name[copy].deepcopy, parameter[name[input_dict]]]
variable[normalizer_class] assign[=] call[name[input_dict].pop, parameter[constant[class]]]
import module[GPy]
variable[normalizer_class] assign[=] call[name[eval], parameter[name[normalizer_class]]]
return[call[name[normalizer_class]._build_from_input_dict, parameter[name[normalizer_class], name[input_dict]]]]
|
keyword[def] identifier[from_dict] ( identifier[input_dict] ):
literal[string]
keyword[import] identifier[copy]
identifier[input_dict] = identifier[copy] . identifier[deepcopy] ( identifier[input_dict] )
identifier[normalizer_class] = identifier[input_dict] . identifier[pop] ( literal[string] )
keyword[import] identifier[GPy]
identifier[normalizer_class] = identifier[eval] ( identifier[normalizer_class] )
keyword[return] identifier[normalizer_class] . identifier[_build_from_input_dict] ( identifier[normalizer_class] , identifier[input_dict] )
|
def from_dict(input_dict):
"""
Instantiate an object of a derived class using the information
in input_dict (built by the to_dict method of the derived class).
More specifically, after reading the derived class from input_dict,
it calls the method _build_from_input_dict of the derived class.
Note: This method should not be overrided in the derived class. In case
it is needed, please override _build_from_input_dict instate.
:param dict input_dict: Dictionary with all the information needed to
instantiate the object.
"""
import copy
input_dict = copy.deepcopy(input_dict)
normalizer_class = input_dict.pop('class')
import GPy
normalizer_class = eval(normalizer_class)
return normalizer_class._build_from_input_dict(normalizer_class, input_dict)
|
def _get_redis_cache_opts():
'''
Return the Redis server connection details from the __opts__.
'''
return {
'host': __opts__.get('cache.redis.host', 'localhost'),
'port': __opts__.get('cache.redis.port', 6379),
'unix_socket_path': __opts__.get('cache.redis.unix_socket_path', None),
'db': __opts__.get('cache.redis.db', '0'),
'password': __opts__.get('cache.redis.password', ''),
'cluster_mode': __opts__.get('cache.redis.cluster_mode', False),
'startup_nodes': __opts__.get('cache.redis.cluster.startup_nodes', {}),
'skip_full_coverage_check': __opts__.get('cache.redis.cluster.skip_full_coverage_check', False),
}
|
def function[_get_redis_cache_opts, parameter[]]:
constant[
Return the Redis server connection details from the __opts__.
]
return[dictionary[[<ast.Constant object at 0x7da1b2036da0>, <ast.Constant object at 0x7da1b2034d30>, <ast.Constant object at 0x7da1b2034880>, <ast.Constant object at 0x7da1b20373d0>, <ast.Constant object at 0x7da1b2035090>, <ast.Constant object at 0x7da1b2036ef0>, <ast.Constant object at 0x7da1b2037940>, <ast.Constant object at 0x7da1b20353c0>], [<ast.Call object at 0x7da1b2037c40>, <ast.Call object at 0x7da1b2035c30>, <ast.Call object at 0x7da1b2034790>, <ast.Call object at 0x7da1b2037c10>, <ast.Call object at 0x7da1b20370d0>, <ast.Call object at 0x7da1b2034820>, <ast.Call object at 0x7da1b208fa30>, <ast.Call object at 0x7da1b208f160>]]]
|
keyword[def] identifier[_get_redis_cache_opts] ():
literal[string]
keyword[return] {
literal[string] : identifier[__opts__] . identifier[get] ( literal[string] , literal[string] ),
literal[string] : identifier[__opts__] . identifier[get] ( literal[string] , literal[int] ),
literal[string] : identifier[__opts__] . identifier[get] ( literal[string] , keyword[None] ),
literal[string] : identifier[__opts__] . identifier[get] ( literal[string] , literal[string] ),
literal[string] : identifier[__opts__] . identifier[get] ( literal[string] , literal[string] ),
literal[string] : identifier[__opts__] . identifier[get] ( literal[string] , keyword[False] ),
literal[string] : identifier[__opts__] . identifier[get] ( literal[string] ,{}),
literal[string] : identifier[__opts__] . identifier[get] ( literal[string] , keyword[False] ),
}
|
def _get_redis_cache_opts():
"""
Return the Redis server connection details from the __opts__.
"""
return {'host': __opts__.get('cache.redis.host', 'localhost'), 'port': __opts__.get('cache.redis.port', 6379), 'unix_socket_path': __opts__.get('cache.redis.unix_socket_path', None), 'db': __opts__.get('cache.redis.db', '0'), 'password': __opts__.get('cache.redis.password', ''), 'cluster_mode': __opts__.get('cache.redis.cluster_mode', False), 'startup_nodes': __opts__.get('cache.redis.cluster.startup_nodes', {}), 'skip_full_coverage_check': __opts__.get('cache.redis.cluster.skip_full_coverage_check', False)}
|
def write_json(filename, data):
""" Write data to JSON file
:param filename: name of JSON file to write data to
:type filename: str
:param data: data to write to JSON file
:type data: list, tuple
"""
with open(filename, 'w') as file:
json.dump(data, file, indent=4, sort_keys=True)
|
def function[write_json, parameter[filename, data]]:
constant[ Write data to JSON file
:param filename: name of JSON file to write data to
:type filename: str
:param data: data to write to JSON file
:type data: list, tuple
]
with call[name[open], parameter[name[filename], constant[w]]] begin[:]
call[name[json].dump, parameter[name[data], name[file]]]
|
keyword[def] identifier[write_json] ( identifier[filename] , identifier[data] ):
literal[string]
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[file] :
identifier[json] . identifier[dump] ( identifier[data] , identifier[file] , identifier[indent] = literal[int] , identifier[sort_keys] = keyword[True] )
|
def write_json(filename, data):
""" Write data to JSON file
:param filename: name of JSON file to write data to
:type filename: str
:param data: data to write to JSON file
:type data: list, tuple
"""
with open(filename, 'w') as file:
json.dump(data, file, indent=4, sort_keys=True) # depends on [control=['with'], data=['file']]
|
def _seed(self, seed=-1):
"""
Initialize the random seed
"""
if seed != -1:
self._random = NupicRandom(seed)
else:
self._random = NupicRandom()
|
def function[_seed, parameter[self, seed]]:
constant[
Initialize the random seed
]
if compare[name[seed] not_equal[!=] <ast.UnaryOp object at 0x7da18f720c10>] begin[:]
name[self]._random assign[=] call[name[NupicRandom], parameter[name[seed]]]
|
keyword[def] identifier[_seed] ( identifier[self] , identifier[seed] =- literal[int] ):
literal[string]
keyword[if] identifier[seed] !=- literal[int] :
identifier[self] . identifier[_random] = identifier[NupicRandom] ( identifier[seed] )
keyword[else] :
identifier[self] . identifier[_random] = identifier[NupicRandom] ()
|
def _seed(self, seed=-1):
"""
Initialize the random seed
"""
if seed != -1:
self._random = NupicRandom(seed) # depends on [control=['if'], data=['seed']]
else:
self._random = NupicRandom()
|
def escapeUnderscores(self):
"""
Escape underscores so that the markdown is correct
"""
new_metrics = []
for m in self.metrics:
m['name'] = m['name'].replace("_", "\_")
new_metrics.append(m)
self.metrics = new_metrics
|
def function[escapeUnderscores, parameter[self]]:
constant[
Escape underscores so that the markdown is correct
]
variable[new_metrics] assign[=] list[[]]
for taget[name[m]] in starred[name[self].metrics] begin[:]
call[name[m]][constant[name]] assign[=] call[call[name[m]][constant[name]].replace, parameter[constant[_], constant[\_]]]
call[name[new_metrics].append, parameter[name[m]]]
name[self].metrics assign[=] name[new_metrics]
|
keyword[def] identifier[escapeUnderscores] ( identifier[self] ):
literal[string]
identifier[new_metrics] =[]
keyword[for] identifier[m] keyword[in] identifier[self] . identifier[metrics] :
identifier[m] [ literal[string] ]= identifier[m] [ literal[string] ]. identifier[replace] ( literal[string] , literal[string] )
identifier[new_metrics] . identifier[append] ( identifier[m] )
identifier[self] . identifier[metrics] = identifier[new_metrics]
|
def escapeUnderscores(self):
"""
Escape underscores so that the markdown is correct
"""
new_metrics = []
for m in self.metrics:
m['name'] = m['name'].replace('_', '\\_')
new_metrics.append(m) # depends on [control=['for'], data=['m']]
self.metrics = new_metrics
|
def get_behaviors(brain_or_object):
"""Iterate over all behaviors that are assigned to the object
:param brain_or_object: A single catalog brain or content object
:type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
:returns: Behaviors
:rtype: list
"""
obj = get_object(brain_or_object)
if not is_dexterity_content(obj):
fail(400, "Only Dexterity contents can have assigned behaviors")
assignable = IBehaviorAssignable(obj, None)
if not assignable:
return {}
out = {}
for behavior in assignable.enumerateBehaviors():
for name, field in getFields(behavior.interface).items():
out[name] = field
return out
|
def function[get_behaviors, parameter[brain_or_object]]:
constant[Iterate over all behaviors that are assigned to the object
:param brain_or_object: A single catalog brain or content object
:type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
:returns: Behaviors
:rtype: list
]
variable[obj] assign[=] call[name[get_object], parameter[name[brain_or_object]]]
if <ast.UnaryOp object at 0x7da1b2652200> begin[:]
call[name[fail], parameter[constant[400], constant[Only Dexterity contents can have assigned behaviors]]]
variable[assignable] assign[=] call[name[IBehaviorAssignable], parameter[name[obj], constant[None]]]
if <ast.UnaryOp object at 0x7da1b2650700> begin[:]
return[dictionary[[], []]]
variable[out] assign[=] dictionary[[], []]
for taget[name[behavior]] in starred[call[name[assignable].enumerateBehaviors, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b2653ee0>, <ast.Name object at 0x7da1b2653a00>]]] in starred[call[call[name[getFields], parameter[name[behavior].interface]].items, parameter[]]] begin[:]
call[name[out]][name[name]] assign[=] name[field]
return[name[out]]
|
keyword[def] identifier[get_behaviors] ( identifier[brain_or_object] ):
literal[string]
identifier[obj] = identifier[get_object] ( identifier[brain_or_object] )
keyword[if] keyword[not] identifier[is_dexterity_content] ( identifier[obj] ):
identifier[fail] ( literal[int] , literal[string] )
identifier[assignable] = identifier[IBehaviorAssignable] ( identifier[obj] , keyword[None] )
keyword[if] keyword[not] identifier[assignable] :
keyword[return] {}
identifier[out] ={}
keyword[for] identifier[behavior] keyword[in] identifier[assignable] . identifier[enumerateBehaviors] ():
keyword[for] identifier[name] , identifier[field] keyword[in] identifier[getFields] ( identifier[behavior] . identifier[interface] ). identifier[items] ():
identifier[out] [ identifier[name] ]= identifier[field]
keyword[return] identifier[out]
|
def get_behaviors(brain_or_object):
"""Iterate over all behaviors that are assigned to the object
:param brain_or_object: A single catalog brain or content object
:type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
:returns: Behaviors
:rtype: list
"""
obj = get_object(brain_or_object)
if not is_dexterity_content(obj):
fail(400, 'Only Dexterity contents can have assigned behaviors') # depends on [control=['if'], data=[]]
assignable = IBehaviorAssignable(obj, None)
if not assignable:
return {} # depends on [control=['if'], data=[]]
out = {}
for behavior in assignable.enumerateBehaviors():
for (name, field) in getFields(behavior.interface).items():
out[name] = field # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['behavior']]
return out
|
def _make_cz_layer(qubits: Iterable[devices.GridQubit], layer_index: int
) -> Iterable[ops.Operation]:
"""
Each layer index corresponds to a shift/transpose of this CZ pattern:
●───● ● ● ●───● ● ● . . .
● ● ●───● ● ● ●───● . . .
●───● ● ● ●───● ● ● . . .
● ● ●───● ● ● ●───● . . .
●───● ● ● ●───● ● ● . . .
● ● ●───● ● ● ●───● . . .
. . . . . . . . .
. . . . . . . . .
. . . . . . . . .
Labelled edges, showing the exact index-to-CZs mapping (mod 8):
●─0─●─2─●─4─●─6─●─0─. . .
3│ 7│ 3│ 7│ 3│
●─4─●─6─●─0─●─2─●─4─. . .
1│ 5│ 1│ 5│ 1│
●─0─●─2─●─4─●─6─●─0─. . .
7│ 3│ 7│ 3│ 7│
●─4─●─6─●─0─●─2─●─4─. . .
5│ 1│ 5│ 1│ 5│
●─0─●─2─●─4─●─6─●─0─. . .
3│ 7│ 3│ 7│ 3│
. . . . . .
. . . . . .
. . . . . .
Note that, for small devices, some layers will be empty because the layer
only contains edges not present on the device.
"""
# map to an internal layer index to match the cycle order of public circuits
layer_index_map = [0, 3, 2, 1, 4, 7, 6, 5]
internal_layer_index = layer_index_map[layer_index % 8]
dir_row = internal_layer_index % 2
dir_col = 1 - dir_row
shift = (internal_layer_index >> 1) % 4
for q in qubits:
q2 = devices.GridQubit(q.row + dir_row, q.col + dir_col)
if q2 not in qubits:
continue # This edge isn't on the device.
if (q.row * (2 - dir_row) + q.col * (2 - dir_col)) % 4 != shift:
continue # No CZ along this edge for this layer.
yield ops.common_gates.CZ(q, q2)
|
def function[_make_cz_layer, parameter[qubits, layer_index]]:
constant[
Each layer index corresponds to a shift/transpose of this CZ pattern:
●───● ● ● ●───● ● ● . . .
● ● ●───● ● ● ●───● . . .
●───● ● ● ●───● ● ● . . .
● ● ●───● ● ● ●───● . . .
●───● ● ● ●───● ● ● . . .
● ● ●───● ● ● ●───● . . .
. . . . . . . . .
. . . . . . . . .
. . . . . . . . .
Labelled edges, showing the exact index-to-CZs mapping (mod 8):
●─0─●─2─●─4─●─6─●─0─. . .
3│ 7│ 3│ 7│ 3│
●─4─●─6─●─0─●─2─●─4─. . .
1│ 5│ 1│ 5│ 1│
●─0─●─2─●─4─●─6─●─0─. . .
7│ 3│ 7│ 3│ 7│
●─4─●─6─●─0─●─2─●─4─. . .
5│ 1│ 5│ 1│ 5│
●─0─●─2─●─4─●─6─●─0─. . .
3│ 7│ 3│ 7│ 3│
. . . . . .
. . . . . .
. . . . . .
Note that, for small devices, some layers will be empty because the layer
only contains edges not present on the device.
]
variable[layer_index_map] assign[=] list[[<ast.Constant object at 0x7da204622bc0>, <ast.Constant object at 0x7da2046211b0>, <ast.Constant object at 0x7da204621060>, <ast.Constant object at 0x7da2046206d0>, <ast.Constant object at 0x7da204622fe0>, <ast.Constant object at 0x7da1b1c3d3f0>, <ast.Constant object at 0x7da1b1c1bb20>, <ast.Constant object at 0x7da1b1c1bb80>]]
variable[internal_layer_index] assign[=] call[name[layer_index_map]][binary_operation[name[layer_index] <ast.Mod object at 0x7da2590d6920> constant[8]]]
variable[dir_row] assign[=] binary_operation[name[internal_layer_index] <ast.Mod object at 0x7da2590d6920> constant[2]]
variable[dir_col] assign[=] binary_operation[constant[1] - name[dir_row]]
variable[shift] assign[=] binary_operation[binary_operation[name[internal_layer_index] <ast.RShift object at 0x7da2590d6a40> constant[1]] <ast.Mod object at 0x7da2590d6920> constant[4]]
for taget[name[q]] in starred[name[qubits]] begin[:]
variable[q2] assign[=] call[name[devices].GridQubit, parameter[binary_operation[name[q].row + name[dir_row]], binary_operation[name[q].col + name[dir_col]]]]
if compare[name[q2] <ast.NotIn object at 0x7da2590d7190> name[qubits]] begin[:]
continue
if compare[binary_operation[binary_operation[binary_operation[name[q].row * binary_operation[constant[2] - name[dir_row]]] + binary_operation[name[q].col * binary_operation[constant[2] - name[dir_col]]]] <ast.Mod object at 0x7da2590d6920> constant[4]] not_equal[!=] name[shift]] begin[:]
continue
<ast.Yield object at 0x7da204621ff0>
|
keyword[def] identifier[_make_cz_layer] ( identifier[qubits] : identifier[Iterable] [ identifier[devices] . identifier[GridQubit] ], identifier[layer_index] : identifier[int]
)-> identifier[Iterable] [ identifier[ops] . identifier[Operation] ]:
literal[string]
identifier[layer_index_map] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]
identifier[internal_layer_index] = identifier[layer_index_map] [ identifier[layer_index] % literal[int] ]
identifier[dir_row] = identifier[internal_layer_index] % literal[int]
identifier[dir_col] = literal[int] - identifier[dir_row]
identifier[shift] =( identifier[internal_layer_index] >> literal[int] )% literal[int]
keyword[for] identifier[q] keyword[in] identifier[qubits] :
identifier[q2] = identifier[devices] . identifier[GridQubit] ( identifier[q] . identifier[row] + identifier[dir_row] , identifier[q] . identifier[col] + identifier[dir_col] )
keyword[if] identifier[q2] keyword[not] keyword[in] identifier[qubits] :
keyword[continue]
keyword[if] ( identifier[q] . identifier[row] *( literal[int] - identifier[dir_row] )+ identifier[q] . identifier[col] *( literal[int] - identifier[dir_col] ))% literal[int] != identifier[shift] :
keyword[continue]
keyword[yield] identifier[ops] . identifier[common_gates] . identifier[CZ] ( identifier[q] , identifier[q2] )
|
def _make_cz_layer(qubits: Iterable[devices.GridQubit], layer_index: int) -> Iterable[ops.Operation]:
"""
Each layer index corresponds to a shift/transpose of this CZ pattern:
●───● ● ● ●───● ● ● . . .
● ● ●───● ● ● ●───● . . .
●───● ● ● ●───● ● ● . . .
● ● ●───● ● ● ●───● . . .
●───● ● ● ●───● ● ● . . .
● ● ●───● ● ● ●───● . . .
. . . . . . . . .
. . . . . . . . .
. . . . . . . . .
Labelled edges, showing the exact index-to-CZs mapping (mod 8):
●─0─●─2─●─4─●─6─●─0─. . .
3│ 7│ 3│ 7│ 3│
●─4─●─6─●─0─●─2─●─4─. . .
1│ 5│ 1│ 5│ 1│
●─0─●─2─●─4─●─6─●─0─. . .
7│ 3│ 7│ 3│ 7│
●─4─●─6─●─0─●─2─●─4─. . .
5│ 1│ 5│ 1│ 5│
●─0─●─2─●─4─●─6─●─0─. . .
3│ 7│ 3│ 7│ 3│
. . . . . .
. . . . . .
. . . . . .
Note that, for small devices, some layers will be empty because the layer
only contains edges not present on the device.
"""
# map to an internal layer index to match the cycle order of public circuits
layer_index_map = [0, 3, 2, 1, 4, 7, 6, 5]
internal_layer_index = layer_index_map[layer_index % 8]
dir_row = internal_layer_index % 2
dir_col = 1 - dir_row
shift = (internal_layer_index >> 1) % 4
for q in qubits:
q2 = devices.GridQubit(q.row + dir_row, q.col + dir_col)
if q2 not in qubits:
continue # This edge isn't on the device. # depends on [control=['if'], data=[]]
if (q.row * (2 - dir_row) + q.col * (2 - dir_col)) % 4 != shift:
continue # No CZ along this edge for this layer. # depends on [control=['if'], data=[]]
yield ops.common_gates.CZ(q, q2) # depends on [control=['for'], data=['q']]
|
def delete(self, task, params={}, **options):
"""A specific, existing task can be deleted by making a DELETE request on the
URL for that task. Deleted tasks go into the "trash" of the user making
the delete request. Tasks can be recovered from the trash within a period
of 30 days; afterward they are completely removed from the system.
Returns an empty data record.
Parameters
----------
task : {Id} The task to delete.
"""
path = "/tasks/%s" % (task)
return self.client.delete(path, params, **options)
|
def function[delete, parameter[self, task, params]]:
constant[A specific, existing task can be deleted by making a DELETE request on the
URL for that task. Deleted tasks go into the "trash" of the user making
the delete request. Tasks can be recovered from the trash within a period
of 30 days; afterward they are completely removed from the system.
Returns an empty data record.
Parameters
----------
task : {Id} The task to delete.
]
variable[path] assign[=] binary_operation[constant[/tasks/%s] <ast.Mod object at 0x7da2590d6920> name[task]]
return[call[name[self].client.delete, parameter[name[path], name[params]]]]
|
keyword[def] identifier[delete] ( identifier[self] , identifier[task] , identifier[params] ={},** identifier[options] ):
literal[string]
identifier[path] = literal[string] %( identifier[task] )
keyword[return] identifier[self] . identifier[client] . identifier[delete] ( identifier[path] , identifier[params] ,** identifier[options] )
|
def delete(self, task, params={}, **options):
"""A specific, existing task can be deleted by making a DELETE request on the
URL for that task. Deleted tasks go into the "trash" of the user making
the delete request. Tasks can be recovered from the trash within a period
of 30 days; afterward they are completely removed from the system.
Returns an empty data record.
Parameters
----------
task : {Id} The task to delete.
"""
path = '/tasks/%s' % task
return self.client.delete(path, params, **options)
|
def handle_signature(self, sig, signode):
"""Parse the signature *sig* into individual nodes and append them to the
*signode*. If ValueError is raises, parsing is aborted and the whole
*sig* string is put into a single desc_name node.
The return value is the value that identifies the object. IOW, it is
the identifier that will be used to reference this object, datum,
attribute, proc, etc. It is a tuple of "fullname" (including module and
class(es)) and the classes. See also :py:meth:`add_target_and_index`.
"""
if self._is_attr_like():
sig_match = chpl_attr_sig_pattern.match(sig)
if sig_match is None:
raise ValueError('Signature does not parse: {0}'.format(sig))
func_prefix, name_prefix, name, retann = sig_match.groups()
arglist = None
else:
sig_match = chpl_sig_pattern.match(sig)
if sig_match is None:
raise ValueError('Signature does not parse: {0}'.format(sig))
func_prefix, name_prefix, name, arglist, retann = \
sig_match.groups()
modname = self.options.get(
'module', self.env.temp_data.get('chpl:module'))
classname = self.env.temp_data.get('chpl:class')
if classname:
if name_prefix and name_prefix.startswith(classname):
fullname = name_prefix + name
# class name is given again in the signature
name_prefix = name_prefix[len(classname):].lstrip('.')
elif name_prefix:
# class name is given in the signature, but different
# (shouldn't happen)
fullname = classname + '.' + name_prefix + name
else:
# class name is not given in the signature
fullname = classname + '.' + name
else:
if name_prefix:
classname = name_prefix.rstrip('.')
fullname = name_prefix + name
else:
classname = ''
fullname = name
signode['module'] = modname
signode['class'] = classname
signode['fullname'] = fullname
sig_prefix = self.get_signature_prefix(sig)
if sig_prefix:
signode += addnodes.desc_annotation(sig_prefix, sig_prefix)
# if func_prefix:
# signode += addnodes.desc_addname(func_prefix, func_prefix)
if name_prefix:
signode += addnodes.desc_addname(name_prefix, name_prefix)
anno = self.options.get('annotation')
signode += addnodes.desc_name(name, name)
if not arglist:
# If this needs an arglist, and parens were provided in the
# signature, add a parameterlist. Chapel supports paren-less
# functions and methods, which can act as computed properties. If
# arglist is the empty string, the signature included parens. If
# arglist is None, it did not include parens.
if self.needs_arglist() and arglist is not None:
# for callables, add an empty parameter list
signode += addnodes.desc_parameterlist()
if retann:
signode += addnodes.desc_type(retann, retann)
if anno:
signode += addnodes.desc_annotation(' ' + anno, ' ' + anno)
return fullname, name_prefix
self._pseudo_parse_arglist(signode, arglist)
if retann:
signode += addnodes.desc_type(retann, retann)
if anno:
signode += addnodes.desc_annotation(' ' + anno, ' ' + anno)
return fullname, name_prefix
|
def function[handle_signature, parameter[self, sig, signode]]:
constant[Parse the signature *sig* into individual nodes and append them to the
*signode*. If ValueError is raises, parsing is aborted and the whole
*sig* string is put into a single desc_name node.
The return value is the value that identifies the object. IOW, it is
the identifier that will be used to reference this object, datum,
attribute, proc, etc. It is a tuple of "fullname" (including module and
class(es)) and the classes. See also :py:meth:`add_target_and_index`.
]
if call[name[self]._is_attr_like, parameter[]] begin[:]
variable[sig_match] assign[=] call[name[chpl_attr_sig_pattern].match, parameter[name[sig]]]
if compare[name[sig_match] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b098a1d0>
<ast.Tuple object at 0x7da1b098a110> assign[=] call[name[sig_match].groups, parameter[]]
variable[arglist] assign[=] constant[None]
variable[modname] assign[=] call[name[self].options.get, parameter[constant[module], call[name[self].env.temp_data.get, parameter[constant[chpl:module]]]]]
variable[classname] assign[=] call[name[self].env.temp_data.get, parameter[constant[chpl:class]]]
if name[classname] begin[:]
if <ast.BoolOp object at 0x7da1b0ad94e0> begin[:]
variable[fullname] assign[=] binary_operation[name[name_prefix] + name[name]]
variable[name_prefix] assign[=] call[call[name[name_prefix]][<ast.Slice object at 0x7da1b0ad9840>].lstrip, parameter[constant[.]]]
call[name[signode]][constant[module]] assign[=] name[modname]
call[name[signode]][constant[class]] assign[=] name[classname]
call[name[signode]][constant[fullname]] assign[=] name[fullname]
variable[sig_prefix] assign[=] call[name[self].get_signature_prefix, parameter[name[sig]]]
if name[sig_prefix] begin[:]
<ast.AugAssign object at 0x7da1b0ad9960>
if name[name_prefix] begin[:]
<ast.AugAssign object at 0x7da1b0ad9c60>
variable[anno] assign[=] call[name[self].options.get, parameter[constant[annotation]]]
<ast.AugAssign object at 0x7da1b0ad8c40>
if <ast.UnaryOp object at 0x7da1b0ad8ac0> begin[:]
if <ast.BoolOp object at 0x7da1b0ad8610> begin[:]
<ast.AugAssign object at 0x7da1b0ada260>
if name[retann] begin[:]
<ast.AugAssign object at 0x7da1b0adbf10>
if name[anno] begin[:]
<ast.AugAssign object at 0x7da1b0ada4a0>
return[tuple[[<ast.Name object at 0x7da1b0ad8fd0>, <ast.Name object at 0x7da1b0ada890>]]]
call[name[self]._pseudo_parse_arglist, parameter[name[signode], name[arglist]]]
if name[retann] begin[:]
<ast.AugAssign object at 0x7da1b0ad8d90>
if name[anno] begin[:]
<ast.AugAssign object at 0x7da1b0adae90>
return[tuple[[<ast.Name object at 0x7da1b0adb940>, <ast.Name object at 0x7da1b0adae60>]]]
|
keyword[def] identifier[handle_signature] ( identifier[self] , identifier[sig] , identifier[signode] ):
literal[string]
keyword[if] identifier[self] . identifier[_is_attr_like] ():
identifier[sig_match] = identifier[chpl_attr_sig_pattern] . identifier[match] ( identifier[sig] )
keyword[if] identifier[sig_match] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[sig] ))
identifier[func_prefix] , identifier[name_prefix] , identifier[name] , identifier[retann] = identifier[sig_match] . identifier[groups] ()
identifier[arglist] = keyword[None]
keyword[else] :
identifier[sig_match] = identifier[chpl_sig_pattern] . identifier[match] ( identifier[sig] )
keyword[if] identifier[sig_match] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[sig] ))
identifier[func_prefix] , identifier[name_prefix] , identifier[name] , identifier[arglist] , identifier[retann] = identifier[sig_match] . identifier[groups] ()
identifier[modname] = identifier[self] . identifier[options] . identifier[get] (
literal[string] , identifier[self] . identifier[env] . identifier[temp_data] . identifier[get] ( literal[string] ))
identifier[classname] = identifier[self] . identifier[env] . identifier[temp_data] . identifier[get] ( literal[string] )
keyword[if] identifier[classname] :
keyword[if] identifier[name_prefix] keyword[and] identifier[name_prefix] . identifier[startswith] ( identifier[classname] ):
identifier[fullname] = identifier[name_prefix] + identifier[name]
identifier[name_prefix] = identifier[name_prefix] [ identifier[len] ( identifier[classname] ):]. identifier[lstrip] ( literal[string] )
keyword[elif] identifier[name_prefix] :
identifier[fullname] = identifier[classname] + literal[string] + identifier[name_prefix] + identifier[name]
keyword[else] :
identifier[fullname] = identifier[classname] + literal[string] + identifier[name]
keyword[else] :
keyword[if] identifier[name_prefix] :
identifier[classname] = identifier[name_prefix] . identifier[rstrip] ( literal[string] )
identifier[fullname] = identifier[name_prefix] + identifier[name]
keyword[else] :
identifier[classname] = literal[string]
identifier[fullname] = identifier[name]
identifier[signode] [ literal[string] ]= identifier[modname]
identifier[signode] [ literal[string] ]= identifier[classname]
identifier[signode] [ literal[string] ]= identifier[fullname]
identifier[sig_prefix] = identifier[self] . identifier[get_signature_prefix] ( identifier[sig] )
keyword[if] identifier[sig_prefix] :
identifier[signode] += identifier[addnodes] . identifier[desc_annotation] ( identifier[sig_prefix] , identifier[sig_prefix] )
keyword[if] identifier[name_prefix] :
identifier[signode] += identifier[addnodes] . identifier[desc_addname] ( identifier[name_prefix] , identifier[name_prefix] )
identifier[anno] = identifier[self] . identifier[options] . identifier[get] ( literal[string] )
identifier[signode] += identifier[addnodes] . identifier[desc_name] ( identifier[name] , identifier[name] )
keyword[if] keyword[not] identifier[arglist] :
keyword[if] identifier[self] . identifier[needs_arglist] () keyword[and] identifier[arglist] keyword[is] keyword[not] keyword[None] :
identifier[signode] += identifier[addnodes] . identifier[desc_parameterlist] ()
keyword[if] identifier[retann] :
identifier[signode] += identifier[addnodes] . identifier[desc_type] ( identifier[retann] , identifier[retann] )
keyword[if] identifier[anno] :
identifier[signode] += identifier[addnodes] . identifier[desc_annotation] ( literal[string] + identifier[anno] , literal[string] + identifier[anno] )
keyword[return] identifier[fullname] , identifier[name_prefix]
identifier[self] . identifier[_pseudo_parse_arglist] ( identifier[signode] , identifier[arglist] )
keyword[if] identifier[retann] :
identifier[signode] += identifier[addnodes] . identifier[desc_type] ( identifier[retann] , identifier[retann] )
keyword[if] identifier[anno] :
identifier[signode] += identifier[addnodes] . identifier[desc_annotation] ( literal[string] + identifier[anno] , literal[string] + identifier[anno] )
keyword[return] identifier[fullname] , identifier[name_prefix]
|
def handle_signature(self, sig, signode):
"""Parse the signature *sig* into individual nodes and append them to the
*signode*. If ValueError is raises, parsing is aborted and the whole
*sig* string is put into a single desc_name node.
The return value is the value that identifies the object. IOW, it is
the identifier that will be used to reference this object, datum,
attribute, proc, etc. It is a tuple of "fullname" (including module and
class(es)) and the classes. See also :py:meth:`add_target_and_index`.
"""
if self._is_attr_like():
sig_match = chpl_attr_sig_pattern.match(sig)
if sig_match is None:
raise ValueError('Signature does not parse: {0}'.format(sig)) # depends on [control=['if'], data=[]]
(func_prefix, name_prefix, name, retann) = sig_match.groups()
arglist = None # depends on [control=['if'], data=[]]
else:
sig_match = chpl_sig_pattern.match(sig)
if sig_match is None:
raise ValueError('Signature does not parse: {0}'.format(sig)) # depends on [control=['if'], data=[]]
(func_prefix, name_prefix, name, arglist, retann) = sig_match.groups()
modname = self.options.get('module', self.env.temp_data.get('chpl:module'))
classname = self.env.temp_data.get('chpl:class')
if classname:
if name_prefix and name_prefix.startswith(classname):
fullname = name_prefix + name
# class name is given again in the signature
name_prefix = name_prefix[len(classname):].lstrip('.') # depends on [control=['if'], data=[]]
elif name_prefix:
# class name is given in the signature, but different
# (shouldn't happen)
fullname = classname + '.' + name_prefix + name # depends on [control=['if'], data=[]]
else:
# class name is not given in the signature
fullname = classname + '.' + name # depends on [control=['if'], data=[]]
elif name_prefix:
classname = name_prefix.rstrip('.')
fullname = name_prefix + name # depends on [control=['if'], data=[]]
else:
classname = ''
fullname = name
signode['module'] = modname
signode['class'] = classname
signode['fullname'] = fullname
sig_prefix = self.get_signature_prefix(sig)
if sig_prefix:
signode += addnodes.desc_annotation(sig_prefix, sig_prefix) # depends on [control=['if'], data=[]]
# if func_prefix:
# signode += addnodes.desc_addname(func_prefix, func_prefix)
if name_prefix:
signode += addnodes.desc_addname(name_prefix, name_prefix) # depends on [control=['if'], data=[]]
anno = self.options.get('annotation')
signode += addnodes.desc_name(name, name)
if not arglist:
# If this needs an arglist, and parens were provided in the
# signature, add a parameterlist. Chapel supports paren-less
# functions and methods, which can act as computed properties. If
# arglist is the empty string, the signature included parens. If
# arglist is None, it did not include parens.
if self.needs_arglist() and arglist is not None:
# for callables, add an empty parameter list
signode += addnodes.desc_parameterlist() # depends on [control=['if'], data=[]]
if retann:
signode += addnodes.desc_type(retann, retann) # depends on [control=['if'], data=[]]
if anno:
signode += addnodes.desc_annotation(' ' + anno, ' ' + anno) # depends on [control=['if'], data=[]]
return (fullname, name_prefix) # depends on [control=['if'], data=[]]
self._pseudo_parse_arglist(signode, arglist)
if retann:
signode += addnodes.desc_type(retann, retann) # depends on [control=['if'], data=[]]
if anno:
signode += addnodes.desc_annotation(' ' + anno, ' ' + anno) # depends on [control=['if'], data=[]]
return (fullname, name_prefix)
|
def setTime(self, yy, mm, dd, hh, minutes, ss, password="00000000"):
""" Serial set time with day of week calculation.
Args:
yy (int): Last two digits of year.
mm (int): Month 1-12.
dd (int): Day 1-31
hh (int): Hour 0 to 23.
minutes (int): Minutes 0 to 59.
ss (int): Seconds 0 to 59.
password (str): Optional password.
Returns:
bool: True on completion and ACK.
"""
result = False
self.setContext("setTime")
try:
if mm < 1 or mm > 12:
self.writeCmdMsg("Month must be between 1 and 12")
self.setContext("")
return result
if dd < 1 or dd > 31:
self.writeCmdMsg("Day must be between 1 and 31")
self.setContext("")
return result
if hh < 0 or hh > 23:
self.writeCmdMsg("Hour must be between 0 and 23, inclusive")
self.setContext("")
return result
if minutes < 0 or minutes > 59:
self.writeCmdMsg("Minutes must be between 0 and 59, inclusive")
self.setContext("")
return result
if ss < 0 or ss > 59:
self.writeCmdMsg("Seconds must be between 0 and 59, inclusive")
self.setContext("")
return result
if len(password) != 8:
self.writeCmdMsg("Invalid password length.")
self.setContext("")
return result
if not self.request(False):
self.writeCmdMsg("Bad read CRC on setting")
else:
if not self.serialCmdPwdAuth(password):
self.writeCmdMsg("Password failure")
else:
dt_buf = datetime.datetime(int(yy), int(mm), int(dd), int(hh), int(minutes), int(ss))
ekm_log("Writing Date and Time " + dt_buf.strftime("%Y-%m-%d %H:%M"))
dayofweek = dt_buf.date().isoweekday()
ekm_log("Calculated weekday " + str(dayofweek))
req_str = "015731023030363028"
req_str += binascii.hexlify(str(yy)[-2:])
req_str += binascii.hexlify(str(mm).zfill(2))
req_str += binascii.hexlify(str(dd).zfill(2))
req_str += binascii.hexlify(str(dayofweek).zfill(2))
req_str += binascii.hexlify(str(hh).zfill(2))
req_str += binascii.hexlify(str(minutes).zfill(2))
req_str += binascii.hexlify(str(ss).zfill(2))
req_str += "2903"
req_str += self.calc_crc16(req_str[2:].decode("hex"))
self.m_serial_port.write(req_str.decode("hex"))
if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
self.writeCmdMsg("Success(setTime): 06 returned.")
result = True
self.serialPostEnd()
except:
ekm_log(traceback.format_exc(sys.exc_info()))
self.setContext("")
return result
|
def function[setTime, parameter[self, yy, mm, dd, hh, minutes, ss, password]]:
constant[ Serial set time with day of week calculation.
Args:
yy (int): Last two digits of year.
mm (int): Month 1-12.
dd (int): Day 1-31
hh (int): Hour 0 to 23.
minutes (int): Minutes 0 to 59.
ss (int): Seconds 0 to 59.
password (str): Optional password.
Returns:
bool: True on completion and ACK.
]
variable[result] assign[=] constant[False]
call[name[self].setContext, parameter[constant[setTime]]]
<ast.Try object at 0x7da1b2345b10>
call[name[self].setContext, parameter[constant[]]]
return[name[result]]
|
keyword[def] identifier[setTime] ( identifier[self] , identifier[yy] , identifier[mm] , identifier[dd] , identifier[hh] , identifier[minutes] , identifier[ss] , identifier[password] = literal[string] ):
literal[string]
identifier[result] = keyword[False]
identifier[self] . identifier[setContext] ( literal[string] )
keyword[try] :
keyword[if] identifier[mm] < literal[int] keyword[or] identifier[mm] > literal[int] :
identifier[self] . identifier[writeCmdMsg] ( literal[string] )
identifier[self] . identifier[setContext] ( literal[string] )
keyword[return] identifier[result]
keyword[if] identifier[dd] < literal[int] keyword[or] identifier[dd] > literal[int] :
identifier[self] . identifier[writeCmdMsg] ( literal[string] )
identifier[self] . identifier[setContext] ( literal[string] )
keyword[return] identifier[result]
keyword[if] identifier[hh] < literal[int] keyword[or] identifier[hh] > literal[int] :
identifier[self] . identifier[writeCmdMsg] ( literal[string] )
identifier[self] . identifier[setContext] ( literal[string] )
keyword[return] identifier[result]
keyword[if] identifier[minutes] < literal[int] keyword[or] identifier[minutes] > literal[int] :
identifier[self] . identifier[writeCmdMsg] ( literal[string] )
identifier[self] . identifier[setContext] ( literal[string] )
keyword[return] identifier[result]
keyword[if] identifier[ss] < literal[int] keyword[or] identifier[ss] > literal[int] :
identifier[self] . identifier[writeCmdMsg] ( literal[string] )
identifier[self] . identifier[setContext] ( literal[string] )
keyword[return] identifier[result]
keyword[if] identifier[len] ( identifier[password] )!= literal[int] :
identifier[self] . identifier[writeCmdMsg] ( literal[string] )
identifier[self] . identifier[setContext] ( literal[string] )
keyword[return] identifier[result]
keyword[if] keyword[not] identifier[self] . identifier[request] ( keyword[False] ):
identifier[self] . identifier[writeCmdMsg] ( literal[string] )
keyword[else] :
keyword[if] keyword[not] identifier[self] . identifier[serialCmdPwdAuth] ( identifier[password] ):
identifier[self] . identifier[writeCmdMsg] ( literal[string] )
keyword[else] :
identifier[dt_buf] = identifier[datetime] . identifier[datetime] ( identifier[int] ( identifier[yy] ), identifier[int] ( identifier[mm] ), identifier[int] ( identifier[dd] ), identifier[int] ( identifier[hh] ), identifier[int] ( identifier[minutes] ), identifier[int] ( identifier[ss] ))
identifier[ekm_log] ( literal[string] + identifier[dt_buf] . identifier[strftime] ( literal[string] ))
identifier[dayofweek] = identifier[dt_buf] . identifier[date] (). identifier[isoweekday] ()
identifier[ekm_log] ( literal[string] + identifier[str] ( identifier[dayofweek] ))
identifier[req_str] = literal[string]
identifier[req_str] += identifier[binascii] . identifier[hexlify] ( identifier[str] ( identifier[yy] )[- literal[int] :])
identifier[req_str] += identifier[binascii] . identifier[hexlify] ( identifier[str] ( identifier[mm] ). identifier[zfill] ( literal[int] ))
identifier[req_str] += identifier[binascii] . identifier[hexlify] ( identifier[str] ( identifier[dd] ). identifier[zfill] ( literal[int] ))
identifier[req_str] += identifier[binascii] . identifier[hexlify] ( identifier[str] ( identifier[dayofweek] ). identifier[zfill] ( literal[int] ))
identifier[req_str] += identifier[binascii] . identifier[hexlify] ( identifier[str] ( identifier[hh] ). identifier[zfill] ( literal[int] ))
identifier[req_str] += identifier[binascii] . identifier[hexlify] ( identifier[str] ( identifier[minutes] ). identifier[zfill] ( literal[int] ))
identifier[req_str] += identifier[binascii] . identifier[hexlify] ( identifier[str] ( identifier[ss] ). identifier[zfill] ( literal[int] ))
identifier[req_str] += literal[string]
identifier[req_str] += identifier[self] . identifier[calc_crc16] ( identifier[req_str] [ literal[int] :]. identifier[decode] ( literal[string] ))
identifier[self] . identifier[m_serial_port] . identifier[write] ( identifier[req_str] . identifier[decode] ( literal[string] ))
keyword[if] identifier[self] . identifier[m_serial_port] . identifier[getResponse] ( identifier[self] . identifier[getContext] ()). identifier[encode] ( literal[string] )== literal[string] :
identifier[self] . identifier[writeCmdMsg] ( literal[string] )
identifier[result] = keyword[True]
identifier[self] . identifier[serialPostEnd] ()
keyword[except] :
identifier[ekm_log] ( identifier[traceback] . identifier[format_exc] ( identifier[sys] . identifier[exc_info] ()))
identifier[self] . identifier[setContext] ( literal[string] )
keyword[return] identifier[result]
|
def setTime(self, yy, mm, dd, hh, minutes, ss, password='00000000'):
""" Serial set time with day of week calculation.
Args:
yy (int): Last two digits of year.
mm (int): Month 1-12.
dd (int): Day 1-31
hh (int): Hour 0 to 23.
minutes (int): Minutes 0 to 59.
ss (int): Seconds 0 to 59.
password (str): Optional password.
Returns:
bool: True on completion and ACK.
"""
result = False
self.setContext('setTime')
try:
if mm < 1 or mm > 12:
self.writeCmdMsg('Month must be between 1 and 12')
self.setContext('')
return result # depends on [control=['if'], data=[]]
if dd < 1 or dd > 31:
self.writeCmdMsg('Day must be between 1 and 31')
self.setContext('')
return result # depends on [control=['if'], data=[]]
if hh < 0 or hh > 23:
self.writeCmdMsg('Hour must be between 0 and 23, inclusive')
self.setContext('')
return result # depends on [control=['if'], data=[]]
if minutes < 0 or minutes > 59:
self.writeCmdMsg('Minutes must be between 0 and 59, inclusive')
self.setContext('')
return result # depends on [control=['if'], data=[]]
if ss < 0 or ss > 59:
self.writeCmdMsg('Seconds must be between 0 and 59, inclusive')
self.setContext('')
return result # depends on [control=['if'], data=[]]
if len(password) != 8:
self.writeCmdMsg('Invalid password length.')
self.setContext('')
return result # depends on [control=['if'], data=[]]
if not self.request(False):
self.writeCmdMsg('Bad read CRC on setting') # depends on [control=['if'], data=[]]
elif not self.serialCmdPwdAuth(password):
self.writeCmdMsg('Password failure') # depends on [control=['if'], data=[]]
else:
dt_buf = datetime.datetime(int(yy), int(mm), int(dd), int(hh), int(minutes), int(ss))
ekm_log('Writing Date and Time ' + dt_buf.strftime('%Y-%m-%d %H:%M'))
dayofweek = dt_buf.date().isoweekday()
ekm_log('Calculated weekday ' + str(dayofweek))
req_str = '015731023030363028'
req_str += binascii.hexlify(str(yy)[-2:])
req_str += binascii.hexlify(str(mm).zfill(2))
req_str += binascii.hexlify(str(dd).zfill(2))
req_str += binascii.hexlify(str(dayofweek).zfill(2))
req_str += binascii.hexlify(str(hh).zfill(2))
req_str += binascii.hexlify(str(minutes).zfill(2))
req_str += binascii.hexlify(str(ss).zfill(2))
req_str += '2903'
req_str += self.calc_crc16(req_str[2:].decode('hex'))
self.m_serial_port.write(req_str.decode('hex'))
if self.m_serial_port.getResponse(self.getContext()).encode('hex') == '06':
self.writeCmdMsg('Success(setTime): 06 returned.')
result = True # depends on [control=['if'], data=[]]
self.serialPostEnd() # depends on [control=['try'], data=[]]
except:
ekm_log(traceback.format_exc(sys.exc_info())) # depends on [control=['except'], data=[]]
self.setContext('')
return result
|
def localmax(x, axis=0):
"""Find local maxima in an array `x`.
An element `x[i]` is considered a local maximum if the following
conditions are met:
- `x[i] > x[i-1]`
- `x[i] >= x[i+1]`
Note that the first condition is strict, and that the first element
`x[0]` will never be considered as a local maximum.
Examples
--------
>>> x = np.array([1, 0, 1, 2, -1, 0, -2, 1])
>>> librosa.util.localmax(x)
array([False, False, False, True, False, True, False, True], dtype=bool)
>>> # Two-dimensional example
>>> x = np.array([[1,0,1], [2, -1, 0], [2, 1, 3]])
>>> librosa.util.localmax(x, axis=0)
array([[False, False, False],
[ True, False, False],
[False, True, True]], dtype=bool)
>>> librosa.util.localmax(x, axis=1)
array([[False, False, True],
[False, False, True],
[False, False, True]], dtype=bool)
Parameters
----------
x : np.ndarray [shape=(d1,d2,...)]
input vector or array
axis : int
axis along which to compute local maximality
Returns
-------
m : np.ndarray [shape=x.shape, dtype=bool]
indicator array of local maximality along `axis`
"""
paddings = [(0, 0)] * x.ndim
paddings[axis] = (1, 1)
x_pad = np.pad(x, paddings, mode='edge')
inds1 = [slice(None)] * x.ndim
inds1[axis] = slice(0, -2)
inds2 = [slice(None)] * x.ndim
inds2[axis] = slice(2, x_pad.shape[axis])
return (x > x_pad[tuple(inds1)]) & (x >= x_pad[tuple(inds2)])
|
def function[localmax, parameter[x, axis]]:
constant[Find local maxima in an array `x`.
An element `x[i]` is considered a local maximum if the following
conditions are met:
- `x[i] > x[i-1]`
- `x[i] >= x[i+1]`
Note that the first condition is strict, and that the first element
`x[0]` will never be considered as a local maximum.
Examples
--------
>>> x = np.array([1, 0, 1, 2, -1, 0, -2, 1])
>>> librosa.util.localmax(x)
array([False, False, False, True, False, True, False, True], dtype=bool)
>>> # Two-dimensional example
>>> x = np.array([[1,0,1], [2, -1, 0], [2, 1, 3]])
>>> librosa.util.localmax(x, axis=0)
array([[False, False, False],
[ True, False, False],
[False, True, True]], dtype=bool)
>>> librosa.util.localmax(x, axis=1)
array([[False, False, True],
[False, False, True],
[False, False, True]], dtype=bool)
Parameters
----------
x : np.ndarray [shape=(d1,d2,...)]
input vector or array
axis : int
axis along which to compute local maximality
Returns
-------
m : np.ndarray [shape=x.shape, dtype=bool]
indicator array of local maximality along `axis`
]
variable[paddings] assign[=] binary_operation[list[[<ast.Tuple object at 0x7da1b0512d70>]] * name[x].ndim]
call[name[paddings]][name[axis]] assign[=] tuple[[<ast.Constant object at 0x7da1b0513ee0>, <ast.Constant object at 0x7da1b0513e80>]]
variable[x_pad] assign[=] call[name[np].pad, parameter[name[x], name[paddings]]]
variable[inds1] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b0512200>]] * name[x].ndim]
call[name[inds1]][name[axis]] assign[=] call[name[slice], parameter[constant[0], <ast.UnaryOp object at 0x7da1b0513b50>]]
variable[inds2] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b0512110>]] * name[x].ndim]
call[name[inds2]][name[axis]] assign[=] call[name[slice], parameter[constant[2], call[name[x_pad].shape][name[axis]]]]
return[binary_operation[compare[name[x] greater[>] call[name[x_pad]][call[name[tuple], parameter[name[inds1]]]]] <ast.BitAnd object at 0x7da2590d6b60> compare[name[x] greater_or_equal[>=] call[name[x_pad]][call[name[tuple], parameter[name[inds2]]]]]]]
|
keyword[def] identifier[localmax] ( identifier[x] , identifier[axis] = literal[int] ):
literal[string]
identifier[paddings] =[( literal[int] , literal[int] )]* identifier[x] . identifier[ndim]
identifier[paddings] [ identifier[axis] ]=( literal[int] , literal[int] )
identifier[x_pad] = identifier[np] . identifier[pad] ( identifier[x] , identifier[paddings] , identifier[mode] = literal[string] )
identifier[inds1] =[ identifier[slice] ( keyword[None] )]* identifier[x] . identifier[ndim]
identifier[inds1] [ identifier[axis] ]= identifier[slice] ( literal[int] ,- literal[int] )
identifier[inds2] =[ identifier[slice] ( keyword[None] )]* identifier[x] . identifier[ndim]
identifier[inds2] [ identifier[axis] ]= identifier[slice] ( literal[int] , identifier[x_pad] . identifier[shape] [ identifier[axis] ])
keyword[return] ( identifier[x] > identifier[x_pad] [ identifier[tuple] ( identifier[inds1] )])&( identifier[x] >= identifier[x_pad] [ identifier[tuple] ( identifier[inds2] )])
|
def localmax(x, axis=0):
"""Find local maxima in an array `x`.
An element `x[i]` is considered a local maximum if the following
conditions are met:
- `x[i] > x[i-1]`
- `x[i] >= x[i+1]`
Note that the first condition is strict, and that the first element
`x[0]` will never be considered as a local maximum.
Examples
--------
>>> x = np.array([1, 0, 1, 2, -1, 0, -2, 1])
>>> librosa.util.localmax(x)
array([False, False, False, True, False, True, False, True], dtype=bool)
>>> # Two-dimensional example
>>> x = np.array([[1,0,1], [2, -1, 0], [2, 1, 3]])
>>> librosa.util.localmax(x, axis=0)
array([[False, False, False],
[ True, False, False],
[False, True, True]], dtype=bool)
>>> librosa.util.localmax(x, axis=1)
array([[False, False, True],
[False, False, True],
[False, False, True]], dtype=bool)
Parameters
----------
x : np.ndarray [shape=(d1,d2,...)]
input vector or array
axis : int
axis along which to compute local maximality
Returns
-------
m : np.ndarray [shape=x.shape, dtype=bool]
indicator array of local maximality along `axis`
"""
paddings = [(0, 0)] * x.ndim
paddings[axis] = (1, 1)
x_pad = np.pad(x, paddings, mode='edge')
inds1 = [slice(None)] * x.ndim
inds1[axis] = slice(0, -2)
inds2 = [slice(None)] * x.ndim
inds2[axis] = slice(2, x_pad.shape[axis])
return (x > x_pad[tuple(inds1)]) & (x >= x_pad[tuple(inds2)])
|
def write(self,
container: Container,
filepath: str,
contents: str
) -> str:
"""
Reads the contents of a given file belonging to a container.
"""
logger.debug("writing to file [%s] inside container [%s]",
filepath, container.id)
filepath = self._resolve_path(container, filepath)
# write the file contents to a temporary file on the host before
# copying that file to the container
(_, fn_host) = tempfile.mkstemp(suffix='.bugzoo')
try:
with open(fn_host, 'w') as fh:
fh.write(contents)
self.__mgr_ctr.copy_to(container, fn_host, filepath)
finally:
os.remove(fn_host)
logger.debug("wrote to file [%s] inside container [%s]",
filepath, container.id)
|
def function[write, parameter[self, container, filepath, contents]]:
constant[
Reads the contents of a given file belonging to a container.
]
call[name[logger].debug, parameter[constant[writing to file [%s] inside container [%s]], name[filepath], name[container].id]]
variable[filepath] assign[=] call[name[self]._resolve_path, parameter[name[container], name[filepath]]]
<ast.Tuple object at 0x7da1b0c488b0> assign[=] call[name[tempfile].mkstemp, parameter[]]
<ast.Try object at 0x7da1b0c4b6d0>
call[name[logger].debug, parameter[constant[wrote to file [%s] inside container [%s]], name[filepath], name[container].id]]
|
keyword[def] identifier[write] ( identifier[self] ,
identifier[container] : identifier[Container] ,
identifier[filepath] : identifier[str] ,
identifier[contents] : identifier[str]
)-> identifier[str] :
literal[string]
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[filepath] , identifier[container] . identifier[id] )
identifier[filepath] = identifier[self] . identifier[_resolve_path] ( identifier[container] , identifier[filepath] )
( identifier[_] , identifier[fn_host] )= identifier[tempfile] . identifier[mkstemp] ( identifier[suffix] = literal[string] )
keyword[try] :
keyword[with] identifier[open] ( identifier[fn_host] , literal[string] ) keyword[as] identifier[fh] :
identifier[fh] . identifier[write] ( identifier[contents] )
identifier[self] . identifier[__mgr_ctr] . identifier[copy_to] ( identifier[container] , identifier[fn_host] , identifier[filepath] )
keyword[finally] :
identifier[os] . identifier[remove] ( identifier[fn_host] )
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[filepath] , identifier[container] . identifier[id] )
|
def write(self, container: Container, filepath: str, contents: str) -> str:
"""
Reads the contents of a given file belonging to a container.
"""
logger.debug('writing to file [%s] inside container [%s]', filepath, container.id)
filepath = self._resolve_path(container, filepath)
# write the file contents to a temporary file on the host before
# copying that file to the container
(_, fn_host) = tempfile.mkstemp(suffix='.bugzoo')
try:
with open(fn_host, 'w') as fh:
fh.write(contents) # depends on [control=['with'], data=['fh']]
self.__mgr_ctr.copy_to(container, fn_host, filepath) # depends on [control=['try'], data=[]]
finally:
os.remove(fn_host)
logger.debug('wrote to file [%s] inside container [%s]', filepath, container.id)
|
def _remove_old_stderr_files(self):
"""
Remove stderr files left by previous Spyder instances.
This is only required on Windows because we can't
clean up stderr files while Spyder is running on it.
"""
if os.name == 'nt':
tmpdir = get_temp_dir()
for fname in os.listdir(tmpdir):
if osp.splitext(fname)[1] == '.stderr':
try:
os.remove(osp.join(tmpdir, fname))
except Exception:
pass
|
def function[_remove_old_stderr_files, parameter[self]]:
constant[
Remove stderr files left by previous Spyder instances.
This is only required on Windows because we can't
clean up stderr files while Spyder is running on it.
]
if compare[name[os].name equal[==] constant[nt]] begin[:]
variable[tmpdir] assign[=] call[name[get_temp_dir], parameter[]]
for taget[name[fname]] in starred[call[name[os].listdir, parameter[name[tmpdir]]]] begin[:]
if compare[call[call[name[osp].splitext, parameter[name[fname]]]][constant[1]] equal[==] constant[.stderr]] begin[:]
<ast.Try object at 0x7da20e9b25f0>
|
keyword[def] identifier[_remove_old_stderr_files] ( identifier[self] ):
literal[string]
keyword[if] identifier[os] . identifier[name] == literal[string] :
identifier[tmpdir] = identifier[get_temp_dir] ()
keyword[for] identifier[fname] keyword[in] identifier[os] . identifier[listdir] ( identifier[tmpdir] ):
keyword[if] identifier[osp] . identifier[splitext] ( identifier[fname] )[ literal[int] ]== literal[string] :
keyword[try] :
identifier[os] . identifier[remove] ( identifier[osp] . identifier[join] ( identifier[tmpdir] , identifier[fname] ))
keyword[except] identifier[Exception] :
keyword[pass]
|
def _remove_old_stderr_files(self):
"""
Remove stderr files left by previous Spyder instances.
This is only required on Windows because we can't
clean up stderr files while Spyder is running on it.
"""
if os.name == 'nt':
tmpdir = get_temp_dir()
for fname in os.listdir(tmpdir):
if osp.splitext(fname)[1] == '.stderr':
try:
os.remove(osp.join(tmpdir, fname)) # depends on [control=['try'], data=[]]
except Exception:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fname']] # depends on [control=['if'], data=[]]
|
def stop_recording_skipped(cls):
"""
Stop collecting OptionErrors recorded with the
record_skipped_option method and return them
"""
if cls._errors_recorded is None:
raise Exception('Cannot stop recording before it is started')
recorded = cls._errors_recorded[:]
cls._errors_recorded = None
return recorded
|
def function[stop_recording_skipped, parameter[cls]]:
constant[
Stop collecting OptionErrors recorded with the
record_skipped_option method and return them
]
if compare[name[cls]._errors_recorded is constant[None]] begin[:]
<ast.Raise object at 0x7da1b2347400>
variable[recorded] assign[=] call[name[cls]._errors_recorded][<ast.Slice object at 0x7da2054a7970>]
name[cls]._errors_recorded assign[=] constant[None]
return[name[recorded]]
|
keyword[def] identifier[stop_recording_skipped] ( identifier[cls] ):
literal[string]
keyword[if] identifier[cls] . identifier[_errors_recorded] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[recorded] = identifier[cls] . identifier[_errors_recorded] [:]
identifier[cls] . identifier[_errors_recorded] = keyword[None]
keyword[return] identifier[recorded]
|
def stop_recording_skipped(cls):
"""
Stop collecting OptionErrors recorded with the
record_skipped_option method and return them
"""
if cls._errors_recorded is None:
raise Exception('Cannot stop recording before it is started') # depends on [control=['if'], data=[]]
recorded = cls._errors_recorded[:]
cls._errors_recorded = None
return recorded
|
def tupled_argmax(a):
"""
Argmax that returns an index tuple. Note that `numpy.argmax` will return a
scalar index as if you had flattened the array.
Parameters
----------
a : array_like
Input array.
Returns
-------
index : tuple
Tuple of index, even if `a` is one-dimensional. Note that this can
immediately be used to index `a` as in ``a[index]``.
Examples
--------
>>> import numpy as np
>>> import deepdish as dd
>>> a = np.arange(6).reshape(2,3)
>>> a
array([[0, 1, 2],
[3, 4, 5]])
>>> dd.tupled_argmax(a)
(1, 2)
"""
return np.unravel_index(np.argmax(a), np.shape(a))
|
def function[tupled_argmax, parameter[a]]:
constant[
Argmax that returns an index tuple. Note that `numpy.argmax` will return a
scalar index as if you had flattened the array.
Parameters
----------
a : array_like
Input array.
Returns
-------
index : tuple
Tuple of index, even if `a` is one-dimensional. Note that this can
immediately be used to index `a` as in ``a[index]``.
Examples
--------
>>> import numpy as np
>>> import deepdish as dd
>>> a = np.arange(6).reshape(2,3)
>>> a
array([[0, 1, 2],
[3, 4, 5]])
>>> dd.tupled_argmax(a)
(1, 2)
]
return[call[name[np].unravel_index, parameter[call[name[np].argmax, parameter[name[a]]], call[name[np].shape, parameter[name[a]]]]]]
|
keyword[def] identifier[tupled_argmax] ( identifier[a] ):
literal[string]
keyword[return] identifier[np] . identifier[unravel_index] ( identifier[np] . identifier[argmax] ( identifier[a] ), identifier[np] . identifier[shape] ( identifier[a] ))
|
def tupled_argmax(a):
"""
Argmax that returns an index tuple. Note that `numpy.argmax` will return a
scalar index as if you had flattened the array.
Parameters
----------
a : array_like
Input array.
Returns
-------
index : tuple
Tuple of index, even if `a` is one-dimensional. Note that this can
immediately be used to index `a` as in ``a[index]``.
Examples
--------
>>> import numpy as np
>>> import deepdish as dd
>>> a = np.arange(6).reshape(2,3)
>>> a
array([[0, 1, 2],
[3, 4, 5]])
>>> dd.tupled_argmax(a)
(1, 2)
"""
return np.unravel_index(np.argmax(a), np.shape(a))
|
def summarize_methdods_and_functions(api_modules, out_dir,
printlog=False, clean=True,
str_above_header=''):
"""Generates subpacke-level summary files.
Description
-----------
A function to generate subpacke-level summary markdown API files from
a module-level API documentation previously created via the
`generate_api_docs` function.
The output structure is:
package/package.subpackage.md
Parameters
----------
api_modules : str
Path to the API documentation crated via `generate_api_docs`
out_dir : str
Path to the desired output directory for the new markdown files.
clean : bool (default: False)
Removes previously existing API directory if True.
printlog : bool (default: True)
Prints a progress log to the standard output screen if True.
str_above_header : str (default: '')
Places a string just above the header.
"""
if printlog:
print('\n\nGenerating Subpackage Files\n%s\n' % (50 * '='))
if clean:
if os.path.isdir(out_dir):
shutil.rmtree(out_dir)
if not os.path.isdir(out_dir):
os.mkdir(out_dir)
if printlog:
print('created %s' % out_dir)
subdir_paths = [os.path.join(api_modules, d)
for d in os.listdir(api_modules)
if not d.startswith('.')]
out_files = [os.path.join(out_dir, os.path.basename(d)) + '.md'
for d in subdir_paths]
for sub_p, out_f in zip(subdir_paths, out_files):
module_paths = (os.path.join(sub_p, m)
for m in os.listdir(sub_p)
if not m.startswith('.'))
new_output = []
if str_above_header:
new_output.append(str_above_header)
for p in module_paths:
with open(p, 'r') as r:
new_output.extend(r.readlines())
msg = ''
if not os.path.isfile(out_f):
msg = 'created'
if msg != 'created':
with open(out_f, 'r') as f:
prev = f.readlines()
if prev != new_output:
msg = 'updated'
else:
msg = 'skipped'
if msg != 'skipped':
with open(out_f, 'w') as f:
f.write(''.join(new_output))
if printlog:
print('%s %s' % (msg, out_f))
|
def function[summarize_methdods_and_functions, parameter[api_modules, out_dir, printlog, clean, str_above_header]]:
constant[Generates subpacke-level summary files.
Description
-----------
A function to generate subpacke-level summary markdown API files from
a module-level API documentation previously created via the
`generate_api_docs` function.
The output structure is:
package/package.subpackage.md
Parameters
----------
api_modules : str
Path to the API documentation crated via `generate_api_docs`
out_dir : str
Path to the desired output directory for the new markdown files.
clean : bool (default: False)
Removes previously existing API directory if True.
printlog : bool (default: True)
Prints a progress log to the standard output screen if True.
str_above_header : str (default: '')
Places a string just above the header.
]
if name[printlog] begin[:]
call[name[print], parameter[binary_operation[constant[
Generating Subpackage Files
%s
] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[50] * constant[=]]]]]
if name[clean] begin[:]
if call[name[os].path.isdir, parameter[name[out_dir]]] begin[:]
call[name[shutil].rmtree, parameter[name[out_dir]]]
if <ast.UnaryOp object at 0x7da1b0c32ad0> begin[:]
call[name[os].mkdir, parameter[name[out_dir]]]
if name[printlog] begin[:]
call[name[print], parameter[binary_operation[constant[created %s] <ast.Mod object at 0x7da2590d6920> name[out_dir]]]]
variable[subdir_paths] assign[=] <ast.ListComp object at 0x7da1b0c30e20>
variable[out_files] assign[=] <ast.ListComp object at 0x7da1b0c33010>
for taget[tuple[[<ast.Name object at 0x7da1b0c325c0>, <ast.Name object at 0x7da1b0c32530>]]] in starred[call[name[zip], parameter[name[subdir_paths], name[out_files]]]] begin[:]
variable[module_paths] assign[=] <ast.GeneratorExp object at 0x7da1b0c309a0>
variable[new_output] assign[=] list[[]]
if name[str_above_header] begin[:]
call[name[new_output].append, parameter[name[str_above_header]]]
for taget[name[p]] in starred[name[module_paths]] begin[:]
with call[name[open], parameter[name[p], constant[r]]] begin[:]
call[name[new_output].extend, parameter[call[name[r].readlines, parameter[]]]]
variable[msg] assign[=] constant[]
if <ast.UnaryOp object at 0x7da1b0c32fb0> begin[:]
variable[msg] assign[=] constant[created]
if compare[name[msg] not_equal[!=] constant[created]] begin[:]
with call[name[open], parameter[name[out_f], constant[r]]] begin[:]
variable[prev] assign[=] call[name[f].readlines, parameter[]]
if compare[name[prev] not_equal[!=] name[new_output]] begin[:]
variable[msg] assign[=] constant[updated]
if compare[name[msg] not_equal[!=] constant[skipped]] begin[:]
with call[name[open], parameter[name[out_f], constant[w]]] begin[:]
call[name[f].write, parameter[call[constant[].join, parameter[name[new_output]]]]]
if name[printlog] begin[:]
call[name[print], parameter[binary_operation[constant[%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0e2e0e0>, <ast.Name object at 0x7da1b0cb6290>]]]]]
|
keyword[def] identifier[summarize_methdods_and_functions] ( identifier[api_modules] , identifier[out_dir] ,
identifier[printlog] = keyword[False] , identifier[clean] = keyword[True] ,
identifier[str_above_header] = literal[string] ):
literal[string]
keyword[if] identifier[printlog] :
identifier[print] ( literal[string] %( literal[int] * literal[string] ))
keyword[if] identifier[clean] :
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[out_dir] ):
identifier[shutil] . identifier[rmtree] ( identifier[out_dir] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[out_dir] ):
identifier[os] . identifier[mkdir] ( identifier[out_dir] )
keyword[if] identifier[printlog] :
identifier[print] ( literal[string] % identifier[out_dir] )
identifier[subdir_paths] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[api_modules] , identifier[d] )
keyword[for] identifier[d] keyword[in] identifier[os] . identifier[listdir] ( identifier[api_modules] )
keyword[if] keyword[not] identifier[d] . identifier[startswith] ( literal[string] )]
identifier[out_files] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[out_dir] , identifier[os] . identifier[path] . identifier[basename] ( identifier[d] ))+ literal[string]
keyword[for] identifier[d] keyword[in] identifier[subdir_paths] ]
keyword[for] identifier[sub_p] , identifier[out_f] keyword[in] identifier[zip] ( identifier[subdir_paths] , identifier[out_files] ):
identifier[module_paths] =( identifier[os] . identifier[path] . identifier[join] ( identifier[sub_p] , identifier[m] )
keyword[for] identifier[m] keyword[in] identifier[os] . identifier[listdir] ( identifier[sub_p] )
keyword[if] keyword[not] identifier[m] . identifier[startswith] ( literal[string] ))
identifier[new_output] =[]
keyword[if] identifier[str_above_header] :
identifier[new_output] . identifier[append] ( identifier[str_above_header] )
keyword[for] identifier[p] keyword[in] identifier[module_paths] :
keyword[with] identifier[open] ( identifier[p] , literal[string] ) keyword[as] identifier[r] :
identifier[new_output] . identifier[extend] ( identifier[r] . identifier[readlines] ())
identifier[msg] = literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[out_f] ):
identifier[msg] = literal[string]
keyword[if] identifier[msg] != literal[string] :
keyword[with] identifier[open] ( identifier[out_f] , literal[string] ) keyword[as] identifier[f] :
identifier[prev] = identifier[f] . identifier[readlines] ()
keyword[if] identifier[prev] != identifier[new_output] :
identifier[msg] = literal[string]
keyword[else] :
identifier[msg] = literal[string]
keyword[if] identifier[msg] != literal[string] :
keyword[with] identifier[open] ( identifier[out_f] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( literal[string] . identifier[join] ( identifier[new_output] ))
keyword[if] identifier[printlog] :
identifier[print] ( literal[string] %( identifier[msg] , identifier[out_f] ))
|
def summarize_methdods_and_functions(api_modules, out_dir, printlog=False, clean=True, str_above_header=''):
"""Generates subpacke-level summary files.
Description
-----------
A function to generate subpacke-level summary markdown API files from
a module-level API documentation previously created via the
`generate_api_docs` function.
The output structure is:
package/package.subpackage.md
Parameters
----------
api_modules : str
Path to the API documentation crated via `generate_api_docs`
out_dir : str
Path to the desired output directory for the new markdown files.
clean : bool (default: False)
Removes previously existing API directory if True.
printlog : bool (default: True)
Prints a progress log to the standard output screen if True.
str_above_header : str (default: '')
Places a string just above the header.
"""
if printlog:
print('\n\nGenerating Subpackage Files\n%s\n' % (50 * '=')) # depends on [control=['if'], data=[]]
if clean:
if os.path.isdir(out_dir):
shutil.rmtree(out_dir) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not os.path.isdir(out_dir):
os.mkdir(out_dir)
if printlog:
print('created %s' % out_dir) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
subdir_paths = [os.path.join(api_modules, d) for d in os.listdir(api_modules) if not d.startswith('.')]
out_files = [os.path.join(out_dir, os.path.basename(d)) + '.md' for d in subdir_paths]
for (sub_p, out_f) in zip(subdir_paths, out_files):
module_paths = (os.path.join(sub_p, m) for m in os.listdir(sub_p) if not m.startswith('.'))
new_output = []
if str_above_header:
new_output.append(str_above_header) # depends on [control=['if'], data=[]]
for p in module_paths:
with open(p, 'r') as r:
new_output.extend(r.readlines()) # depends on [control=['with'], data=['r']] # depends on [control=['for'], data=['p']]
msg = ''
if not os.path.isfile(out_f):
msg = 'created' # depends on [control=['if'], data=[]]
if msg != 'created':
with open(out_f, 'r') as f:
prev = f.readlines() # depends on [control=['with'], data=['f']]
if prev != new_output:
msg = 'updated' # depends on [control=['if'], data=[]]
else:
msg = 'skipped' # depends on [control=['if'], data=['msg']]
if msg != 'skipped':
with open(out_f, 'w') as f:
f.write(''.join(new_output)) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
if printlog:
print('%s %s' % (msg, out_f)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
|
def list_teams(profile="github", ignore_cache=False):
'''
Lists all teams with the organization.
profile
The name of the profile configuration to use. Defaults to ``github``.
ignore_cache
Bypasses the use of cached teams.
CLI Example:
.. code-block:: bash
salt myminion github.list_teams
.. versionadded:: 2016.11.0
'''
key = 'github.{0}:teams'.format(
_get_config_value(profile, 'org_name')
)
if key not in __context__ or ignore_cache:
client = _get_client(profile)
organization = client.get_organization(
_get_config_value(profile, 'org_name')
)
teams_data = organization.get_teams()
teams = {}
for team in teams_data:
# Note that _rawData is used to access some properties here as they
# are not exposed in older versions of PyGithub. It's VERY important
# to use team._rawData instead of team.raw_data, as the latter forces
# an API call to retrieve team details again.
teams[team.name] = {
'id': team.id,
'slug': team.slug,
'description': team._rawData['description'],
'permission': team.permission,
'privacy': team._rawData['privacy']
}
__context__[key] = teams
return __context__[key]
|
def function[list_teams, parameter[profile, ignore_cache]]:
constant[
Lists all teams with the organization.
profile
The name of the profile configuration to use. Defaults to ``github``.
ignore_cache
Bypasses the use of cached teams.
CLI Example:
.. code-block:: bash
salt myminion github.list_teams
.. versionadded:: 2016.11.0
]
variable[key] assign[=] call[constant[github.{0}:teams].format, parameter[call[name[_get_config_value], parameter[name[profile], constant[org_name]]]]]
if <ast.BoolOp object at 0x7da1b1c5f010> begin[:]
variable[client] assign[=] call[name[_get_client], parameter[name[profile]]]
variable[organization] assign[=] call[name[client].get_organization, parameter[call[name[_get_config_value], parameter[name[profile], constant[org_name]]]]]
variable[teams_data] assign[=] call[name[organization].get_teams, parameter[]]
variable[teams] assign[=] dictionary[[], []]
for taget[name[team]] in starred[name[teams_data]] begin[:]
call[name[teams]][name[team].name] assign[=] dictionary[[<ast.Constant object at 0x7da1b21ad5a0>, <ast.Constant object at 0x7da1b21ace20>, <ast.Constant object at 0x7da1b21ac6a0>, <ast.Constant object at 0x7da1b21ad8d0>, <ast.Constant object at 0x7da1b21aeda0>], [<ast.Attribute object at 0x7da1b21e96f0>, <ast.Attribute object at 0x7da1b21e8d00>, <ast.Subscript object at 0x7da1b21ea440>, <ast.Attribute object at 0x7da1b21eb1c0>, <ast.Subscript object at 0x7da1b21ea6e0>]]
call[name[__context__]][name[key]] assign[=] name[teams]
return[call[name[__context__]][name[key]]]
|
keyword[def] identifier[list_teams] ( identifier[profile] = literal[string] , identifier[ignore_cache] = keyword[False] ):
literal[string]
identifier[key] = literal[string] . identifier[format] (
identifier[_get_config_value] ( identifier[profile] , literal[string] )
)
keyword[if] identifier[key] keyword[not] keyword[in] identifier[__context__] keyword[or] identifier[ignore_cache] :
identifier[client] = identifier[_get_client] ( identifier[profile] )
identifier[organization] = identifier[client] . identifier[get_organization] (
identifier[_get_config_value] ( identifier[profile] , literal[string] )
)
identifier[teams_data] = identifier[organization] . identifier[get_teams] ()
identifier[teams] ={}
keyword[for] identifier[team] keyword[in] identifier[teams_data] :
identifier[teams] [ identifier[team] . identifier[name] ]={
literal[string] : identifier[team] . identifier[id] ,
literal[string] : identifier[team] . identifier[slug] ,
literal[string] : identifier[team] . identifier[_rawData] [ literal[string] ],
literal[string] : identifier[team] . identifier[permission] ,
literal[string] : identifier[team] . identifier[_rawData] [ literal[string] ]
}
identifier[__context__] [ identifier[key] ]= identifier[teams]
keyword[return] identifier[__context__] [ identifier[key] ]
|
def list_teams(profile='github', ignore_cache=False):
"""
Lists all teams with the organization.
profile
The name of the profile configuration to use. Defaults to ``github``.
ignore_cache
Bypasses the use of cached teams.
CLI Example:
.. code-block:: bash
salt myminion github.list_teams
.. versionadded:: 2016.11.0
"""
key = 'github.{0}:teams'.format(_get_config_value(profile, 'org_name'))
if key not in __context__ or ignore_cache:
client = _get_client(profile)
organization = client.get_organization(_get_config_value(profile, 'org_name'))
teams_data = organization.get_teams()
teams = {}
for team in teams_data:
# Note that _rawData is used to access some properties here as they
# are not exposed in older versions of PyGithub. It's VERY important
# to use team._rawData instead of team.raw_data, as the latter forces
# an API call to retrieve team details again.
teams[team.name] = {'id': team.id, 'slug': team.slug, 'description': team._rawData['description'], 'permission': team.permission, 'privacy': team._rawData['privacy']} # depends on [control=['for'], data=['team']]
__context__[key] = teams # depends on [control=['if'], data=[]]
return __context__[key]
|
def format_index(prefix, timestamp, sep='-'):
"""
:type prefix str
:type timestamp int
:type sep str
:rtype: str
"""
tz_info = tz.tzutc()
# ex. logstash-other-2017.05.09
return "{prefix}{sep}{date}".format(
prefix=prefix, sep=sep, date=datetime.fromtimestamp(timestamp, tz=tz_info).strftime('%Y.%m.%d'))
|
def function[format_index, parameter[prefix, timestamp, sep]]:
constant[
:type prefix str
:type timestamp int
:type sep str
:rtype: str
]
variable[tz_info] assign[=] call[name[tz].tzutc, parameter[]]
return[call[constant[{prefix}{sep}{date}].format, parameter[]]]
|
keyword[def] identifier[format_index] ( identifier[prefix] , identifier[timestamp] , identifier[sep] = literal[string] ):
literal[string]
identifier[tz_info] = identifier[tz] . identifier[tzutc] ()
keyword[return] literal[string] . identifier[format] (
identifier[prefix] = identifier[prefix] , identifier[sep] = identifier[sep] , identifier[date] = identifier[datetime] . identifier[fromtimestamp] ( identifier[timestamp] , identifier[tz] = identifier[tz_info] ). identifier[strftime] ( literal[string] ))
|
def format_index(prefix, timestamp, sep='-'):
"""
:type prefix str
:type timestamp int
:type sep str
:rtype: str
"""
tz_info = tz.tzutc()
# ex. logstash-other-2017.05.09
return '{prefix}{sep}{date}'.format(prefix=prefix, sep=sep, date=datetime.fromtimestamp(timestamp, tz=tz_info).strftime('%Y.%m.%d'))
|
def time_segments_average(X, interval, time_column):
"""Compute average of values over fixed length time segments."""
warnings.warn(_TIME_SEGMENTS_AVERAGE_DEPRECATION_WARNING, DeprecationWarning)
if isinstance(X, np.ndarray):
X = pd.DataFrame(X)
X = X.sort_values(time_column).set_index(time_column)
start_ts = X.index.values[0]
max_ts = X.index.values[-1]
values = list()
index = list()
while start_ts <= max_ts:
end_ts = start_ts + interval
subset = X.loc[start_ts:end_ts - 1]
means = subset.mean(skipna=True).values
values.append(means)
index.append(start_ts)
start_ts = end_ts
return np.asarray(values), np.asarray(index)
|
def function[time_segments_average, parameter[X, interval, time_column]]:
constant[Compute average of values over fixed length time segments.]
call[name[warnings].warn, parameter[name[_TIME_SEGMENTS_AVERAGE_DEPRECATION_WARNING], name[DeprecationWarning]]]
if call[name[isinstance], parameter[name[X], name[np].ndarray]] begin[:]
variable[X] assign[=] call[name[pd].DataFrame, parameter[name[X]]]
variable[X] assign[=] call[call[name[X].sort_values, parameter[name[time_column]]].set_index, parameter[name[time_column]]]
variable[start_ts] assign[=] call[name[X].index.values][constant[0]]
variable[max_ts] assign[=] call[name[X].index.values][<ast.UnaryOp object at 0x7da207f030d0>]
variable[values] assign[=] call[name[list], parameter[]]
variable[index] assign[=] call[name[list], parameter[]]
while compare[name[start_ts] less_or_equal[<=] name[max_ts]] begin[:]
variable[end_ts] assign[=] binary_operation[name[start_ts] + name[interval]]
variable[subset] assign[=] call[name[X].loc][<ast.Slice object at 0x7da207f01f00>]
variable[means] assign[=] call[name[subset].mean, parameter[]].values
call[name[values].append, parameter[name[means]]]
call[name[index].append, parameter[name[start_ts]]]
variable[start_ts] assign[=] name[end_ts]
return[tuple[[<ast.Call object at 0x7da207f01cc0>, <ast.Call object at 0x7da207f01bd0>]]]
|
keyword[def] identifier[time_segments_average] ( identifier[X] , identifier[interval] , identifier[time_column] ):
literal[string]
identifier[warnings] . identifier[warn] ( identifier[_TIME_SEGMENTS_AVERAGE_DEPRECATION_WARNING] , identifier[DeprecationWarning] )
keyword[if] identifier[isinstance] ( identifier[X] , identifier[np] . identifier[ndarray] ):
identifier[X] = identifier[pd] . identifier[DataFrame] ( identifier[X] )
identifier[X] = identifier[X] . identifier[sort_values] ( identifier[time_column] ). identifier[set_index] ( identifier[time_column] )
identifier[start_ts] = identifier[X] . identifier[index] . identifier[values] [ literal[int] ]
identifier[max_ts] = identifier[X] . identifier[index] . identifier[values] [- literal[int] ]
identifier[values] = identifier[list] ()
identifier[index] = identifier[list] ()
keyword[while] identifier[start_ts] <= identifier[max_ts] :
identifier[end_ts] = identifier[start_ts] + identifier[interval]
identifier[subset] = identifier[X] . identifier[loc] [ identifier[start_ts] : identifier[end_ts] - literal[int] ]
identifier[means] = identifier[subset] . identifier[mean] ( identifier[skipna] = keyword[True] ). identifier[values]
identifier[values] . identifier[append] ( identifier[means] )
identifier[index] . identifier[append] ( identifier[start_ts] )
identifier[start_ts] = identifier[end_ts]
keyword[return] identifier[np] . identifier[asarray] ( identifier[values] ), identifier[np] . identifier[asarray] ( identifier[index] )
|
def time_segments_average(X, interval, time_column):
"""Compute average of values over fixed length time segments."""
warnings.warn(_TIME_SEGMENTS_AVERAGE_DEPRECATION_WARNING, DeprecationWarning)
if isinstance(X, np.ndarray):
X = pd.DataFrame(X) # depends on [control=['if'], data=[]]
X = X.sort_values(time_column).set_index(time_column)
start_ts = X.index.values[0]
max_ts = X.index.values[-1]
values = list()
index = list()
while start_ts <= max_ts:
end_ts = start_ts + interval
subset = X.loc[start_ts:end_ts - 1]
means = subset.mean(skipna=True).values
values.append(means)
index.append(start_ts)
start_ts = end_ts # depends on [control=['while'], data=['start_ts']]
return (np.asarray(values), np.asarray(index))
|
def read_mm_uic3(fd, byte_order, dtype, count):
"""Read MM_UIC3 tag from file and return as dictionary."""
t = numpy.fromfile(fd, byte_order+'I', 2*count)
return {'wavelengths': t[0::2] // t[1::2]}
|
def function[read_mm_uic3, parameter[fd, byte_order, dtype, count]]:
constant[Read MM_UIC3 tag from file and return as dictionary.]
variable[t] assign[=] call[name[numpy].fromfile, parameter[name[fd], binary_operation[name[byte_order] + constant[I]], binary_operation[constant[2] * name[count]]]]
return[dictionary[[<ast.Constant object at 0x7da18fe93c40>], [<ast.BinOp object at 0x7da18fe922c0>]]]
|
keyword[def] identifier[read_mm_uic3] ( identifier[fd] , identifier[byte_order] , identifier[dtype] , identifier[count] ):
literal[string]
identifier[t] = identifier[numpy] . identifier[fromfile] ( identifier[fd] , identifier[byte_order] + literal[string] , literal[int] * identifier[count] )
keyword[return] { literal[string] : identifier[t] [ literal[int] :: literal[int] ]// identifier[t] [ literal[int] :: literal[int] ]}
|
def read_mm_uic3(fd, byte_order, dtype, count):
"""Read MM_UIC3 tag from file and return as dictionary."""
t = numpy.fromfile(fd, byte_order + 'I', 2 * count)
return {'wavelengths': t[0::2] // t[1::2]}
|
def get_acquisition_options(self, item_id, installation_target, test_commerce=None, is_free_or_trial_install=None):
"""GetAcquisitionOptions.
[Preview API]
:param str item_id:
:param str installation_target:
:param bool test_commerce:
:param bool is_free_or_trial_install:
:rtype: :class:`<AcquisitionOptions> <azure.devops.v5_1.gallery.models.AcquisitionOptions>`
"""
route_values = {}
if item_id is not None:
route_values['itemId'] = self._serialize.url('item_id', item_id, 'str')
query_parameters = {}
if installation_target is not None:
query_parameters['installationTarget'] = self._serialize.query('installation_target', installation_target, 'str')
if test_commerce is not None:
query_parameters['testCommerce'] = self._serialize.query('test_commerce', test_commerce, 'bool')
if is_free_or_trial_install is not None:
query_parameters['isFreeOrTrialInstall'] = self._serialize.query('is_free_or_trial_install', is_free_or_trial_install, 'bool')
response = self._send(http_method='GET',
location_id='9d0a0105-075e-4760-aa15-8bcf54d1bd7d',
version='5.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('AcquisitionOptions', response)
|
def function[get_acquisition_options, parameter[self, item_id, installation_target, test_commerce, is_free_or_trial_install]]:
constant[GetAcquisitionOptions.
[Preview API]
:param str item_id:
:param str installation_target:
:param bool test_commerce:
:param bool is_free_or_trial_install:
:rtype: :class:`<AcquisitionOptions> <azure.devops.v5_1.gallery.models.AcquisitionOptions>`
]
variable[route_values] assign[=] dictionary[[], []]
if compare[name[item_id] is_not constant[None]] begin[:]
call[name[route_values]][constant[itemId]] assign[=] call[name[self]._serialize.url, parameter[constant[item_id], name[item_id], constant[str]]]
variable[query_parameters] assign[=] dictionary[[], []]
if compare[name[installation_target] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[installationTarget]] assign[=] call[name[self]._serialize.query, parameter[constant[installation_target], name[installation_target], constant[str]]]
if compare[name[test_commerce] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[testCommerce]] assign[=] call[name[self]._serialize.query, parameter[constant[test_commerce], name[test_commerce], constant[bool]]]
if compare[name[is_free_or_trial_install] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[isFreeOrTrialInstall]] assign[=] call[name[self]._serialize.query, parameter[constant[is_free_or_trial_install], name[is_free_or_trial_install], constant[bool]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[AcquisitionOptions], name[response]]]]
|
keyword[def] identifier[get_acquisition_options] ( identifier[self] , identifier[item_id] , identifier[installation_target] , identifier[test_commerce] = keyword[None] , identifier[is_free_or_trial_install] = keyword[None] ):
literal[string]
identifier[route_values] ={}
keyword[if] identifier[item_id] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[item_id] , literal[string] )
identifier[query_parameters] ={}
keyword[if] identifier[installation_target] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[installation_target] , literal[string] )
keyword[if] identifier[test_commerce] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[test_commerce] , literal[string] )
keyword[if] identifier[is_free_or_trial_install] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[is_free_or_trial_install] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] ,
identifier[query_parameters] = identifier[query_parameters] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
|
def get_acquisition_options(self, item_id, installation_target, test_commerce=None, is_free_or_trial_install=None):
"""GetAcquisitionOptions.
[Preview API]
:param str item_id:
:param str installation_target:
:param bool test_commerce:
:param bool is_free_or_trial_install:
:rtype: :class:`<AcquisitionOptions> <azure.devops.v5_1.gallery.models.AcquisitionOptions>`
"""
route_values = {}
if item_id is not None:
route_values['itemId'] = self._serialize.url('item_id', item_id, 'str') # depends on [control=['if'], data=['item_id']]
query_parameters = {}
if installation_target is not None:
query_parameters['installationTarget'] = self._serialize.query('installation_target', installation_target, 'str') # depends on [control=['if'], data=['installation_target']]
if test_commerce is not None:
query_parameters['testCommerce'] = self._serialize.query('test_commerce', test_commerce, 'bool') # depends on [control=['if'], data=['test_commerce']]
if is_free_or_trial_install is not None:
query_parameters['isFreeOrTrialInstall'] = self._serialize.query('is_free_or_trial_install', is_free_or_trial_install, 'bool') # depends on [control=['if'], data=['is_free_or_trial_install']]
response = self._send(http_method='GET', location_id='9d0a0105-075e-4760-aa15-8bcf54d1bd7d', version='5.1-preview.1', route_values=route_values, query_parameters=query_parameters)
return self._deserialize('AcquisitionOptions', response)
|
def parse_public(data):
"""
Loads a public key from a DER or PEM-formatted file. Supports RSA, DSA and
EC public keys. For RSA keys, both the old RSAPublicKey and
SubjectPublicKeyInfo structures are supported. Also allows extracting a
public key from an X.509 certificate.
:param data:
A byte string to load the public key from
:raises:
ValueError - when the data does not appear to contain a public key
:return:
An asn1crypto.keys.PublicKeyInfo object
"""
if not isinstance(data, byte_cls):
raise TypeError(pretty_message(
'''
data must be a byte string, not %s
''',
type_name(data)
))
key_type = None
# Appears to be PEM formatted
if data[0:5] == b'-----':
key_type, algo, data = _unarmor_pem(data)
if key_type == 'private key':
raise ValueError(pretty_message(
'''
The data specified does not appear to be a public key or
certificate, but rather a private key
'''
))
# When a public key returning from _unarmor_pem has a known algorithm
# of RSA, that means the DER structure is of the type RSAPublicKey, so
# we need to wrap it in the PublicKeyInfo structure.
if algo == 'rsa':
return keys.PublicKeyInfo.wrap(data, 'rsa')
if key_type is None or key_type == 'public key':
try:
pki = keys.PublicKeyInfo.load(data)
# Call .native to fully parse since asn1crypto is lazy
pki.native
return pki
except (ValueError):
pass # Data was not PublicKeyInfo
try:
rpk = keys.RSAPublicKey.load(data)
# Call .native to fully parse since asn1crypto is lazy
rpk.native
return keys.PublicKeyInfo.wrap(rpk, 'rsa')
except (ValueError):
pass # Data was not an RSAPublicKey
if key_type is None or key_type == 'certificate':
try:
parsed_cert = x509.Certificate.load(data)
key_info = parsed_cert['tbs_certificate']['subject_public_key_info']
return key_info
except (ValueError):
pass # Data was not a cert
raise ValueError('The data specified does not appear to be a known public key or certificate format')
|
def function[parse_public, parameter[data]]:
constant[
Loads a public key from a DER or PEM-formatted file. Supports RSA, DSA and
EC public keys. For RSA keys, both the old RSAPublicKey and
SubjectPublicKeyInfo structures are supported. Also allows extracting a
public key from an X.509 certificate.
:param data:
A byte string to load the public key from
:raises:
ValueError - when the data does not appear to contain a public key
:return:
An asn1crypto.keys.PublicKeyInfo object
]
if <ast.UnaryOp object at 0x7da1b00ea890> begin[:]
<ast.Raise object at 0x7da1b00eb850>
variable[key_type] assign[=] constant[None]
if compare[call[name[data]][<ast.Slice object at 0x7da1b00ea560>] equal[==] constant[b'-----']] begin[:]
<ast.Tuple object at 0x7da1b00eabc0> assign[=] call[name[_unarmor_pem], parameter[name[data]]]
if compare[name[key_type] equal[==] constant[private key]] begin[:]
<ast.Raise object at 0x7da1b00dcb80>
if compare[name[algo] equal[==] constant[rsa]] begin[:]
return[call[name[keys].PublicKeyInfo.wrap, parameter[name[data], constant[rsa]]]]
if <ast.BoolOp object at 0x7da1b00de0b0> begin[:]
<ast.Try object at 0x7da1b00de380>
<ast.Try object at 0x7da1b00b0340>
if <ast.BoolOp object at 0x7da1b0089f60> begin[:]
<ast.Try object at 0x7da1b008a170>
<ast.Raise object at 0x7da1b0089450>
|
keyword[def] identifier[parse_public] ( identifier[data] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[data] , identifier[byte_cls] ):
keyword[raise] identifier[TypeError] ( identifier[pretty_message] (
literal[string] ,
identifier[type_name] ( identifier[data] )
))
identifier[key_type] = keyword[None]
keyword[if] identifier[data] [ literal[int] : literal[int] ]== literal[string] :
identifier[key_type] , identifier[algo] , identifier[data] = identifier[_unarmor_pem] ( identifier[data] )
keyword[if] identifier[key_type] == literal[string] :
keyword[raise] identifier[ValueError] ( identifier[pretty_message] (
literal[string]
))
keyword[if] identifier[algo] == literal[string] :
keyword[return] identifier[keys] . identifier[PublicKeyInfo] . identifier[wrap] ( identifier[data] , literal[string] )
keyword[if] identifier[key_type] keyword[is] keyword[None] keyword[or] identifier[key_type] == literal[string] :
keyword[try] :
identifier[pki] = identifier[keys] . identifier[PublicKeyInfo] . identifier[load] ( identifier[data] )
identifier[pki] . identifier[native]
keyword[return] identifier[pki]
keyword[except] ( identifier[ValueError] ):
keyword[pass]
keyword[try] :
identifier[rpk] = identifier[keys] . identifier[RSAPublicKey] . identifier[load] ( identifier[data] )
identifier[rpk] . identifier[native]
keyword[return] identifier[keys] . identifier[PublicKeyInfo] . identifier[wrap] ( identifier[rpk] , literal[string] )
keyword[except] ( identifier[ValueError] ):
keyword[pass]
keyword[if] identifier[key_type] keyword[is] keyword[None] keyword[or] identifier[key_type] == literal[string] :
keyword[try] :
identifier[parsed_cert] = identifier[x509] . identifier[Certificate] . identifier[load] ( identifier[data] )
identifier[key_info] = identifier[parsed_cert] [ literal[string] ][ literal[string] ]
keyword[return] identifier[key_info]
keyword[except] ( identifier[ValueError] ):
keyword[pass]
keyword[raise] identifier[ValueError] ( literal[string] )
|
def parse_public(data):
"""
Loads a public key from a DER or PEM-formatted file. Supports RSA, DSA and
EC public keys. For RSA keys, both the old RSAPublicKey and
SubjectPublicKeyInfo structures are supported. Also allows extracting a
public key from an X.509 certificate.
:param data:
A byte string to load the public key from
:raises:
ValueError - when the data does not appear to contain a public key
:return:
An asn1crypto.keys.PublicKeyInfo object
"""
if not isinstance(data, byte_cls):
raise TypeError(pretty_message('\n data must be a byte string, not %s\n ', type_name(data))) # depends on [control=['if'], data=[]]
key_type = None
# Appears to be PEM formatted
if data[0:5] == b'-----':
(key_type, algo, data) = _unarmor_pem(data)
if key_type == 'private key':
raise ValueError(pretty_message('\n The data specified does not appear to be a public key or\n certificate, but rather a private key\n ')) # depends on [control=['if'], data=[]]
# When a public key returning from _unarmor_pem has a known algorithm
# of RSA, that means the DER structure is of the type RSAPublicKey, so
# we need to wrap it in the PublicKeyInfo structure.
if algo == 'rsa':
return keys.PublicKeyInfo.wrap(data, 'rsa') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if key_type is None or key_type == 'public key':
try:
pki = keys.PublicKeyInfo.load(data)
# Call .native to fully parse since asn1crypto is lazy
pki.native
return pki # depends on [control=['try'], data=[]]
except ValueError:
pass # Data was not PublicKeyInfo # depends on [control=['except'], data=[]]
try:
rpk = keys.RSAPublicKey.load(data)
# Call .native to fully parse since asn1crypto is lazy
rpk.native
return keys.PublicKeyInfo.wrap(rpk, 'rsa') # depends on [control=['try'], data=[]]
except ValueError:
pass # Data was not an RSAPublicKey # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if key_type is None or key_type == 'certificate':
try:
parsed_cert = x509.Certificate.load(data)
key_info = parsed_cert['tbs_certificate']['subject_public_key_info']
return key_info # depends on [control=['try'], data=[]]
except ValueError:
pass # Data was not a cert # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
raise ValueError('The data specified does not appear to be a known public key or certificate format')
|
def get_name(self):
"""Get the host name.
Try several attributes before returning UNNAMED*
:return: The name of the host
:rtype: str
"""
if not self.is_tpl():
try:
return self.host_name
except AttributeError: # outch, no hostname
return 'UNNAMEDHOST'
else:
try:
return self.name
except AttributeError: # outch, no name for this template
return 'UNNAMEDHOSTTEMPLATE'
|
def function[get_name, parameter[self]]:
constant[Get the host name.
Try several attributes before returning UNNAMED*
:return: The name of the host
:rtype: str
]
if <ast.UnaryOp object at 0x7da20c7c9960> begin[:]
<ast.Try object at 0x7da20c7cbac0>
|
keyword[def] identifier[get_name] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[is_tpl] ():
keyword[try] :
keyword[return] identifier[self] . identifier[host_name]
keyword[except] identifier[AttributeError] :
keyword[return] literal[string]
keyword[else] :
keyword[try] :
keyword[return] identifier[self] . identifier[name]
keyword[except] identifier[AttributeError] :
keyword[return] literal[string]
|
def get_name(self):
"""Get the host name.
Try several attributes before returning UNNAMED*
:return: The name of the host
:rtype: str
"""
if not self.is_tpl():
try:
return self.host_name # depends on [control=['try'], data=[]]
except AttributeError: # outch, no hostname
return 'UNNAMEDHOST' # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
try:
return self.name # depends on [control=['try'], data=[]]
except AttributeError: # outch, no name for this template
return 'UNNAMEDHOSTTEMPLATE' # depends on [control=['except'], data=[]]
|
def connect(self, sinks):
"""! @brief Connect one or more downstream trace sinks.
@param self
@param sinks If this parameter is a single object, it will be added to the list of
downstream trace event sinks. If it is an iterable (list, tuple, etc.), then it will
completely replace the current list of trace event sinks.
"""
if isinstance(sinks, collections.Iterable):
self._sinks = sinks
elif sinks not in self._sinks:
self._sinks.append(sinks)
|
def function[connect, parameter[self, sinks]]:
constant[! @brief Connect one or more downstream trace sinks.
@param self
@param sinks If this parameter is a single object, it will be added to the list of
downstream trace event sinks. If it is an iterable (list, tuple, etc.), then it will
completely replace the current list of trace event sinks.
]
if call[name[isinstance], parameter[name[sinks], name[collections].Iterable]] begin[:]
name[self]._sinks assign[=] name[sinks]
|
keyword[def] identifier[connect] ( identifier[self] , identifier[sinks] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[sinks] , identifier[collections] . identifier[Iterable] ):
identifier[self] . identifier[_sinks] = identifier[sinks]
keyword[elif] identifier[sinks] keyword[not] keyword[in] identifier[self] . identifier[_sinks] :
identifier[self] . identifier[_sinks] . identifier[append] ( identifier[sinks] )
|
def connect(self, sinks):
"""! @brief Connect one or more downstream trace sinks.
@param self
@param sinks If this parameter is a single object, it will be added to the list of
downstream trace event sinks. If it is an iterable (list, tuple, etc.), then it will
completely replace the current list of trace event sinks.
"""
if isinstance(sinks, collections.Iterable):
self._sinks = sinks # depends on [control=['if'], data=[]]
elif sinks not in self._sinks:
self._sinks.append(sinks) # depends on [control=['if'], data=['sinks']]
|
def extra_downloader_converter(value):
"""Parses extra_{downloader,converter} arguments.
Parameters
----------
value : iterable or str
If the value is a string, it is split into a list using spaces
as delimiters. Otherwise, it is returned as is.
"""
if isinstance(value, six.string_types):
value = value.split(" ")
return value
|
def function[extra_downloader_converter, parameter[value]]:
constant[Parses extra_{downloader,converter} arguments.
Parameters
----------
value : iterable or str
If the value is a string, it is split into a list using spaces
as delimiters. Otherwise, it is returned as is.
]
if call[name[isinstance], parameter[name[value], name[six].string_types]] begin[:]
variable[value] assign[=] call[name[value].split, parameter[constant[ ]]]
return[name[value]]
|
keyword[def] identifier[extra_downloader_converter] ( identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[string_types] ):
identifier[value] = identifier[value] . identifier[split] ( literal[string] )
keyword[return] identifier[value]
|
def extra_downloader_converter(value):
"""Parses extra_{downloader,converter} arguments.
Parameters
----------
value : iterable or str
If the value is a string, it is split into a list using spaces
as delimiters. Otherwise, it is returned as is.
"""
if isinstance(value, six.string_types):
value = value.split(' ') # depends on [control=['if'], data=[]]
return value
|
def copy_abs(self):
""" Return a copy of self with the sign bit unset.
Unlike abs(self), this does not make use of the context: the result
has the same precision as the original.
"""
result = mpfr.Mpfr_t.__new__(BigFloat)
mpfr.mpfr_init2(result, self.precision)
mpfr.mpfr_setsign(result, self, False, ROUND_TIES_TO_EVEN)
return result
|
def function[copy_abs, parameter[self]]:
constant[ Return a copy of self with the sign bit unset.
Unlike abs(self), this does not make use of the context: the result
has the same precision as the original.
]
variable[result] assign[=] call[name[mpfr].Mpfr_t.__new__, parameter[name[BigFloat]]]
call[name[mpfr].mpfr_init2, parameter[name[result], name[self].precision]]
call[name[mpfr].mpfr_setsign, parameter[name[result], name[self], constant[False], name[ROUND_TIES_TO_EVEN]]]
return[name[result]]
|
keyword[def] identifier[copy_abs] ( identifier[self] ):
literal[string]
identifier[result] = identifier[mpfr] . identifier[Mpfr_t] . identifier[__new__] ( identifier[BigFloat] )
identifier[mpfr] . identifier[mpfr_init2] ( identifier[result] , identifier[self] . identifier[precision] )
identifier[mpfr] . identifier[mpfr_setsign] ( identifier[result] , identifier[self] , keyword[False] , identifier[ROUND_TIES_TO_EVEN] )
keyword[return] identifier[result]
|
def copy_abs(self):
""" Return a copy of self with the sign bit unset.
Unlike abs(self), this does not make use of the context: the result
has the same precision as the original.
"""
result = mpfr.Mpfr_t.__new__(BigFloat)
mpfr.mpfr_init2(result, self.precision)
mpfr.mpfr_setsign(result, self, False, ROUND_TIES_TO_EVEN)
return result
|
def best_match(self, req, working_set, installer=None):
"""Find distribution best matching `req` and usable on `working_set`
This calls the ``find(req)`` method of the `working_set` to see if a
suitable distribution is already active. (This may raise
``VersionConflict`` if an unsuitable version of the project is already
active in the specified `working_set`.) If a suitable distribution
isn't active, this method returns the newest distribution in the
environment that meets the ``Requirement`` in `req`. If no suitable
distribution is found, and `installer` is supplied, then the result of
calling the environment's ``obtain(req, installer)`` method will be
returned.
"""
dist = working_set.find(req)
if dist is not None:
return dist
for dist in self[req.key]:
if dist in req:
return dist
# try to download/install
return self.obtain(req, installer)
|
def function[best_match, parameter[self, req, working_set, installer]]:
constant[Find distribution best matching `req` and usable on `working_set`
This calls the ``find(req)`` method of the `working_set` to see if a
suitable distribution is already active. (This may raise
``VersionConflict`` if an unsuitable version of the project is already
active in the specified `working_set`.) If a suitable distribution
isn't active, this method returns the newest distribution in the
environment that meets the ``Requirement`` in `req`. If no suitable
distribution is found, and `installer` is supplied, then the result of
calling the environment's ``obtain(req, installer)`` method will be
returned.
]
variable[dist] assign[=] call[name[working_set].find, parameter[name[req]]]
if compare[name[dist] is_not constant[None]] begin[:]
return[name[dist]]
for taget[name[dist]] in starred[call[name[self]][name[req].key]] begin[:]
if compare[name[dist] in name[req]] begin[:]
return[name[dist]]
return[call[name[self].obtain, parameter[name[req], name[installer]]]]
|
keyword[def] identifier[best_match] ( identifier[self] , identifier[req] , identifier[working_set] , identifier[installer] = keyword[None] ):
literal[string]
identifier[dist] = identifier[working_set] . identifier[find] ( identifier[req] )
keyword[if] identifier[dist] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[dist]
keyword[for] identifier[dist] keyword[in] identifier[self] [ identifier[req] . identifier[key] ]:
keyword[if] identifier[dist] keyword[in] identifier[req] :
keyword[return] identifier[dist]
keyword[return] identifier[self] . identifier[obtain] ( identifier[req] , identifier[installer] )
|
def best_match(self, req, working_set, installer=None):
"""Find distribution best matching `req` and usable on `working_set`
This calls the ``find(req)`` method of the `working_set` to see if a
suitable distribution is already active. (This may raise
``VersionConflict`` if an unsuitable version of the project is already
active in the specified `working_set`.) If a suitable distribution
isn't active, this method returns the newest distribution in the
environment that meets the ``Requirement`` in `req`. If no suitable
distribution is found, and `installer` is supplied, then the result of
calling the environment's ``obtain(req, installer)`` method will be
returned.
"""
dist = working_set.find(req)
if dist is not None:
return dist # depends on [control=['if'], data=['dist']]
for dist in self[req.key]:
if dist in req:
return dist # depends on [control=['if'], data=['dist']] # depends on [control=['for'], data=['dist']]
# try to download/install
return self.obtain(req, installer)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.