code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def _listen_inbox_messages(self):
    """Start listening to inbox messages, using separate worker threads.

    Messages from the Reddit inbox stream are pushed onto a bounded
    queue and consumed by ``n_jobs`` ``BotQueueWorker`` threads, each
    running ``self._process_inbox_message``.  On any exception the
    workers are stopped, the error is logged, and listening restarts
    after a 10-minute pause.
    """
    # Collect messages in a queue; bounded so the stream producer blocks
    # instead of growing without limit when the workers fall behind.
    inbox_queue = Queue(maxsize=self._n_jobs * 4)
    threads = [] # type: List[BotQueueWorker]
    try:
        # Create n_jobs inbox threads
        for i in range(self._n_jobs):
            t = BotQueueWorker(name='InboxThread-t-{}'.format(i),
                               jobs=inbox_queue,
                               target=self._process_inbox_message)
            t.start()
            # NOTE(review): workers are appended to self._threads, but the
            # local ``threads`` list stays empty, so the _do_stop() calls
            # below always receive an empty worker list — confirm intended.
            self._threads.append(t)
        # Iterate over all messages in the messages stream
        for message in self._reddit.inbox.stream():
            # Check for stopping
            if self._stop:
                self._do_stop(inbox_queue, threads)
                break
            inbox_queue.put(message)
        self.log.debug('Listen inbox stopped')
    except Exception as e:
        self._do_stop(inbox_queue, threads)
        self.log.error('Exception while listening to inbox:')
        self.log.error(str(e))
        self.log.error('Waiting for 10 minutes and trying again.')
        time.sleep(10 * 60)
        # Retry:
        # NOTE(review): recursive retry grows the call stack on every
        # failure; a loop would be safer if failures can be frequent.
        self._listen_inbox_messages() | def function[_listen_inbox_messages, parameter[self]]:
constant[Start listening to messages, using a separate thread.]
variable[inbox_queue] assign[=] call[name[Queue], parameter[]]
variable[threads] assign[=] list[[]]
<ast.Try object at 0x7da1b1d21cf0> | keyword[def] identifier[_listen_inbox_messages] ( identifier[self] ):
literal[string]
identifier[inbox_queue] = identifier[Queue] ( identifier[maxsize] = identifier[self] . identifier[_n_jobs] * literal[int] )
identifier[threads] =[]
keyword[try] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[_n_jobs] ):
identifier[t] = identifier[BotQueueWorker] ( identifier[name] = literal[string] . identifier[format] ( identifier[i] ),
identifier[jobs] = identifier[inbox_queue] ,
identifier[target] = identifier[self] . identifier[_process_inbox_message] )
identifier[t] . identifier[start] ()
identifier[self] . identifier[_threads] . identifier[append] ( identifier[t] )
keyword[for] identifier[message] keyword[in] identifier[self] . identifier[_reddit] . identifier[inbox] . identifier[stream] ():
keyword[if] identifier[self] . identifier[_stop] :
identifier[self] . identifier[_do_stop] ( identifier[inbox_queue] , identifier[threads] )
keyword[break]
identifier[inbox_queue] . identifier[put] ( identifier[message] )
identifier[self] . identifier[log] . identifier[debug] ( literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[_do_stop] ( identifier[inbox_queue] , identifier[threads] )
identifier[self] . identifier[log] . identifier[error] ( literal[string] )
identifier[self] . identifier[log] . identifier[error] ( identifier[str] ( identifier[e] ))
identifier[self] . identifier[log] . identifier[error] ( literal[string] )
identifier[time] . identifier[sleep] ( literal[int] * literal[int] )
identifier[self] . identifier[_listen_inbox_messages] () | def _listen_inbox_messages(self):
"""Start listening to messages, using a separate thread."""
# Collect messages in a queue
inbox_queue = Queue(maxsize=self._n_jobs * 4)
threads = [] # type: List[BotQueueWorker]
try:
# Create n_jobs inbox threads
for i in range(self._n_jobs):
t = BotQueueWorker(name='InboxThread-t-{}'.format(i), jobs=inbox_queue, target=self._process_inbox_message)
t.start()
self._threads.append(t) # depends on [control=['for'], data=['i']]
# Iterate over all messages in the messages stream
for message in self._reddit.inbox.stream():
# Check for stopping
if self._stop:
self._do_stop(inbox_queue, threads)
break # depends on [control=['if'], data=[]]
inbox_queue.put(message) # depends on [control=['for'], data=['message']]
self.log.debug('Listen inbox stopped') # depends on [control=['try'], data=[]]
except Exception as e:
self._do_stop(inbox_queue, threads)
self.log.error('Exception while listening to inbox:')
self.log.error(str(e))
self.log.error('Waiting for 10 minutes and trying again.')
time.sleep(10 * 60)
# Retry:
self._listen_inbox_messages() # depends on [control=['except'], data=['e']] |
def getFileFormat(self, name, args):
    """Retrieve the content of a file via the configuration and
    interpolate the variables given as arguments.

    :param name: configuration key holding the template file path
    :param args: mapping of values substituted via ``str.format(**args)``
    :return: the formatted file content, or False if the file is missing
    """
    # Resolve the template file name from the configuration
    # ("--" is the fallback value when the key is absent).
    template_pathname = self.get(name, "--")
    if not os.path.isfile(template_pathname):
        # NOTE(review): returning False here but a str below mixes return
        # types — callers must test with ``is False`` or truthiness.
        return False
    # Read the template and interpolate the given variables.
    content = ""
    with open(template_pathname) as fp:
        # Create a text/plain message
        content = fp.read().format(**args)
    # success
    return content | def function[getFileFormat, parameter[self, name, args]]:
constant[ Récupération du contenu d'un fichier via la configuration
et interprétation des variables données en argument ]
variable[template_pathname] assign[=] call[name[self].get, parameter[name[name], constant[--]]]
if <ast.UnaryOp object at 0x7da20c7cb8b0> begin[:]
return[constant[False]]
variable[content] assign[=] constant[]
with call[name[open], parameter[name[template_pathname]]] begin[:]
variable[content] assign[=] call[call[name[fp].read, parameter[]].format, parameter[]]
return[name[content]] | keyword[def] identifier[getFileFormat] ( identifier[self] , identifier[name] , identifier[args] ):
literal[string]
identifier[template_pathname] = identifier[self] . identifier[get] ( identifier[name] , literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[template_pathname] ):
keyword[return] keyword[False]
identifier[content] = literal[string]
keyword[with] identifier[open] ( identifier[template_pathname] ) keyword[as] identifier[fp] :
identifier[content] = identifier[fp] . identifier[read] (). identifier[format] (** identifier[args] )
keyword[return] identifier[content] | def getFileFormat(self, name, args):
""" Récupération du contenu d'un fichier via la configuration
et interprétation des variables données en argument """ # récupération du nom du fichier
template_pathname = self.get(name, '--')
if not os.path.isfile(template_pathname):
return False # depends on [control=['if'], data=[]] # configuration
content = ''
with open(template_pathname) as fp: # Create a text/plain message
content = fp.read().format(**args) # depends on [control=['with'], data=['fp']] # retour ok
return content |
def counts_to_dicts(df, column):
    """
    convert (values, counts) as returned by aggregate.aggregate_counts() to dicts
    makes expand_counts much faster
    """
    # index where there are counts and they aren't null
    # (each cell c is presumably a (values, counts) pair of equal-length
    #  sequences — TODO confirm against aggregate.aggregate_counts)
    d = df[column].apply(lambda c: pd.notnull(c) and len(c[0]) > 0)
    # zip each row's values with their counts into a single dict
    return df.loc[d, column].apply(lambda c: {k: v for k, v in zip(*c)}) | def function[counts_to_dicts, parameter[df, column]]:
constant[
convert (values, counts) as returned by aggregate.aggregate_counts() to dicts
makes expand_counts much faster
]
variable[d] assign[=] call[call[name[df]][name[column]].apply, parameter[<ast.Lambda object at 0x7da1b23514e0>]]
return[call[call[name[df].loc][tuple[[<ast.Name object at 0x7da1b2350f70>, <ast.Name object at 0x7da1b2351780>]]].apply, parameter[<ast.Lambda object at 0x7da1b2350c40>]]] | keyword[def] identifier[counts_to_dicts] ( identifier[df] , identifier[column] ):
literal[string]
identifier[d] = identifier[df] [ identifier[column] ]. identifier[apply] ( keyword[lambda] identifier[c] : identifier[pd] . identifier[notnull] ( identifier[c] ) keyword[and] identifier[len] ( identifier[c] [ literal[int] ])> literal[int] )
keyword[return] identifier[df] . identifier[loc] [ identifier[d] , identifier[column] ]. identifier[apply] ( keyword[lambda] identifier[c] :{ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[zip] (* identifier[c] )}) | def counts_to_dicts(df, column):
"""
convert (values, counts) as returned by aggregate.aggregate_counts() to dicts
makes expand_counts much faster
"""
# index where there are counts and they aren't null
d = df[column].apply(lambda c: pd.notnull(c) and len(c[0]) > 0)
return df.loc[d, column].apply(lambda c: {k: v for (k, v) in zip(*c)}) |
def prefixed(self, identifier, version):
    """
    Whether or not the identifier will be prefixed.
    Strings that require the prefix are generally not recommended.

    ``version`` selects the rule set: 2 uses the Python 2 rule (leading
    digit); any other value checks the Unicode general category of the
    first character against categories invalid at the start.
    """
    # Unicode general categories that cannot start an identifier:
    # marks (Mn, Mc), decimal digits (Nd), connector punctuation (Pc).
    invalid_starting = ['Mn', 'Mc', 'Nd', 'Pc']
    # A leading underscore always triggers the prefix.
    if identifier.startswith('_'): return True
    return((identifier[0] in string.digits) if version==2
            else (unicodedata.category(identifier[0]) in invalid_starting)) | def function[prefixed, parameter[self, identifier, version]]:
constant[
Whether or not the identifier will be prefixed.
Strings that require the prefix are generally not recommended.
]
variable[invalid_starting] assign[=] list[[<ast.Constant object at 0x7da18dc079a0>, <ast.Constant object at 0x7da18dc04f40>, <ast.Constant object at 0x7da18dc06620>, <ast.Constant object at 0x7da18dc07b50>]]
if call[name[identifier].startswith, parameter[constant[_]]] begin[:]
return[constant[True]]
return[<ast.IfExp object at 0x7da18dc05780>] | keyword[def] identifier[prefixed] ( identifier[self] , identifier[identifier] , identifier[version] ):
literal[string]
identifier[invalid_starting] =[ literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[identifier] . identifier[startswith] ( literal[string] ): keyword[return] keyword[True]
keyword[return] (( identifier[identifier] [ literal[int] ] keyword[in] identifier[string] . identifier[digits] ) keyword[if] identifier[version] == literal[int]
keyword[else] ( identifier[unicodedata] . identifier[category] ( identifier[identifier] [ literal[int] ]) keyword[in] identifier[invalid_starting] )) | def prefixed(self, identifier, version):
"""
Whether or not the identifier will be prefixed.
Strings that require the prefix are generally not recommended.
"""
invalid_starting = ['Mn', 'Mc', 'Nd', 'Pc']
if identifier.startswith('_'):
return True # depends on [control=['if'], data=[]]
return identifier[0] in string.digits if version == 2 else unicodedata.category(identifier[0]) in invalid_starting |
def show_grid(cells, data):
    """!
    @brief Show CLIQUE blocks as a grid in data space.
    @details Each block contains points and according to this density is displayed. CLIQUE grid helps to visualize
    grid that was used for clustering process.
    @param[in] cells (list): List of cells that is produced by CLIQUE algorithm.
    @param[in] data (array_like): Input data that was used for clustering process.
    """
    dimension = cells[0].dimensions
    # One canvas per unordered pair of dimensions: C(dimension, 2).
    amount_canvases = 1
    if dimension > 1:
        amount_canvases = int(dimension * (dimension - 1) / 2)
    figure = plt.figure()
    grid_spec = gridspec.GridSpec(1, amount_canvases)
    pairs = list(itertools.combinations(range(dimension), 2))
    # Degenerate 1-D case: plot the single dimension against itself.
    if len(pairs) == 0: pairs = [(0, 0)]
    for index in range(amount_canvases):
        ax = figure.add_subplot(grid_spec[index])
        clique_visualizer.__draw_cells(ax, cells, pairs[index])
        clique_visualizer.__draw_two_dimension_data(ax, data, pairs[index])
    plt.show() | def function[show_grid, parameter[cells, data]]:
constant[!
@brief Show CLIQUE blocks as a grid in data space.
@details Each block contains points and according to this density is displayed. CLIQUE grid helps to visualize
grid that was used for clustering process.
@param[in] cells (list): List of cells that is produced by CLIQUE algorithm.
@param[in] data (array_like): Input data that was used for clustering process.
]
variable[dimension] assign[=] call[name[cells]][constant[0]].dimensions
variable[amount_canvases] assign[=] constant[1]
if compare[name[dimension] greater[>] constant[1]] begin[:]
variable[amount_canvases] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[dimension] * binary_operation[name[dimension] - constant[1]]] / constant[2]]]]
variable[figure] assign[=] call[name[plt].figure, parameter[]]
variable[grid_spec] assign[=] call[name[gridspec].GridSpec, parameter[constant[1], name[amount_canvases]]]
variable[pairs] assign[=] call[name[list], parameter[call[name[itertools].combinations, parameter[call[name[range], parameter[name[dimension]]], constant[2]]]]]
if compare[call[name[len], parameter[name[pairs]]] equal[==] constant[0]] begin[:]
variable[pairs] assign[=] list[[<ast.Tuple object at 0x7da1b013d7b0>]]
for taget[name[index]] in starred[call[name[range], parameter[name[amount_canvases]]]] begin[:]
variable[ax] assign[=] call[name[figure].add_subplot, parameter[call[name[grid_spec]][name[index]]]]
call[name[clique_visualizer].__draw_cells, parameter[name[ax], name[cells], call[name[pairs]][name[index]]]]
call[name[clique_visualizer].__draw_two_dimension_data, parameter[name[ax], name[data], call[name[pairs]][name[index]]]]
call[name[plt].show, parameter[]] | keyword[def] identifier[show_grid] ( identifier[cells] , identifier[data] ):
literal[string]
identifier[dimension] = identifier[cells] [ literal[int] ]. identifier[dimensions]
identifier[amount_canvases] = literal[int]
keyword[if] identifier[dimension] > literal[int] :
identifier[amount_canvases] = identifier[int] ( identifier[dimension] *( identifier[dimension] - literal[int] )/ literal[int] )
identifier[figure] = identifier[plt] . identifier[figure] ()
identifier[grid_spec] = identifier[gridspec] . identifier[GridSpec] ( literal[int] , identifier[amount_canvases] )
identifier[pairs] = identifier[list] ( identifier[itertools] . identifier[combinations] ( identifier[range] ( identifier[dimension] ), literal[int] ))
keyword[if] identifier[len] ( identifier[pairs] )== literal[int] : identifier[pairs] =[( literal[int] , literal[int] )]
keyword[for] identifier[index] keyword[in] identifier[range] ( identifier[amount_canvases] ):
identifier[ax] = identifier[figure] . identifier[add_subplot] ( identifier[grid_spec] [ identifier[index] ])
identifier[clique_visualizer] . identifier[__draw_cells] ( identifier[ax] , identifier[cells] , identifier[pairs] [ identifier[index] ])
identifier[clique_visualizer] . identifier[__draw_two_dimension_data] ( identifier[ax] , identifier[data] , identifier[pairs] [ identifier[index] ])
identifier[plt] . identifier[show] () | def show_grid(cells, data):
"""!
@brief Show CLIQUE blocks as a grid in data space.
@details Each block contains points and according to this density is displayed. CLIQUE grid helps to visualize
grid that was used for clustering process.
@param[in] cells (list): List of cells that is produced by CLIQUE algorithm.
@param[in] data (array_like): Input data that was used for clustering process.
"""
dimension = cells[0].dimensions
amount_canvases = 1
if dimension > 1:
amount_canvases = int(dimension * (dimension - 1) / 2) # depends on [control=['if'], data=['dimension']]
figure = plt.figure()
grid_spec = gridspec.GridSpec(1, amount_canvases)
pairs = list(itertools.combinations(range(dimension), 2))
if len(pairs) == 0:
pairs = [(0, 0)] # depends on [control=['if'], data=[]]
for index in range(amount_canvases):
ax = figure.add_subplot(grid_spec[index])
clique_visualizer.__draw_cells(ax, cells, pairs[index])
clique_visualizer.__draw_two_dimension_data(ax, data, pairs[index]) # depends on [control=['for'], data=['index']]
plt.show() |
def findParent(self, name=None, attrs={}, **kwargs):
    """Returns the closest parent of this Tag that matches the given
    criteria."""
    # NOTE: We can't use _findOne because findParents takes a different
    # set of arguments.
    # NOTE(review): ``attrs={}`` is a mutable default (shared across
    # calls) and ``**kwargs`` is accepted but never forwarded to
    # findParents — confirm both are intended.
    r = None
    l = self.findParents(name, attrs, 1)
    if l:
        r = l[0]
    return r | def function[findParent, parameter[self, name, attrs]]:
constant[Returns the closest parent of this Tag that matches the given
criteria.]
variable[r] assign[=] constant[None]
variable[l] assign[=] call[name[self].findParents, parameter[name[name], name[attrs], constant[1]]]
if name[l] begin[:]
variable[r] assign[=] call[name[l]][constant[0]]
return[name[r]] | keyword[def] identifier[findParent] ( identifier[self] , identifier[name] = keyword[None] , identifier[attrs] ={},** identifier[kwargs] ):
literal[string]
identifier[r] = keyword[None]
identifier[l] = identifier[self] . identifier[findParents] ( identifier[name] , identifier[attrs] , literal[int] )
keyword[if] identifier[l] :
identifier[r] = identifier[l] [ literal[int] ]
keyword[return] identifier[r] | def findParent(self, name=None, attrs={}, **kwargs):
"""Returns the closest parent of this Tag that matches the given
criteria."""
# NOTE: We can't use _findOne because findParents takes a different
# set of arguments.
r = None
l = self.findParents(name, attrs, 1)
if l:
r = l[0] # depends on [control=['if'], data=[]]
return r |
def autocut_params_changed_cb(self, paramObj, ac_obj):
    """This callback is called when the user changes the attributes of
    an object via the paramSet.
    """
    # Only the keyword parameters are persisted; the positional ``args``
    # part of get_params() is unused here — presumably intentional.
    args, kwdargs = paramObj.get_params()
    params = list(kwdargs.items())
    # Persist the new autocut parameters into the settings container.
    self.t_.set(autocut_params=params) | def function[autocut_params_changed_cb, parameter[self, paramObj, ac_obj]]:
constant[This callback is called when the user changes the attributes of
an object via the paramSet.
]
<ast.Tuple object at 0x7da1b0d55cc0> assign[=] call[name[paramObj].get_params, parameter[]]
variable[params] assign[=] call[name[list], parameter[call[name[kwdargs].items, parameter[]]]]
call[name[self].t_.set, parameter[]] | keyword[def] identifier[autocut_params_changed_cb] ( identifier[self] , identifier[paramObj] , identifier[ac_obj] ):
literal[string]
identifier[args] , identifier[kwdargs] = identifier[paramObj] . identifier[get_params] ()
identifier[params] = identifier[list] ( identifier[kwdargs] . identifier[items] ())
identifier[self] . identifier[t_] . identifier[set] ( identifier[autocut_params] = identifier[params] ) | def autocut_params_changed_cb(self, paramObj, ac_obj):
"""This callback is called when the user changes the attributes of
an object via the paramSet.
"""
(args, kwdargs) = paramObj.get_params()
params = list(kwdargs.items())
self.t_.set(autocut_params=params) |
def center_widget_on_screen(widget, screen=None):
    """
    Centers given Widget on the screen.
    :param widget: Current Widget.
    :type widget: QWidget
    :param screen: Screen used for centering.
    :type screen: int
    :return: Definition success.
    :rtype: bool
    """
    # NOTE(review): ``screen and screen or ...`` falls back to the primary
    # screen when ``screen`` is 0, which is a valid screen index — use
    # ``screen if screen is not None else ...`` to support screen 0.
    screen = screen and screen or QApplication.desktop().primaryScreen()
    desktop_width = QApplication.desktop().screenGeometry(screen).width()
    desktop_height = QApplication.desktop().screenGeometry(screen).height()
    # NOTE(review): ``/`` yields floats on Python 3 while QWidget.move
    # expects ints — ``//`` would be safer; confirm target Python version.
    widget.move(desktop_width / 2 - widget.sizeHint().width() / 2, desktop_height / 2 - widget.sizeHint().height() / 2)
    return True | def function[center_widget_on_screen, parameter[widget, screen]]:
constant[
Centers given Widget on the screen.
:param widget: Current Widget.
:type widget: QWidget
:param screen: Screen used for centering.
:type screen: int
:return: Definition success.
:rtype: bool
]
variable[screen] assign[=] <ast.BoolOp object at 0x7da20c7cbc40>
variable[desktop_width] assign[=] call[call[call[name[QApplication].desktop, parameter[]].screenGeometry, parameter[name[screen]]].width, parameter[]]
variable[desktop_height] assign[=] call[call[call[name[QApplication].desktop, parameter[]].screenGeometry, parameter[name[screen]]].height, parameter[]]
call[name[widget].move, parameter[binary_operation[binary_operation[name[desktop_width] / constant[2]] - binary_operation[call[call[name[widget].sizeHint, parameter[]].width, parameter[]] / constant[2]]], binary_operation[binary_operation[name[desktop_height] / constant[2]] - binary_operation[call[call[name[widget].sizeHint, parameter[]].height, parameter[]] / constant[2]]]]]
return[constant[True]] | keyword[def] identifier[center_widget_on_screen] ( identifier[widget] , identifier[screen] = keyword[None] ):
literal[string]
identifier[screen] = identifier[screen] keyword[and] identifier[screen] keyword[or] identifier[QApplication] . identifier[desktop] (). identifier[primaryScreen] ()
identifier[desktop_width] = identifier[QApplication] . identifier[desktop] (). identifier[screenGeometry] ( identifier[screen] ). identifier[width] ()
identifier[desktop_height] = identifier[QApplication] . identifier[desktop] (). identifier[screenGeometry] ( identifier[screen] ). identifier[height] ()
identifier[widget] . identifier[move] ( identifier[desktop_width] / literal[int] - identifier[widget] . identifier[sizeHint] (). identifier[width] ()/ literal[int] , identifier[desktop_height] / literal[int] - identifier[widget] . identifier[sizeHint] (). identifier[height] ()/ literal[int] )
keyword[return] keyword[True] | def center_widget_on_screen(widget, screen=None):
"""
Centers given Widget on the screen.
:param widget: Current Widget.
:type widget: QWidget
:param screen: Screen used for centering.
:type screen: int
:return: Definition success.
:rtype: bool
"""
screen = screen and screen or QApplication.desktop().primaryScreen()
desktop_width = QApplication.desktop().screenGeometry(screen).width()
desktop_height = QApplication.desktop().screenGeometry(screen).height()
widget.move(desktop_width / 2 - widget.sizeHint().width() / 2, desktop_height / 2 - widget.sizeHint().height() / 2)
return True |
def create_inputhook_qt5(mgr, app=None):
    """Create an input hook for running the Qt5 application event loop.
    Parameters
    ----------
    mgr : an InputHookManager
    app : Qt Application, optional.
        Running application to use.  If not given, we probe Qt for an
        existing application object, and create a new one if none is found.
    Returns
    -------
    A pair consisting of a Qt Application (either the one given or the
    one found or created) and a inputhook.
    Notes
    -----
    We use a custom input hook instead of PyQt5's default one, as it
    interacts better with the readline packages (issue #481).
    The inputhook function works in tandem with a 'pre_prompt_hook'
    which automatically restores the hook as an inputhook in case the
    latter has been temporarily disabled after having intercepted a
    KeyboardInterrupt.
    """
    if app is None:
        app = QtCore.QCoreApplication.instance()
        if app is None:
            from PyQt5 import QtWidgets
            # [" "] serves as a placeholder argv for the new application.
            app = QtWidgets.QApplication([" "])
    # Re-use previously created inputhook if any
    ip = InteractiveShell.instance()
    if hasattr(ip, '_inputhook_qt5'):
        return app, ip._inputhook_qt5
    # Otherwise create the inputhook_qt5/preprompthook_qt5 pair of
    # hooks (they both share the got_kbdint flag)
    def inputhook_qt5():
        """PyOS_InputHook python hook for Qt5.
        Process pending Qt events and if there's no pending keyboard
        input, spend a short slice of time (50ms) running the Qt event
        loop.
        As a Python ctypes callback can't raise an exception, we catch
        the KeyboardInterrupt and temporarily deactivate the hook,
        which will let a *second* CTRL+C be processed normally and go
        back to a clean prompt line.
        """
        try:
            allow_CTRL_C()
            app = QtCore.QCoreApplication.instance()
            if not app: # shouldn't happen, but safer if it happens anyway...
                return 0
            app.processEvents(QtCore.QEventLoop.AllEvents, 300)
            if not stdin_ready():
                # Generally a program would run QCoreApplication::exec()
                # from main() to enter and process the Qt event loop until
                # quit() or exit() is called and the program terminates.
                #
                # For our input hook integration, we need to repeatedly
                # enter and process the Qt event loop for only a short
                # amount of time (say 50ms) to ensure that Python stays
                # responsive to other user inputs.
                #
                # A naive approach would be to repeatedly call
                # QCoreApplication::exec(), using a timer to quit after a
                # short amount of time. Unfortunately, QCoreApplication
                # emits an aboutToQuit signal before stopping, which has
                # the undesirable effect of closing all modal windows.
                #
                # To work around this problem, we instead create a
                # QEventLoop and call QEventLoop::exec(). Other than
                # setting some state variables which do not seem to be
                # used anywhere, the only thing QCoreApplication adds is
                # the aboutToQuit signal which is precisely what we are
                # trying to avoid.
                timer = QtCore.QTimer()
                event_loop = QtCore.QEventLoop()
                timer.timeout.connect(event_loop.quit)
                while not stdin_ready():
                    timer.start(50)
                    event_loop.exec_()
                    timer.stop()
        except KeyboardInterrupt:
            global got_kbdint, sigint_timer
            ignore_CTRL_C()
            got_kbdint = True
            mgr.clear_inputhook()
            # This generates a second SIGINT so the user doesn't have to
            # press CTRL+C twice to get a clean prompt.
            #
            # Since we can't catch the resulting KeyboardInterrupt here
            # (because this is a ctypes callback), we use a timer to
            # generate the SIGINT after we leave this callback.
            #
            # Unfortunately this doesn't work on Windows (SIGINT kills
            # Python and CTRL_C_EVENT doesn't work).
            if(os.name == 'posix'):
                pid = os.getpid()
                if(not sigint_timer):
                    sigint_timer = threading.Timer(.01, os.kill,
                                                   args=[pid, signal.SIGINT] )
                    sigint_timer.start()
            else:
                print("\nKeyboardInterrupt - Ctrl-C again for new prompt")
        except: # NO exceptions are allowed to escape from a ctypes callback
            ignore_CTRL_C()
            from traceback import print_exc
            print_exc()
            print("Got exception from inputhook_qt5, unregistering.")
            mgr.clear_inputhook()
        finally:
            allow_CTRL_C()
        return 0
    def preprompthook_qt5(ishell):
        """'pre_prompt_hook' used to restore the Qt5 input hook
        (in case the latter was temporarily deactivated after a
        CTRL+C)
        """
        global got_kbdint, sigint_timer
        # Cancel any pending self-SIGINT timer left over from the hook.
        if(sigint_timer):
            sigint_timer.cancel()
            sigint_timer = None
        if got_kbdint:
            mgr.set_inputhook(inputhook_qt5)
        got_kbdint = False
    # Cache the hook on the shell so subsequent calls re-use it.
    ip._inputhook_qt5 = inputhook_qt5
    ip.set_hook('pre_prompt_hook', preprompthook_qt5)
    return app, inputhook_qt5 | def function[create_inputhook_qt5, parameter[mgr, app]]:
constant[Create an input hook for running the Qt5 application event loop.
Parameters
----------
mgr : an InputHookManager
app : Qt Application, optional.
Running application to use. If not given, we probe Qt for an
existing application object, and create a new one if none is found.
Returns
-------
A pair consisting of a Qt Application (either the one given or the
one found or created) and a inputhook.
Notes
-----
We use a custom input hook instead of PyQt5's default one, as it
interacts better with the readline packages (issue #481).
The inputhook function works in tandem with a 'pre_prompt_hook'
which automatically restores the hook as an inputhook in case the
latter has been temporarily disabled after having intercepted a
KeyboardInterrupt.
]
if compare[name[app] is constant[None]] begin[:]
variable[app] assign[=] call[name[QtCore].QCoreApplication.instance, parameter[]]
if compare[name[app] is constant[None]] begin[:]
from relative_module[PyQt5] import module[QtWidgets]
variable[app] assign[=] call[name[QtWidgets].QApplication, parameter[list[[<ast.Constant object at 0x7da18fe908b0>]]]]
variable[ip] assign[=] call[name[InteractiveShell].instance, parameter[]]
if call[name[hasattr], parameter[name[ip], constant[_inputhook_qt5]]] begin[:]
return[tuple[[<ast.Name object at 0x7da2046200d0>, <ast.Attribute object at 0x7da204621f90>]]]
def function[inputhook_qt5, parameter[]]:
constant[PyOS_InputHook python hook for Qt5.
Process pending Qt events and if there's no pending keyboard
input, spend a short slice of time (50ms) running the Qt event
loop.
As a Python ctypes callback can't raise an exception, we catch
the KeyboardInterrupt and temporarily deactivate the hook,
which will let a *second* CTRL+C be processed normally and go
back to a clean prompt line.
]
<ast.Try object at 0x7da2046227a0>
return[constant[0]]
def function[preprompthook_qt5, parameter[ishell]]:
constant['pre_prompt_hook' used to restore the Qt5 input hook
(in case the latter was temporarily deactivated after a
CTRL+C)
]
<ast.Global object at 0x7da18fe92230>
if name[sigint_timer] begin[:]
call[name[sigint_timer].cancel, parameter[]]
variable[sigint_timer] assign[=] constant[None]
if name[got_kbdint] begin[:]
call[name[mgr].set_inputhook, parameter[name[inputhook_qt5]]]
variable[got_kbdint] assign[=] constant[False]
name[ip]._inputhook_qt5 assign[=] name[inputhook_qt5]
call[name[ip].set_hook, parameter[constant[pre_prompt_hook], name[preprompthook_qt5]]]
return[tuple[[<ast.Name object at 0x7da20c6c4610>, <ast.Name object at 0x7da20c6c6890>]]] | keyword[def] identifier[create_inputhook_qt5] ( identifier[mgr] , identifier[app] = keyword[None] ):
literal[string]
keyword[if] identifier[app] keyword[is] keyword[None] :
identifier[app] = identifier[QtCore] . identifier[QCoreApplication] . identifier[instance] ()
keyword[if] identifier[app] keyword[is] keyword[None] :
keyword[from] identifier[PyQt5] keyword[import] identifier[QtWidgets]
identifier[app] = identifier[QtWidgets] . identifier[QApplication] ([ literal[string] ])
identifier[ip] = identifier[InteractiveShell] . identifier[instance] ()
keyword[if] identifier[hasattr] ( identifier[ip] , literal[string] ):
keyword[return] identifier[app] , identifier[ip] . identifier[_inputhook_qt5]
keyword[def] identifier[inputhook_qt5] ():
literal[string]
keyword[try] :
identifier[allow_CTRL_C] ()
identifier[app] = identifier[QtCore] . identifier[QCoreApplication] . identifier[instance] ()
keyword[if] keyword[not] identifier[app] :
keyword[return] literal[int]
identifier[app] . identifier[processEvents] ( identifier[QtCore] . identifier[QEventLoop] . identifier[AllEvents] , literal[int] )
keyword[if] keyword[not] identifier[stdin_ready] ():
identifier[timer] = identifier[QtCore] . identifier[QTimer] ()
identifier[event_loop] = identifier[QtCore] . identifier[QEventLoop] ()
identifier[timer] . identifier[timeout] . identifier[connect] ( identifier[event_loop] . identifier[quit] )
keyword[while] keyword[not] identifier[stdin_ready] ():
identifier[timer] . identifier[start] ( literal[int] )
identifier[event_loop] . identifier[exec_] ()
identifier[timer] . identifier[stop] ()
keyword[except] identifier[KeyboardInterrupt] :
keyword[global] identifier[got_kbdint] , identifier[sigint_timer]
identifier[ignore_CTRL_C] ()
identifier[got_kbdint] = keyword[True]
identifier[mgr] . identifier[clear_inputhook] ()
keyword[if] ( identifier[os] . identifier[name] == literal[string] ):
identifier[pid] = identifier[os] . identifier[getpid] ()
keyword[if] ( keyword[not] identifier[sigint_timer] ):
identifier[sigint_timer] = identifier[threading] . identifier[Timer] ( literal[int] , identifier[os] . identifier[kill] ,
identifier[args] =[ identifier[pid] , identifier[signal] . identifier[SIGINT] ])
identifier[sigint_timer] . identifier[start] ()
keyword[else] :
identifier[print] ( literal[string] )
keyword[except] :
identifier[ignore_CTRL_C] ()
keyword[from] identifier[traceback] keyword[import] identifier[print_exc]
identifier[print_exc] ()
identifier[print] ( literal[string] )
identifier[mgr] . identifier[clear_inputhook] ()
keyword[finally] :
identifier[allow_CTRL_C] ()
keyword[return] literal[int]
keyword[def] identifier[preprompthook_qt5] ( identifier[ishell] ):
literal[string]
keyword[global] identifier[got_kbdint] , identifier[sigint_timer]
keyword[if] ( identifier[sigint_timer] ):
identifier[sigint_timer] . identifier[cancel] ()
identifier[sigint_timer] = keyword[None]
keyword[if] identifier[got_kbdint] :
identifier[mgr] . identifier[set_inputhook] ( identifier[inputhook_qt5] )
identifier[got_kbdint] = keyword[False]
identifier[ip] . identifier[_inputhook_qt5] = identifier[inputhook_qt5]
identifier[ip] . identifier[set_hook] ( literal[string] , identifier[preprompthook_qt5] )
keyword[return] identifier[app] , identifier[inputhook_qt5] | def create_inputhook_qt5(mgr, app=None):
"""Create an input hook for running the Qt5 application event loop.
Parameters
----------
mgr : an InputHookManager
app : Qt Application, optional.
Running application to use. If not given, we probe Qt for an
existing application object, and create a new one if none is found.
Returns
-------
A pair consisting of a Qt Application (either the one given or the
one found or created) and a inputhook.
Notes
-----
We use a custom input hook instead of PyQt5's default one, as it
interacts better with the readline packages (issue #481).
The inputhook function works in tandem with a 'pre_prompt_hook'
which automatically restores the hook as an inputhook in case the
latter has been temporarily disabled after having intercepted a
KeyboardInterrupt.
"""
if app is None:
app = QtCore.QCoreApplication.instance()
if app is None:
from PyQt5 import QtWidgets
app = QtWidgets.QApplication([' ']) # depends on [control=['if'], data=['app']] # depends on [control=['if'], data=['app']]
# Re-use previously created inputhook if any
ip = InteractiveShell.instance()
if hasattr(ip, '_inputhook_qt5'):
return (app, ip._inputhook_qt5) # depends on [control=['if'], data=[]]
# Otherwise create the inputhook_qt5/preprompthook_qt5 pair of
# hooks (they both share the got_kbdint flag)
def inputhook_qt5():
"""PyOS_InputHook python hook for Qt5.
Process pending Qt events and if there's no pending keyboard
input, spend a short slice of time (50ms) running the Qt event
loop.
As a Python ctypes callback can't raise an exception, we catch
the KeyboardInterrupt and temporarily deactivate the hook,
which will let a *second* CTRL+C be processed normally and go
back to a clean prompt line.
"""
try:
allow_CTRL_C()
app = QtCore.QCoreApplication.instance()
if not app: # shouldn't happen, but safer if it happens anyway...
return 0 # depends on [control=['if'], data=[]]
app.processEvents(QtCore.QEventLoop.AllEvents, 300)
if not stdin_ready():
# Generally a program would run QCoreApplication::exec()
# from main() to enter and process the Qt event loop until
# quit() or exit() is called and the program terminates.
#
# For our input hook integration, we need to repeatedly
# enter and process the Qt event loop for only a short
# amount of time (say 50ms) to ensure that Python stays
# responsive to other user inputs.
#
# A naive approach would be to repeatedly call
# QCoreApplication::exec(), using a timer to quit after a
# short amount of time. Unfortunately, QCoreApplication
# emits an aboutToQuit signal before stopping, which has
# the undesirable effect of closing all modal windows.
#
# To work around this problem, we instead create a
# QEventLoop and call QEventLoop::exec(). Other than
# setting some state variables which do not seem to be
# used anywhere, the only thing QCoreApplication adds is
# the aboutToQuit signal which is precisely what we are
# trying to avoid.
timer = QtCore.QTimer()
event_loop = QtCore.QEventLoop()
timer.timeout.connect(event_loop.quit)
while not stdin_ready():
timer.start(50)
event_loop.exec_()
timer.stop() # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
global got_kbdint, sigint_timer
ignore_CTRL_C()
got_kbdint = True
mgr.clear_inputhook()
# This generates a second SIGINT so the user doesn't have to
# press CTRL+C twice to get a clean prompt.
#
# Since we can't catch the resulting KeyboardInterrupt here
# (because this is a ctypes callback), we use a timer to
# generate the SIGINT after we leave this callback.
#
# Unfortunately this doesn't work on Windows (SIGINT kills
# Python and CTRL_C_EVENT doesn't work).
if os.name == 'posix':
pid = os.getpid()
if not sigint_timer:
sigint_timer = threading.Timer(0.01, os.kill, args=[pid, signal.SIGINT])
sigint_timer.start() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
print('\nKeyboardInterrupt - Ctrl-C again for new prompt') # depends on [control=['except'], data=[]]
except: # NO exceptions are allowed to escape from a ctypes callback
ignore_CTRL_C()
from traceback import print_exc
print_exc()
print('Got exception from inputhook_qt5, unregistering.')
mgr.clear_inputhook() # depends on [control=['except'], data=[]]
finally:
allow_CTRL_C()
return 0
def preprompthook_qt5(ishell):
"""'pre_prompt_hook' used to restore the Qt5 input hook
(in case the latter was temporarily deactivated after a
CTRL+C)
"""
global got_kbdint, sigint_timer
if sigint_timer:
sigint_timer.cancel()
sigint_timer = None # depends on [control=['if'], data=[]]
if got_kbdint:
mgr.set_inputhook(inputhook_qt5) # depends on [control=['if'], data=[]]
got_kbdint = False
ip._inputhook_qt5 = inputhook_qt5
ip.set_hook('pre_prompt_hook', preprompthook_qt5)
return (app, inputhook_qt5) |
def setup(app):
"""Initialize Sphinx extension."""
app.setup_extension('nbsphinx')
app.add_source_suffix('.nblink', 'linked_jupyter_notebook')
app.add_source_parser(LinkedNotebookParser)
app.add_config_value('nbsphinx_link_target_root', None, rebuild='env')
return {'version': __version__, 'parallel_read_safe': True} | def function[setup, parameter[app]]:
constant[Initialize Sphinx extension.]
call[name[app].setup_extension, parameter[constant[nbsphinx]]]
call[name[app].add_source_suffix, parameter[constant[.nblink], constant[linked_jupyter_notebook]]]
call[name[app].add_source_parser, parameter[name[LinkedNotebookParser]]]
call[name[app].add_config_value, parameter[constant[nbsphinx_link_target_root], constant[None]]]
return[dictionary[[<ast.Constant object at 0x7da1b0ff1630>, <ast.Constant object at 0x7da2101f44f0>], [<ast.Name object at 0x7da2101f4f70>, <ast.Constant object at 0x7da2101f4dc0>]]] | keyword[def] identifier[setup] ( identifier[app] ):
literal[string]
identifier[app] . identifier[setup_extension] ( literal[string] )
identifier[app] . identifier[add_source_suffix] ( literal[string] , literal[string] )
identifier[app] . identifier[add_source_parser] ( identifier[LinkedNotebookParser] )
identifier[app] . identifier[add_config_value] ( literal[string] , keyword[None] , identifier[rebuild] = literal[string] )
keyword[return] { literal[string] : identifier[__version__] , literal[string] : keyword[True] } | def setup(app):
"""Initialize Sphinx extension."""
app.setup_extension('nbsphinx')
app.add_source_suffix('.nblink', 'linked_jupyter_notebook')
app.add_source_parser(LinkedNotebookParser)
app.add_config_value('nbsphinx_link_target_root', None, rebuild='env')
return {'version': __version__, 'parallel_read_safe': True} |
def get_collection(cls, request_args):
r"""
Used to fetch a collection of resource object of type 'cls' in response to a GET request\
. get_resource_or_collection should only be invoked on a resource when the client specifies a GET request.
:param request_args: The query parameters supplied with the request. currently supports page[offset], and \
page[limit]. Pagination only applies to collection requests. See http://jsonapi.org/format/#fetching-pagination.
:return: An HTTP response object in accordance with the specification at \
http://jsonapi.org/format/#fetching-resources
"""
try:
if request_args.get('include'):
raise ParameterNotSupported
offset = request_args.get('page[offset]', 0)
limit = request_args.get('page[limit]', 20)
query = "MATCH (n) WHERE n:{label} AND n.active RETURN n ORDER BY n.id SKIP {offset} LIMIT {limit}".format(
label=cls.__name__,
offset=offset,
limit=limit)
results, meta = db.cypher_query(query)
data = dict()
data['data'] = list()
data['links'] = dict()
data['links']['self'] = "{class_link}?page[offset]={offset}&page[limit]={limit}".format(
class_link=cls.get_class_link(),
offset=offset,
limit=limit
)
data['links']['first'] = "{class_link}?page[offset]={offset}&page[limit]={limit}".format(
class_link=cls.get_class_link(),
offset=0,
limit=limit
)
if int(offset) - int(limit) > 0:
data['links']['prev'] = "{class_link}?page[offset]={offset}&page[limit]={limit}".format(
class_link=cls.get_class_link(),
offset=int(offset)-int(limit),
limit=limit
)
if len(cls.nodes) > int(offset) + int(limit):
data['links']['next'] = "{class_link}?page[offset]={offset}&page[limit]={limit}".format(
class_link=cls.get_class_link(),
offset=int(offset)+int(limit),
limit=limit
)
data['links']['last'] = "{class_link}?page[offset]={offset}&page[limit]={limit}".format(
class_link=cls.get_class_link(),
offset=len(cls.nodes.filter(active=True)) - (len(cls.nodes.filter(active=True)) % int(limit))-1,
limit=limit
)
list_of_nodes = [cls.inflate(row[0]) for row in results]
for this_node in list_of_nodes:
data['data'].append(this_node.get_resource_object())
r = make_response(jsonify(data))
r.status_code = http_error_codes.OK
r.headers['Content-Type'] = CONTENT_TYPE
return r
except ParameterNotSupported:
return application_codes.error_response([application_codes.PARAMETER_NOT_SUPPORTED_VIOLATION]) | def function[get_collection, parameter[cls, request_args]]:
constant[
Used to fetch a collection of resource object of type 'cls' in response to a GET request\
. get_resource_or_collection should only be invoked on a resource when the client specifies a GET request.
:param request_args: The query parameters supplied with the request. currently supports page[offset], and \
page[limit]. Pagination only applies to collection requests. See http://jsonapi.org/format/#fetching-pagination.
:return: An HTTP response object in accordance with the specification at \
http://jsonapi.org/format/#fetching-resources
]
<ast.Try object at 0x7da1b094d6f0> | keyword[def] identifier[get_collection] ( identifier[cls] , identifier[request_args] ):
literal[string]
keyword[try] :
keyword[if] identifier[request_args] . identifier[get] ( literal[string] ):
keyword[raise] identifier[ParameterNotSupported]
identifier[offset] = identifier[request_args] . identifier[get] ( literal[string] , literal[int] )
identifier[limit] = identifier[request_args] . identifier[get] ( literal[string] , literal[int] )
identifier[query] = literal[string] . identifier[format] (
identifier[label] = identifier[cls] . identifier[__name__] ,
identifier[offset] = identifier[offset] ,
identifier[limit] = identifier[limit] )
identifier[results] , identifier[meta] = identifier[db] . identifier[cypher_query] ( identifier[query] )
identifier[data] = identifier[dict] ()
identifier[data] [ literal[string] ]= identifier[list] ()
identifier[data] [ literal[string] ]= identifier[dict] ()
identifier[data] [ literal[string] ][ literal[string] ]= literal[string] . identifier[format] (
identifier[class_link] = identifier[cls] . identifier[get_class_link] (),
identifier[offset] = identifier[offset] ,
identifier[limit] = identifier[limit]
)
identifier[data] [ literal[string] ][ literal[string] ]= literal[string] . identifier[format] (
identifier[class_link] = identifier[cls] . identifier[get_class_link] (),
identifier[offset] = literal[int] ,
identifier[limit] = identifier[limit]
)
keyword[if] identifier[int] ( identifier[offset] )- identifier[int] ( identifier[limit] )> literal[int] :
identifier[data] [ literal[string] ][ literal[string] ]= literal[string] . identifier[format] (
identifier[class_link] = identifier[cls] . identifier[get_class_link] (),
identifier[offset] = identifier[int] ( identifier[offset] )- identifier[int] ( identifier[limit] ),
identifier[limit] = identifier[limit]
)
keyword[if] identifier[len] ( identifier[cls] . identifier[nodes] )> identifier[int] ( identifier[offset] )+ identifier[int] ( identifier[limit] ):
identifier[data] [ literal[string] ][ literal[string] ]= literal[string] . identifier[format] (
identifier[class_link] = identifier[cls] . identifier[get_class_link] (),
identifier[offset] = identifier[int] ( identifier[offset] )+ identifier[int] ( identifier[limit] ),
identifier[limit] = identifier[limit]
)
identifier[data] [ literal[string] ][ literal[string] ]= literal[string] . identifier[format] (
identifier[class_link] = identifier[cls] . identifier[get_class_link] (),
identifier[offset] = identifier[len] ( identifier[cls] . identifier[nodes] . identifier[filter] ( identifier[active] = keyword[True] ))-( identifier[len] ( identifier[cls] . identifier[nodes] . identifier[filter] ( identifier[active] = keyword[True] ))% identifier[int] ( identifier[limit] ))- literal[int] ,
identifier[limit] = identifier[limit]
)
identifier[list_of_nodes] =[ identifier[cls] . identifier[inflate] ( identifier[row] [ literal[int] ]) keyword[for] identifier[row] keyword[in] identifier[results] ]
keyword[for] identifier[this_node] keyword[in] identifier[list_of_nodes] :
identifier[data] [ literal[string] ]. identifier[append] ( identifier[this_node] . identifier[get_resource_object] ())
identifier[r] = identifier[make_response] ( identifier[jsonify] ( identifier[data] ))
identifier[r] . identifier[status_code] = identifier[http_error_codes] . identifier[OK]
identifier[r] . identifier[headers] [ literal[string] ]= identifier[CONTENT_TYPE]
keyword[return] identifier[r]
keyword[except] identifier[ParameterNotSupported] :
keyword[return] identifier[application_codes] . identifier[error_response] ([ identifier[application_codes] . identifier[PARAMETER_NOT_SUPPORTED_VIOLATION] ]) | def get_collection(cls, request_args):
"""
Used to fetch a collection of resource object of type 'cls' in response to a GET request\\
. get_resource_or_collection should only be invoked on a resource when the client specifies a GET request.
:param request_args: The query parameters supplied with the request. currently supports page[offset], and \\
page[limit]. Pagination only applies to collection requests. See http://jsonapi.org/format/#fetching-pagination.
:return: An HTTP response object in accordance with the specification at \\
http://jsonapi.org/format/#fetching-resources
"""
try:
if request_args.get('include'):
raise ParameterNotSupported # depends on [control=['if'], data=[]]
offset = request_args.get('page[offset]', 0)
limit = request_args.get('page[limit]', 20)
query = 'MATCH (n) WHERE n:{label} AND n.active RETURN n ORDER BY n.id SKIP {offset} LIMIT {limit}'.format(label=cls.__name__, offset=offset, limit=limit)
(results, meta) = db.cypher_query(query)
data = dict()
data['data'] = list()
data['links'] = dict()
data['links']['self'] = '{class_link}?page[offset]={offset}&page[limit]={limit}'.format(class_link=cls.get_class_link(), offset=offset, limit=limit)
data['links']['first'] = '{class_link}?page[offset]={offset}&page[limit]={limit}'.format(class_link=cls.get_class_link(), offset=0, limit=limit)
if int(offset) - int(limit) > 0:
data['links']['prev'] = '{class_link}?page[offset]={offset}&page[limit]={limit}'.format(class_link=cls.get_class_link(), offset=int(offset) - int(limit), limit=limit) # depends on [control=['if'], data=[]]
if len(cls.nodes) > int(offset) + int(limit):
data['links']['next'] = '{class_link}?page[offset]={offset}&page[limit]={limit}'.format(class_link=cls.get_class_link(), offset=int(offset) + int(limit), limit=limit) # depends on [control=['if'], data=[]]
data['links']['last'] = '{class_link}?page[offset]={offset}&page[limit]={limit}'.format(class_link=cls.get_class_link(), offset=len(cls.nodes.filter(active=True)) - len(cls.nodes.filter(active=True)) % int(limit) - 1, limit=limit)
list_of_nodes = [cls.inflate(row[0]) for row in results]
for this_node in list_of_nodes:
data['data'].append(this_node.get_resource_object()) # depends on [control=['for'], data=['this_node']]
r = make_response(jsonify(data))
r.status_code = http_error_codes.OK
r.headers['Content-Type'] = CONTENT_TYPE
return r # depends on [control=['try'], data=[]]
except ParameterNotSupported:
return application_codes.error_response([application_codes.PARAMETER_NOT_SUPPORTED_VIOLATION]) # depends on [control=['except'], data=[]] |
def volume_percentage_used(self, volume):
"""Total used size in percentage for volume"""
volume = self._get_volume(volume)
if volume is not None:
total = int(volume["size"]["total"])
used = int(volume["size"]["used"])
if used is not None and used > 0 and \
total is not None and total > 0:
return round((float(used) / float(total)) * 100.0, 1) | def function[volume_percentage_used, parameter[self, volume]]:
constant[Total used size in percentage for volume]
variable[volume] assign[=] call[name[self]._get_volume, parameter[name[volume]]]
if compare[name[volume] is_not constant[None]] begin[:]
variable[total] assign[=] call[name[int], parameter[call[call[name[volume]][constant[size]]][constant[total]]]]
variable[used] assign[=] call[name[int], parameter[call[call[name[volume]][constant[size]]][constant[used]]]]
if <ast.BoolOp object at 0x7da1b024fa00> begin[:]
return[call[name[round], parameter[binary_operation[binary_operation[call[name[float], parameter[name[used]]] / call[name[float], parameter[name[total]]]] * constant[100.0]], constant[1]]]] | keyword[def] identifier[volume_percentage_used] ( identifier[self] , identifier[volume] ):
literal[string]
identifier[volume] = identifier[self] . identifier[_get_volume] ( identifier[volume] )
keyword[if] identifier[volume] keyword[is] keyword[not] keyword[None] :
identifier[total] = identifier[int] ( identifier[volume] [ literal[string] ][ literal[string] ])
identifier[used] = identifier[int] ( identifier[volume] [ literal[string] ][ literal[string] ])
keyword[if] identifier[used] keyword[is] keyword[not] keyword[None] keyword[and] identifier[used] > literal[int] keyword[and] identifier[total] keyword[is] keyword[not] keyword[None] keyword[and] identifier[total] > literal[int] :
keyword[return] identifier[round] (( identifier[float] ( identifier[used] )/ identifier[float] ( identifier[total] ))* literal[int] , literal[int] ) | def volume_percentage_used(self, volume):
"""Total used size in percentage for volume"""
volume = self._get_volume(volume)
if volume is not None:
total = int(volume['size']['total'])
used = int(volume['size']['used'])
if used is not None and used > 0 and (total is not None) and (total > 0):
return round(float(used) / float(total) * 100.0, 1) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['volume']] |
def _sinusoid(x, p, L, y):
""" Return the sinusoid cont func evaluated at input x for the continuum.
Parameters
----------
x: float or np.array
data, input to function
p: ndarray
coefficients of fitting function
L: float
width of x data
y: float or np.array
output data corresponding to input x
Returns
-------
func: float
function evaluated for the input x
"""
N = int(len(p)/2)
n = np.linspace(0, N, N+1)
k = n*np.pi/L
func = 0
for n in range(0, N):
func += p[2*n]*np.sin(k[n]*x)+p[2*n+1]*np.cos(k[n]*x)
return func | def function[_sinusoid, parameter[x, p, L, y]]:
constant[ Return the sinusoid cont func evaluated at input x for the continuum.
Parameters
----------
x: float or np.array
data, input to function
p: ndarray
coefficients of fitting function
L: float
width of x data
y: float or np.array
output data corresponding to input x
Returns
-------
func: float
function evaluated for the input x
]
variable[N] assign[=] call[name[int], parameter[binary_operation[call[name[len], parameter[name[p]]] / constant[2]]]]
variable[n] assign[=] call[name[np].linspace, parameter[constant[0], name[N], binary_operation[name[N] + constant[1]]]]
variable[k] assign[=] binary_operation[binary_operation[name[n] * name[np].pi] / name[L]]
variable[func] assign[=] constant[0]
for taget[name[n]] in starred[call[name[range], parameter[constant[0], name[N]]]] begin[:]
<ast.AugAssign object at 0x7da1b10c3af0>
return[name[func]] | keyword[def] identifier[_sinusoid] ( identifier[x] , identifier[p] , identifier[L] , identifier[y] ):
literal[string]
identifier[N] = identifier[int] ( identifier[len] ( identifier[p] )/ literal[int] )
identifier[n] = identifier[np] . identifier[linspace] ( literal[int] , identifier[N] , identifier[N] + literal[int] )
identifier[k] = identifier[n] * identifier[np] . identifier[pi] / identifier[L]
identifier[func] = literal[int]
keyword[for] identifier[n] keyword[in] identifier[range] ( literal[int] , identifier[N] ):
identifier[func] += identifier[p] [ literal[int] * identifier[n] ]* identifier[np] . identifier[sin] ( identifier[k] [ identifier[n] ]* identifier[x] )+ identifier[p] [ literal[int] * identifier[n] + literal[int] ]* identifier[np] . identifier[cos] ( identifier[k] [ identifier[n] ]* identifier[x] )
keyword[return] identifier[func] | def _sinusoid(x, p, L, y):
""" Return the sinusoid cont func evaluated at input x for the continuum.
Parameters
----------
x: float or np.array
data, input to function
p: ndarray
coefficients of fitting function
L: float
width of x data
y: float or np.array
output data corresponding to input x
Returns
-------
func: float
function evaluated for the input x
"""
N = int(len(p) / 2)
n = np.linspace(0, N, N + 1)
k = n * np.pi / L
func = 0
for n in range(0, N):
func += p[2 * n] * np.sin(k[n] * x) + p[2 * n + 1] * np.cos(k[n] * x) # depends on [control=['for'], data=['n']]
return func |
def compose(self, file_path, velocity_mean=None, velocity_std=None):
'''
Compose by learned model.
Args:
file_path: Path to generated MIDI file.
velocity_mean: Mean of velocity.
This class samples the velocity from a Gaussian distribution of
`velocity_mean` and `velocity_std`.
If `None`, the average velocity in MIDI files set to this parameter.
velocity_std: Standard deviation(SD) of velocity.
This class samples the velocity from a Gaussian distribution of
`velocity_mean` and `velocity_std`.
If `None`, the SD of velocity in MIDI files set to this parameter.
'''
generated_arr = self.__generative_model.draw()
channel = generated_arr.shape[1] // 2
generated_arr = generated_arr[:, :channel]
# @TODO(chimera0(RUM)): Fix the redundant processings.
if velocity_mean is None:
velocity_mean = np.array(
[self.__midi_df_list[i].velocity.mean() for i in range(len(self.__midi_df_list))]
).mean()
if velocity_std is None:
velocity_std = np.array(
[self.__midi_df_list[i].velocity.std() for i in range(len(self.__midi_df_list))]
).std()
generated_list = []
start = 0
end = self.__time_fraction
for batch in range(generated_arr.shape[0]):
for seq in range(generated_arr.shape[2]):
add_flag = False
for program_key in range(generated_arr.shape[1]):
pitch_key = np.argmax(generated_arr[batch, program_key, seq])
pitch_tuple = self.__bar_gram.pitch_tuple_list[pitch_key]
for pitch in pitch_tuple:
velocity = np.random.normal(
loc=velocity_mean,
scale=velocity_std
)
velocity = int(velocity)
program = self.__noise_sampler.program_list[program_key]
generated_list.append((program, start, end, pitch, velocity))
add_flag = True
if add_flag is True:
start += self.__time_fraction
end += self.__time_fraction
generated_midi_df = pd.DataFrame(
generated_list,
columns=[
"program",
"start",
"end",
"pitch",
"velocity"
]
)
pitch_arr = generated_midi_df.pitch.drop_duplicates()
df_list = []
for pitch in pitch_arr:
df = generated_midi_df[generated_midi_df.pitch == pitch]
df = df.sort_values(by=["start", "end"])
df["next_start"] = df.start.shift(-1)
df["next_end"] = df.end.shift(-1)
df.loc[df.end == df.next_start, "end"] = df.loc[df.end == df.next_start, "next_end"]
df = df.drop_duplicates(["end"])
df_list.append(df)
generated_midi_df = pd.concat(df_list)
generated_midi_df = generated_midi_df.sort_values(by=["start", "end"])
self.__midi_controller.save(
file_path=file_path,
note_df=generated_midi_df
) | def function[compose, parameter[self, file_path, velocity_mean, velocity_std]]:
constant[
Compose by learned model.
Args:
file_path: Path to generated MIDI file.
velocity_mean: Mean of velocity.
This class samples the velocity from a Gaussian distribution of
`velocity_mean` and `velocity_std`.
If `None`, the average velocity in MIDI files set to this parameter.
velocity_std: Standard deviation(SD) of velocity.
This class samples the velocity from a Gaussian distribution of
`velocity_mean` and `velocity_std`.
If `None`, the SD of velocity in MIDI files set to this parameter.
]
variable[generated_arr] assign[=] call[name[self].__generative_model.draw, parameter[]]
variable[channel] assign[=] binary_operation[call[name[generated_arr].shape][constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]
variable[generated_arr] assign[=] call[name[generated_arr]][tuple[[<ast.Slice object at 0x7da204963be0>, <ast.Slice object at 0x7da204963340>]]]
if compare[name[velocity_mean] is constant[None]] begin[:]
variable[velocity_mean] assign[=] call[call[name[np].array, parameter[<ast.ListComp object at 0x7da204962e60>]].mean, parameter[]]
if compare[name[velocity_std] is constant[None]] begin[:]
variable[velocity_std] assign[=] call[call[name[np].array, parameter[<ast.ListComp object at 0x7da2049638b0>]].std, parameter[]]
variable[generated_list] assign[=] list[[]]
variable[start] assign[=] constant[0]
variable[end] assign[=] name[self].__time_fraction
for taget[name[batch]] in starred[call[name[range], parameter[call[name[generated_arr].shape][constant[0]]]]] begin[:]
for taget[name[seq]] in starred[call[name[range], parameter[call[name[generated_arr].shape][constant[2]]]]] begin[:]
variable[add_flag] assign[=] constant[False]
for taget[name[program_key]] in starred[call[name[range], parameter[call[name[generated_arr].shape][constant[1]]]]] begin[:]
variable[pitch_key] assign[=] call[name[np].argmax, parameter[call[name[generated_arr]][tuple[[<ast.Name object at 0x7da1b07ac9d0>, <ast.Name object at 0x7da1b07adb40>, <ast.Name object at 0x7da1b07af580>]]]]]
variable[pitch_tuple] assign[=] call[name[self].__bar_gram.pitch_tuple_list][name[pitch_key]]
for taget[name[pitch]] in starred[name[pitch_tuple]] begin[:]
variable[velocity] assign[=] call[name[np].random.normal, parameter[]]
variable[velocity] assign[=] call[name[int], parameter[name[velocity]]]
variable[program] assign[=] call[name[self].__noise_sampler.program_list][name[program_key]]
call[name[generated_list].append, parameter[tuple[[<ast.Name object at 0x7da1b07afc10>, <ast.Name object at 0x7da1b07acd90>, <ast.Name object at 0x7da1b07ae470>, <ast.Name object at 0x7da1b07adb70>, <ast.Name object at 0x7da1b07ac6a0>]]]]
variable[add_flag] assign[=] constant[True]
if compare[name[add_flag] is constant[True]] begin[:]
<ast.AugAssign object at 0x7da1b07ae7a0>
<ast.AugAssign object at 0x7da1b07acc70>
variable[generated_midi_df] assign[=] call[name[pd].DataFrame, parameter[name[generated_list]]]
variable[pitch_arr] assign[=] call[name[generated_midi_df].pitch.drop_duplicates, parameter[]]
variable[df_list] assign[=] list[[]]
for taget[name[pitch]] in starred[name[pitch_arr]] begin[:]
variable[df] assign[=] call[name[generated_midi_df]][compare[name[generated_midi_df].pitch equal[==] name[pitch]]]
variable[df] assign[=] call[name[df].sort_values, parameter[]]
call[name[df]][constant[next_start]] assign[=] call[name[df].start.shift, parameter[<ast.UnaryOp object at 0x7da1b07ac820>]]
call[name[df]][constant[next_end]] assign[=] call[name[df].end.shift, parameter[<ast.UnaryOp object at 0x7da1b0841f30>]]
call[name[df].loc][tuple[[<ast.Compare object at 0x7da1b0842890>, <ast.Constant object at 0x7da1b0840610>]]] assign[=] call[name[df].loc][tuple[[<ast.Compare object at 0x7da1b0841c60>, <ast.Constant object at 0x7da1b0841c90>]]]
variable[df] assign[=] call[name[df].drop_duplicates, parameter[list[[<ast.Constant object at 0x7da1b08414e0>]]]]
call[name[df_list].append, parameter[name[df]]]
variable[generated_midi_df] assign[=] call[name[pd].concat, parameter[name[df_list]]]
variable[generated_midi_df] assign[=] call[name[generated_midi_df].sort_values, parameter[]]
call[name[self].__midi_controller.save, parameter[]] | keyword[def] identifier[compose] ( identifier[self] , identifier[file_path] , identifier[velocity_mean] = keyword[None] , identifier[velocity_std] = keyword[None] ):
literal[string]
identifier[generated_arr] = identifier[self] . identifier[__generative_model] . identifier[draw] ()
identifier[channel] = identifier[generated_arr] . identifier[shape] [ literal[int] ]// literal[int]
identifier[generated_arr] = identifier[generated_arr] [:,: identifier[channel] ]
keyword[if] identifier[velocity_mean] keyword[is] keyword[None] :
identifier[velocity_mean] = identifier[np] . identifier[array] (
[ identifier[self] . identifier[__midi_df_list] [ identifier[i] ]. identifier[velocity] . identifier[mean] () keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[__midi_df_list] ))]
). identifier[mean] ()
keyword[if] identifier[velocity_std] keyword[is] keyword[None] :
identifier[velocity_std] = identifier[np] . identifier[array] (
[ identifier[self] . identifier[__midi_df_list] [ identifier[i] ]. identifier[velocity] . identifier[std] () keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[__midi_df_list] ))]
). identifier[std] ()
identifier[generated_list] =[]
identifier[start] = literal[int]
identifier[end] = identifier[self] . identifier[__time_fraction]
keyword[for] identifier[batch] keyword[in] identifier[range] ( identifier[generated_arr] . identifier[shape] [ literal[int] ]):
keyword[for] identifier[seq] keyword[in] identifier[range] ( identifier[generated_arr] . identifier[shape] [ literal[int] ]):
identifier[add_flag] = keyword[False]
keyword[for] identifier[program_key] keyword[in] identifier[range] ( identifier[generated_arr] . identifier[shape] [ literal[int] ]):
identifier[pitch_key] = identifier[np] . identifier[argmax] ( identifier[generated_arr] [ identifier[batch] , identifier[program_key] , identifier[seq] ])
identifier[pitch_tuple] = identifier[self] . identifier[__bar_gram] . identifier[pitch_tuple_list] [ identifier[pitch_key] ]
keyword[for] identifier[pitch] keyword[in] identifier[pitch_tuple] :
identifier[velocity] = identifier[np] . identifier[random] . identifier[normal] (
identifier[loc] = identifier[velocity_mean] ,
identifier[scale] = identifier[velocity_std]
)
identifier[velocity] = identifier[int] ( identifier[velocity] )
identifier[program] = identifier[self] . identifier[__noise_sampler] . identifier[program_list] [ identifier[program_key] ]
identifier[generated_list] . identifier[append] (( identifier[program] , identifier[start] , identifier[end] , identifier[pitch] , identifier[velocity] ))
identifier[add_flag] = keyword[True]
keyword[if] identifier[add_flag] keyword[is] keyword[True] :
identifier[start] += identifier[self] . identifier[__time_fraction]
identifier[end] += identifier[self] . identifier[__time_fraction]
identifier[generated_midi_df] = identifier[pd] . identifier[DataFrame] (
identifier[generated_list] ,
identifier[columns] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string]
]
)
identifier[pitch_arr] = identifier[generated_midi_df] . identifier[pitch] . identifier[drop_duplicates] ()
identifier[df_list] =[]
keyword[for] identifier[pitch] keyword[in] identifier[pitch_arr] :
identifier[df] = identifier[generated_midi_df] [ identifier[generated_midi_df] . identifier[pitch] == identifier[pitch] ]
identifier[df] = identifier[df] . identifier[sort_values] ( identifier[by] =[ literal[string] , literal[string] ])
identifier[df] [ literal[string] ]= identifier[df] . identifier[start] . identifier[shift] (- literal[int] )
identifier[df] [ literal[string] ]= identifier[df] . identifier[end] . identifier[shift] (- literal[int] )
identifier[df] . identifier[loc] [ identifier[df] . identifier[end] == identifier[df] . identifier[next_start] , literal[string] ]= identifier[df] . identifier[loc] [ identifier[df] . identifier[end] == identifier[df] . identifier[next_start] , literal[string] ]
identifier[df] = identifier[df] . identifier[drop_duplicates] ([ literal[string] ])
identifier[df_list] . identifier[append] ( identifier[df] )
identifier[generated_midi_df] = identifier[pd] . identifier[concat] ( identifier[df_list] )
identifier[generated_midi_df] = identifier[generated_midi_df] . identifier[sort_values] ( identifier[by] =[ literal[string] , literal[string] ])
identifier[self] . identifier[__midi_controller] . identifier[save] (
identifier[file_path] = identifier[file_path] ,
identifier[note_df] = identifier[generated_midi_df]
) | def compose(self, file_path, velocity_mean=None, velocity_std=None):
"""
Compose by learned model.
Args:
file_path: Path to generated MIDI file.
velocity_mean: Mean of velocity.
This class samples the velocity from a Gaussian distribution of
`velocity_mean` and `velocity_std`.
If `None`, the average velocity in MIDI files set to this parameter.
velocity_std: Standard deviation(SD) of velocity.
This class samples the velocity from a Gaussian distribution of
`velocity_mean` and `velocity_std`.
If `None`, the SD of velocity in MIDI files set to this parameter.
"""
generated_arr = self.__generative_model.draw()
channel = generated_arr.shape[1] // 2
generated_arr = generated_arr[:, :channel] # @TODO(chimera0(RUM)): Fix the redundant processings.
if velocity_mean is None:
velocity_mean = np.array([self.__midi_df_list[i].velocity.mean() for i in range(len(self.__midi_df_list))]).mean() # depends on [control=['if'], data=['velocity_mean']]
if velocity_std is None:
velocity_std = np.array([self.__midi_df_list[i].velocity.std() for i in range(len(self.__midi_df_list))]).std() # depends on [control=['if'], data=['velocity_std']]
generated_list = []
start = 0
end = self.__time_fraction
for batch in range(generated_arr.shape[0]):
for seq in range(generated_arr.shape[2]):
add_flag = False
for program_key in range(generated_arr.shape[1]):
pitch_key = np.argmax(generated_arr[batch, program_key, seq])
pitch_tuple = self.__bar_gram.pitch_tuple_list[pitch_key]
for pitch in pitch_tuple:
velocity = np.random.normal(loc=velocity_mean, scale=velocity_std)
velocity = int(velocity)
program = self.__noise_sampler.program_list[program_key]
generated_list.append((program, start, end, pitch, velocity))
add_flag = True # depends on [control=['for'], data=['pitch']] # depends on [control=['for'], data=['program_key']]
if add_flag is True:
start += self.__time_fraction
end += self.__time_fraction # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['seq']] # depends on [control=['for'], data=['batch']]
generated_midi_df = pd.DataFrame(generated_list, columns=['program', 'start', 'end', 'pitch', 'velocity'])
pitch_arr = generated_midi_df.pitch.drop_duplicates()
df_list = []
for pitch in pitch_arr:
df = generated_midi_df[generated_midi_df.pitch == pitch]
df = df.sort_values(by=['start', 'end'])
df['next_start'] = df.start.shift(-1)
df['next_end'] = df.end.shift(-1)
df.loc[df.end == df.next_start, 'end'] = df.loc[df.end == df.next_start, 'next_end']
df = df.drop_duplicates(['end'])
df_list.append(df) # depends on [control=['for'], data=['pitch']]
generated_midi_df = pd.concat(df_list)
generated_midi_df = generated_midi_df.sort_values(by=['start', 'end'])
self.__midi_controller.save(file_path=file_path, note_df=generated_midi_df) |
def get_git_status(git_path='git'):
    """Report whether the git working copy has uncommitted changes.

    Probes the working tree with ``git diff-files`` and then the staging
    area with ``git diff-index --cached``.

    :param git_path: path to the git executable (default: ``git`` on PATH).
    :return: a human-readable status string prefixed with ``UNCLEAN:`` or
        ``CLEAN:``.
    """
    # diff-files exits non-zero when the working tree differs from the index.
    if subprocess.call((git_path, 'diff-files', '--quiet')) != 0:
        return 'UNCLEAN: Modified working tree'
    # diff-index --cached exits non-zero when the index differs from HEAD,
    # i.e. there are staged but uncommitted changes.
    if subprocess.call((git_path, 'diff-index', '--cached', '--quiet',
                        'HEAD')) != 0:
        return 'UNCLEAN: Modified index'
    return 'CLEAN: All modifications committed'
constant[Returns the state of the git working copy
]
variable[status_output] assign[=] call[name[subprocess].call, parameter[tuple[[<ast.Name object at 0x7da204621c90>, <ast.Constant object at 0x7da204621780>, <ast.Constant object at 0x7da204623940>]]]]
if compare[name[status_output] not_equal[!=] constant[0]] begin[:]
return[constant[UNCLEAN: Modified working tree]] | keyword[def] identifier[get_git_status] ( identifier[git_path] = literal[string] ):
literal[string]
identifier[status_output] = identifier[subprocess] . identifier[call] (( identifier[git_path] , literal[string] , literal[string] ))
keyword[if] identifier[status_output] != literal[int] :
keyword[return] literal[string]
keyword[else] :
identifier[status_output] = identifier[subprocess] . identifier[call] (( identifier[git_path] , literal[string] , literal[string] ,
literal[string] , literal[string] ))
keyword[if] identifier[status_output] != literal[int] :
keyword[return] literal[string]
keyword[else] :
keyword[return] literal[string] | def get_git_status(git_path='git'):
"""Returns the state of the git working copy
"""
status_output = subprocess.call((git_path, 'diff-files', '--quiet'))
if status_output != 0:
return 'UNCLEAN: Modified working tree' # depends on [control=['if'], data=[]]
else:
# check index for changes
status_output = subprocess.call((git_path, 'diff-index', '--cached', '--quiet', 'HEAD'))
if status_output != 0:
return 'UNCLEAN: Modified index' # depends on [control=['if'], data=[]]
else:
return 'CLEAN: All modifications committed' |
def get_chasm_context(tri_nuc):
    """Return the CHASM mutation context for a trinucleotide.

    CHASM (http://wiki.chasmsoftware.org/index.php/CHASM_Overview)
    recognizes a handful of special di-nucleotide contexts; anything
    that matches none of them falls back to the single middle base.

    Parameters
    ----------
    tri_nuc : str
        three nucleotide string with mutated base in the middle.

    Returns
    -------
    str
        the CHASM context label.

    Raises
    ------
    ValueError
        if *tri_nuc* is not exactly three characters long.
    """
    if len(tri_nuc) != 3:
        raise ValueError('Chasm context requires a three nucleotide string '
                         '(Provided: "{0}")'.format(tri_nuc))
    # Di-nucleotide contexts are tried in a fixed priority order; e.g.
    # "CGA" matches the leading CpG* before the trailing G*pA rule.
    dinuc_rules = (
        (tri_nuc[1:], 'CG', 'C*pG'),
        (tri_nuc[:2], 'CG', 'CpG*'),
        (tri_nuc[:2], 'TC', 'TpC*'),
        (tri_nuc[1:], 'GA', 'G*pA'),
    )
    for observed, pattern, label in dinuc_rules:
        if observed == pattern:
            return label
    # No special di-nucleotide context applies: use the mutated base itself.
    return tri_nuc[1]
constant[Returns the mutation context acording to CHASM.
For more information about CHASM's mutation context, look
at http://wiki.chasmsoftware.org/index.php/CHASM_Overview.
Essentially CHASM uses a few specified di-nucleotide contexts
followed by single nucleotide context.
Parameters
----------
tri_nuc : str
three nucleotide string with mutated base in the middle.
Returns
-------
chasm context : str
a string representing the context used in CHASM
]
if compare[call[name[len], parameter[name[tri_nuc]]] not_equal[!=] constant[3]] begin[:]
<ast.Raise object at 0x7da18bc73250>
if compare[call[name[tri_nuc]][<ast.Slice object at 0x7da18bc71c30>] equal[==] constant[CG]] begin[:]
return[constant[C*pG]] | keyword[def] identifier[get_chasm_context] ( identifier[tri_nuc] ):
literal[string]
keyword[if] identifier[len] ( identifier[tri_nuc] )!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[tri_nuc] ))
keyword[if] identifier[tri_nuc] [ literal[int] :]== literal[string] :
keyword[return] literal[string]
keyword[elif] identifier[tri_nuc] [: literal[int] ]== literal[string] :
keyword[return] literal[string]
keyword[elif] identifier[tri_nuc] [: literal[int] ]== literal[string] :
keyword[return] literal[string]
keyword[elif] identifier[tri_nuc] [ literal[int] :]== literal[string] :
keyword[return] literal[string]
keyword[else] :
keyword[return] identifier[tri_nuc] [ literal[int] ] | def get_chasm_context(tri_nuc):
"""Returns the mutation context acording to CHASM.
For more information about CHASM's mutation context, look
at http://wiki.chasmsoftware.org/index.php/CHASM_Overview.
Essentially CHASM uses a few specified di-nucleotide contexts
followed by single nucleotide context.
Parameters
----------
tri_nuc : str
three nucleotide string with mutated base in the middle.
Returns
-------
chasm context : str
a string representing the context used in CHASM
"""
# check if string is correct length
if len(tri_nuc) != 3:
raise ValueError('Chasm context requires a three nucleotide string (Provided: "{0}")'.format(tri_nuc)) # depends on [control=['if'], data=[]]
# try dinuc context if found
if tri_nuc[1:] == 'CG':
return 'C*pG' # depends on [control=['if'], data=[]]
elif tri_nuc[:2] == 'CG':
return 'CpG*' # depends on [control=['if'], data=[]]
elif tri_nuc[:2] == 'TC':
return 'TpC*' # depends on [control=['if'], data=[]]
elif tri_nuc[1:] == 'GA':
return 'G*pA' # depends on [control=['if'], data=[]]
else:
# just return single nuc context
return tri_nuc[1] |
def intersection(set1, set2):
    """Return the items of *set1* that also appear in *set2*.

    Order and duplicates from *set1* are preserved; *set2* is used only
    for membership tests.
    """
    assert is_iterable(set1)
    assert is_iterable(set2)
    return [item for item in set1 if item in set2]
constant[ Removes from set1 any items which don't appear in set2 and returns the result.
]
assert[call[name[is_iterable], parameter[name[set1]]]]
assert[call[name[is_iterable], parameter[name[set2]]]]
variable[result] assign[=] list[[]]
for taget[name[v]] in starred[name[set1]] begin[:]
if compare[name[v] in name[set2]] begin[:]
call[name[result].append, parameter[name[v]]]
return[name[result]] | keyword[def] identifier[intersection] ( identifier[set1] , identifier[set2] ):
literal[string]
keyword[assert] identifier[is_iterable] ( identifier[set1] )
keyword[assert] identifier[is_iterable] ( identifier[set2] )
identifier[result] =[]
keyword[for] identifier[v] keyword[in] identifier[set1] :
keyword[if] identifier[v] keyword[in] identifier[set2] :
identifier[result] . identifier[append] ( identifier[v] )
keyword[return] identifier[result] | def intersection(set1, set2):
""" Removes from set1 any items which don't appear in set2 and returns the result.
"""
assert is_iterable(set1)
assert is_iterable(set2)
result = []
for v in set1:
if v in set2:
result.append(v) # depends on [control=['if'], data=['v']] # depends on [control=['for'], data=['v']]
return result |
def _load(self):
    """Restore pickled attributes from ``self._pickle_file``.

    The file is expected to contain one pickled object per entry in
    ``self._pickle_attributes``, written in the same order; each object
    is bound onto ``self`` under the corresponding attribute name.
    """
    with open(self._pickle_file, 'rb') as source:
        unpickler = pickle.Unpickler(source)
        for attr_name in self._pickle_attributes:
            # Objects were serialized sequentially, so successive
            # load() calls yield them in attribute order.
            setattr(self, attr_name, unpickler.load())
constant[Load data from a pickle file. ]
with call[name[open], parameter[name[self]._pickle_file, constant[rb]]] begin[:]
variable[pickler] assign[=] call[name[pickle].Unpickler, parameter[name[source]]]
for taget[name[attribute]] in starred[name[self]._pickle_attributes] begin[:]
variable[pickle_data] assign[=] call[name[pickler].load, parameter[]]
call[name[setattr], parameter[name[self], name[attribute], name[pickle_data]]] | keyword[def] identifier[_load] ( identifier[self] ):
literal[string]
keyword[with] identifier[open] ( identifier[self] . identifier[_pickle_file] , literal[string] ) keyword[as] identifier[source] :
identifier[pickler] = identifier[pickle] . identifier[Unpickler] ( identifier[source] )
keyword[for] identifier[attribute] keyword[in] identifier[self] . identifier[_pickle_attributes] :
identifier[pickle_data] = identifier[pickler] . identifier[load] ()
identifier[setattr] ( identifier[self] , identifier[attribute] , identifier[pickle_data] ) | def _load(self):
"""Load data from a pickle file. """
with open(self._pickle_file, 'rb') as source:
pickler = pickle.Unpickler(source)
for attribute in self._pickle_attributes:
pickle_data = pickler.load()
setattr(self, attribute, pickle_data) # depends on [control=['for'], data=['attribute']] # depends on [control=['with'], data=['source']] |
def parse_default_property_value(property_name, property_type_id, default_value_string):
    """Parse a property's textual default value into a native Python value.

    Args:
        property_name: string, name of the property whose default is being
            parsed. Used only to build meaningful error messages.
        property_type_id: int, one of the OrientDB property type ID
            constants defined in this file.
        default_value_string: string, textual default value as reported by
            OrientDB's schema introspection.

    Returns:
        a value whose Python type matches the property's declared type:
        e.g. a set for embedded-set properties, a list for embedded-list
        properties, a string for string properties.

    Raises:
        AssertionError, if the default value is not supported or does not
        match the property's declared type (e.g. a default of "[]" on an
        integer property).
    """
    # Empty-collection defaults: the textual form must match the type.
    if property_type_id == PROPERTY_TYPE_EMBEDDED_SET_ID and default_value_string == '{}':
        return set()
    if property_type_id == PROPERTY_TYPE_EMBEDDED_LIST_ID and default_value_string == '[]':
        return list()
    if (property_type_id == PROPERTY_TYPE_STRING_ID and
            isinstance(default_value_string, six.string_types)):
        return default_value_string
    # Scalar types with dedicated parsers.
    scalar_parsers = {
        PROPERTY_TYPE_BOOLEAN_ID: _parse_bool_default_value,
        PROPERTY_TYPE_DATETIME_ID: _parse_datetime_default_value,
        PROPERTY_TYPE_DATE_ID: _parse_date_default_value,
    }
    if property_type_id in scalar_parsers:
        return scalar_parsers[property_type_id](property_name, default_value_string)
    raise AssertionError(u'Unsupported default value for property "{}" with type id {}: '
                         u'{}'.format(property_name, property_type_id, default_value_string))
constant[Parse the default value string into its proper form given the property type ID.
Args:
property_name: string, the name of the property whose default value is being parsed.
Used primarily to construct meaningful error messages, should the default
value prove invalid.
property_type_id: int, one of the property type ID constants defined in this file that
OrientDB uses to designate the native type of a given property.
default_value_string: string, the textual representation of the default value for
for the property, as returned by OrientDB's schema introspection code.
Returns:
an object of type matching the property that can be used as the property's default value.
For example, if the property is of string type, the return type will be a string, and if
the property is of list type, the return type will be a list.
Raises:
AssertionError, if the default value is not supported or does not match the
property's declared type (e.g. if a default of "[]" is set on an integer property).
]
if <ast.BoolOp object at 0x7da1b1880850> begin[:]
return[call[name[set], parameter[]]] | keyword[def] identifier[parse_default_property_value] ( identifier[property_name] , identifier[property_type_id] , identifier[default_value_string] ):
literal[string]
keyword[if] identifier[property_type_id] == identifier[PROPERTY_TYPE_EMBEDDED_SET_ID] keyword[and] identifier[default_value_string] == literal[string] :
keyword[return] identifier[set] ()
keyword[elif] identifier[property_type_id] == identifier[PROPERTY_TYPE_EMBEDDED_LIST_ID] keyword[and] identifier[default_value_string] == literal[string] :
keyword[return] identifier[list] ()
keyword[elif] ( identifier[property_type_id] == identifier[PROPERTY_TYPE_STRING_ID] keyword[and]
identifier[isinstance] ( identifier[default_value_string] , identifier[six] . identifier[string_types] )):
keyword[return] identifier[default_value_string]
keyword[elif] identifier[property_type_id] == identifier[PROPERTY_TYPE_BOOLEAN_ID] :
keyword[return] identifier[_parse_bool_default_value] ( identifier[property_name] , identifier[default_value_string] )
keyword[elif] identifier[property_type_id] == identifier[PROPERTY_TYPE_DATETIME_ID] :
keyword[return] identifier[_parse_datetime_default_value] ( identifier[property_name] , identifier[default_value_string] )
keyword[elif] identifier[property_type_id] == identifier[PROPERTY_TYPE_DATE_ID] :
keyword[return] identifier[_parse_date_default_value] ( identifier[property_name] , identifier[default_value_string] )
keyword[else] :
keyword[raise] identifier[AssertionError] ( literal[string]
literal[string] . identifier[format] ( identifier[property_name] , identifier[property_type_id] , identifier[default_value_string] )) | def parse_default_property_value(property_name, property_type_id, default_value_string):
"""Parse the default value string into its proper form given the property type ID.
Args:
property_name: string, the name of the property whose default value is being parsed.
Used primarily to construct meaningful error messages, should the default
value prove invalid.
property_type_id: int, one of the property type ID constants defined in this file that
OrientDB uses to designate the native type of a given property.
default_value_string: string, the textual representation of the default value for
for the property, as returned by OrientDB's schema introspection code.
Returns:
an object of type matching the property that can be used as the property's default value.
For example, if the property is of string type, the return type will be a string, and if
the property is of list type, the return type will be a list.
Raises:
AssertionError, if the default value is not supported or does not match the
property's declared type (e.g. if a default of "[]" is set on an integer property).
"""
if property_type_id == PROPERTY_TYPE_EMBEDDED_SET_ID and default_value_string == '{}':
return set() # depends on [control=['if'], data=[]]
elif property_type_id == PROPERTY_TYPE_EMBEDDED_LIST_ID and default_value_string == '[]':
return list() # depends on [control=['if'], data=[]]
elif property_type_id == PROPERTY_TYPE_STRING_ID and isinstance(default_value_string, six.string_types):
return default_value_string # depends on [control=['if'], data=[]]
elif property_type_id == PROPERTY_TYPE_BOOLEAN_ID:
return _parse_bool_default_value(property_name, default_value_string) # depends on [control=['if'], data=[]]
elif property_type_id == PROPERTY_TYPE_DATETIME_ID:
return _parse_datetime_default_value(property_name, default_value_string) # depends on [control=['if'], data=[]]
elif property_type_id == PROPERTY_TYPE_DATE_ID:
return _parse_date_default_value(property_name, default_value_string) # depends on [control=['if'], data=[]]
else:
raise AssertionError(u'Unsupported default value for property "{}" with type id {}: {}'.format(property_name, property_type_id, default_value_string)) |
def start(self):
    """Start this gateway agent.

    Prepares internal state, then schedules a periodic callback on this
    agent's I/O loop (every 1000 ms) that disconnects hanging devices.
    """
    self._prepare()
    checker = tornado.ioloop.PeriodicCallback(
        self._disconnect_hanging_devices, 1000, self._loop)
    self._disconnector = checker
    self._disconnector.start()
constant[Start this gateway agent.]
call[name[self]._prepare, parameter[]]
name[self]._disconnector assign[=] call[name[tornado].ioloop.PeriodicCallback, parameter[name[self]._disconnect_hanging_devices, constant[1000], name[self]._loop]]
call[name[self]._disconnector.start, parameter[]] | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_prepare] ()
identifier[self] . identifier[_disconnector] = identifier[tornado] . identifier[ioloop] . identifier[PeriodicCallback] ( identifier[self] . identifier[_disconnect_hanging_devices] , literal[int] , identifier[self] . identifier[_loop] )
identifier[self] . identifier[_disconnector] . identifier[start] () | def start(self):
"""Start this gateway agent."""
self._prepare()
self._disconnector = tornado.ioloop.PeriodicCallback(self._disconnect_hanging_devices, 1000, self._loop)
self._disconnector.start() |
def _bin(g):
    """Applies the BIN rule to 'g' (see top comment).

    Rules whose right-hand side is longer than two symbols are split
    into a chain of binary rules; all other rules are kept unchanged.
    """
    transformed = []
    for production in g.rules:
        pieces = _split(production) if len(production.rhs) > 2 else [production]
        transformed.extend(pieces)
    return Grammar(transformed)
constant[Applies the BIN rule to 'g' (see top comment).]
variable[new_rules] assign[=] list[[]]
for taget[name[rule]] in starred[name[g].rules] begin[:]
if compare[call[name[len], parameter[name[rule].rhs]] greater[>] constant[2]] begin[:]
<ast.AugAssign object at 0x7da18f812b00>
return[call[name[Grammar], parameter[name[new_rules]]]] | keyword[def] identifier[_bin] ( identifier[g] ):
literal[string]
identifier[new_rules] =[]
keyword[for] identifier[rule] keyword[in] identifier[g] . identifier[rules] :
keyword[if] identifier[len] ( identifier[rule] . identifier[rhs] )> literal[int] :
identifier[new_rules] += identifier[_split] ( identifier[rule] )
keyword[else] :
identifier[new_rules] . identifier[append] ( identifier[rule] )
keyword[return] identifier[Grammar] ( identifier[new_rules] ) | def _bin(g):
"""Applies the BIN rule to 'g' (see top comment)."""
new_rules = []
for rule in g.rules:
if len(rule.rhs) > 2:
new_rules += _split(rule) # depends on [control=['if'], data=[]]
else:
new_rules.append(rule) # depends on [control=['for'], data=['rule']]
return Grammar(new_rules) |
def list(self, **kwargs):
    """
    Return every item in the table via a full scan.

    Encrypted attributes are not decrypted when listing items.  Extra
    keyword arguments are accepted for interface compatibility but are
    not used.
    """
    response = self._new_response()
    supported = self._check_supported_op('list', response)
    if supported:
        self._call_ddb_method(self.table.scan, {}, response)
        if response.status == 'success':
            raw_items = response.raw_response['Items']
            # DynamoDB returns numbers as Decimal; convert to plain types.
            response.data = self._replace_decimals(raw_items)
    response.prepare()
    return response
constant[
Returns a list of items in the database. Encrypted attributes are not
decrypted when listing items.
]
variable[response] assign[=] call[name[self]._new_response, parameter[]]
if call[name[self]._check_supported_op, parameter[constant[list], name[response]]] begin[:]
call[name[self]._call_ddb_method, parameter[name[self].table.scan, dictionary[[], []], name[response]]]
if compare[name[response].status equal[==] constant[success]] begin[:]
name[response].data assign[=] call[name[self]._replace_decimals, parameter[call[name[response].raw_response][constant[Items]]]]
call[name[response].prepare, parameter[]]
return[name[response]] | keyword[def] identifier[list] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[response] = identifier[self] . identifier[_new_response] ()
keyword[if] identifier[self] . identifier[_check_supported_op] ( literal[string] , identifier[response] ):
identifier[self] . identifier[_call_ddb_method] ( identifier[self] . identifier[table] . identifier[scan] ,{}, identifier[response] )
keyword[if] identifier[response] . identifier[status] == literal[string] :
identifier[response] . identifier[data] = identifier[self] . identifier[_replace_decimals] (
identifier[response] . identifier[raw_response] [ literal[string] ])
identifier[response] . identifier[prepare] ()
keyword[return] identifier[response] | def list(self, **kwargs):
"""
Returns a list of items in the database. Encrypted attributes are not
decrypted when listing items.
"""
response = self._new_response()
if self._check_supported_op('list', response):
self._call_ddb_method(self.table.scan, {}, response)
if response.status == 'success':
response.data = self._replace_decimals(response.raw_response['Items']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
response.prepare()
return response |
def read_loop(self):
    """
    Infinite loop that reads messages off of the socket while not closed.

    When a message is received its corresponding pending Future is set
    to have the message as its result.

    This is never used directly and is fired as a separate callback on the
    I/O loop via the `connect()` method.
    """
    while not self.closing:
        try:
            xid, zxid, response = yield self.read_response()
        except iostream.StreamClosedError:
            # The connection went away mid-read; just end the loop.
            return
        except Exception:
            # Any other read failure is unrecoverable: log it, abort the
            # connection, and stop looping.
            log.exception("Error reading response.")
            self.abort()
            return
        payload_log.debug("[RECV] (xid: %s) %s", xid, response)
        if xid == protocol.WATCH_XID:
            # Watch notifications have no pending future of their own;
            # hand them to the watch handler and keep reading.
            self.watch_handler(response)
            continue
        elif xid in protocol.SPECIAL_XIDS:
            # Special xids track their futures in a per-xid stack; take
            # the most recently queued one.
            f = self.pending_specials[xid].pop()
        else:
            f = self.pending.pop(xid)
        if isinstance(response, Exception):
            # The server answered with an error: propagate it through
            # the future so the caller's yield raises it.
            f.set_exception(response)
        else:
            f.set_result((zxid, response))
constant[
Infinite loop that reads messages off of the socket while not closed.
When a message is received its corresponding pending Future is set
to have the message as its result.
This is never used directly and is fired as a separate callback on the
I/O loop via the `connect()` method.
]
while <ast.UnaryOp object at 0x7da20e9b0a30> begin[:]
<ast.Try object at 0x7da20e9b21d0>
call[name[payload_log].debug, parameter[constant[[RECV] (xid: %s) %s], name[xid], name[response]]]
if compare[name[xid] equal[==] name[protocol].WATCH_XID] begin[:]
call[name[self].watch_handler, parameter[name[response]]]
continue
if call[name[isinstance], parameter[name[response], name[Exception]]] begin[:]
call[name[f].set_exception, parameter[name[response]]] | keyword[def] identifier[read_loop] ( identifier[self] ):
literal[string]
keyword[while] keyword[not] identifier[self] . identifier[closing] :
keyword[try] :
identifier[xid] , identifier[zxid] , identifier[response] = keyword[yield] identifier[self] . identifier[read_response] ()
keyword[except] identifier[iostream] . identifier[StreamClosedError] :
keyword[return]
keyword[except] identifier[Exception] :
identifier[log] . identifier[exception] ( literal[string] )
identifier[self] . identifier[abort] ()
keyword[return]
identifier[payload_log] . identifier[debug] ( literal[string] , identifier[xid] , identifier[response] )
keyword[if] identifier[xid] == identifier[protocol] . identifier[WATCH_XID] :
identifier[self] . identifier[watch_handler] ( identifier[response] )
keyword[continue]
keyword[elif] identifier[xid] keyword[in] identifier[protocol] . identifier[SPECIAL_XIDS] :
identifier[f] = identifier[self] . identifier[pending_specials] [ identifier[xid] ]. identifier[pop] ()
keyword[else] :
identifier[f] = identifier[self] . identifier[pending] . identifier[pop] ( identifier[xid] )
keyword[if] identifier[isinstance] ( identifier[response] , identifier[Exception] ):
identifier[f] . identifier[set_exception] ( identifier[response] )
keyword[else] :
identifier[f] . identifier[set_result] (( identifier[zxid] , identifier[response] )) | def read_loop(self):
"""
Infinite loop that reads messages off of the socket while not closed.
When a message is received its corresponding pending Future is set
to have the message as its result.
This is never used directly and is fired as a separate callback on the
I/O loop via the `connect()` method.
"""
while not self.closing:
try:
(xid, zxid, response) = (yield self.read_response()) # depends on [control=['try'], data=[]]
except iostream.StreamClosedError:
return # depends on [control=['except'], data=[]]
except Exception:
log.exception('Error reading response.')
self.abort()
return # depends on [control=['except'], data=[]]
payload_log.debug('[RECV] (xid: %s) %s', xid, response)
if xid == protocol.WATCH_XID:
self.watch_handler(response)
continue # depends on [control=['if'], data=[]]
elif xid in protocol.SPECIAL_XIDS:
f = self.pending_specials[xid].pop() # depends on [control=['if'], data=['xid']]
else:
f = self.pending.pop(xid)
if isinstance(response, Exception):
f.set_exception(response) # depends on [control=['if'], data=[]]
else:
f.set_result((zxid, response)) # depends on [control=['while'], data=[]] |
def receive_subscription_message(self, topic_name, subscription_name,
                                 peek_lock=True, timeout=60):
    '''
    Receive a message from a subscription for processing.

    topic_name:
        Name of the topic.
    subscription_name:
        Name of the subscription.
    peek_lock:
        Optional. True to retrieve and lock the message. False to read and
        delete the message. Default is True (lock).
    timeout:
        Optional. The timeout parameter is expressed in seconds.
    '''
    # Pick the receive strategy up front, then make a single call.
    if peek_lock:
        receiver = self.peek_lock_subscription_message
    else:
        receiver = self.read_delete_subscription_message
    return receiver(topic_name, subscription_name, timeout)
constant[
Receive a message from a subscription for processing.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription.
peek_lock:
Optional. True to retrieve and lock the message. False to read and
delete the message. Default is True (lock).
timeout:
Optional. The timeout parameter is expressed in seconds.
]
if name[peek_lock] begin[:]
return[call[name[self].peek_lock_subscription_message, parameter[name[topic_name], name[subscription_name], name[timeout]]]]
return[call[name[self].read_delete_subscription_message, parameter[name[topic_name], name[subscription_name], name[timeout]]]] | keyword[def] identifier[receive_subscription_message] ( identifier[self] , identifier[topic_name] , identifier[subscription_name] ,
identifier[peek_lock] = keyword[True] , identifier[timeout] = literal[int] ):
literal[string]
keyword[if] identifier[peek_lock] :
keyword[return] identifier[self] . identifier[peek_lock_subscription_message] ( identifier[topic_name] ,
identifier[subscription_name] ,
identifier[timeout] )
keyword[return] identifier[self] . identifier[read_delete_subscription_message] ( identifier[topic_name] ,
identifier[subscription_name] ,
identifier[timeout] ) | def receive_subscription_message(self, topic_name, subscription_name, peek_lock=True, timeout=60):
"""
Receive a message from a subscription for processing.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription.
peek_lock:
Optional. True to retrieve and lock the message. False to read and
delete the message. Default is True (lock).
timeout:
Optional. The timeout parameter is expressed in seconds.
"""
if peek_lock:
return self.peek_lock_subscription_message(topic_name, subscription_name, timeout) # depends on [control=['if'], data=[]]
return self.read_delete_subscription_message(topic_name, subscription_name, timeout) |
def filter(self, media_type, **params):
    """
    iterate all the accept media types that match media_type

    media_type -- string -- the media type to filter by
    **params -- dict -- further filter by key: val

    return -- generator -- yields all matching media type info things
    """
    # Split the requested type into its (type, subtype) halves, e.g.
    # "text/html" -> ("text", "html"); either half may be "*".
    mtype, msubtype = self._split_media_type(media_type)
    for x in self.__iter__():
        # all the params have to match to make the media type valid
        matched = True
        for k, v in params.items():
            # x[2] is the entry's parameter dict; a missing key or a
            # differing value disqualifies this entry.
            if x[2].get(k, None) != v:
                matched = False
                break
        if matched:
            # x[0] is the entry's (type, subtype) pair. Compare against
            # the requested (mtype, msubtype), treating '*' as a
            # wildcard. Note the matching is intentionally asymmetric:
            # an entry of "*/html" matches only when the subtypes agree,
            # even if the requested subtype is "*".
            if x[0][0] == '*':
                # Entry type is a wildcard: entry subtype must be '*'
                # or equal the requested subtype.
                if x[0][1] == '*':
                    yield x
                elif x[0][1] == msubtype:
                    yield x
            elif mtype == '*':
                # Requested type is a wildcard: match on subtype only
                # ('*' request subtype matches any entry subtype).
                if msubtype == '*':
                    yield x
                elif x[0][1] == msubtype:
                    yield x
            elif x[0][0] == mtype:
                # Types match exactly: a wildcard subtype on either
                # side matches, otherwise the subtypes must be equal.
                if msubtype == '*':
                    yield x
                elif x[0][1] == '*':
                    yield x
                elif x[0][1] == msubtype:
                    yield x
constant[
iterate all the accept media types that match media_type
media_type -- string -- the media type to filter by
**params -- dict -- further filter by key: val
return -- generator -- yields all matching media type info things
]
<ast.Tuple object at 0x7da1b0401750> assign[=] call[name[self]._split_media_type, parameter[name[media_type]]]
for taget[name[x]] in starred[call[name[self].__iter__, parameter[]]] begin[:]
variable[matched] assign[=] constant[True]
for taget[tuple[[<ast.Name object at 0x7da1b0400220>, <ast.Name object at 0x7da1b0400040>]]] in starred[call[name[params].items, parameter[]]] begin[:]
if compare[call[call[name[x]][constant[2]].get, parameter[name[k], constant[None]]] not_equal[!=] name[v]] begin[:]
variable[matched] assign[=] constant[False]
break
if name[matched] begin[:]
if compare[call[call[name[x]][constant[0]]][constant[0]] equal[==] constant[*]] begin[:]
if compare[call[call[name[x]][constant[0]]][constant[1]] equal[==] constant[*]] begin[:]
<ast.Yield object at 0x7da1b0401450> | keyword[def] identifier[filter] ( identifier[self] , identifier[media_type] ,** identifier[params] ):
literal[string]
identifier[mtype] , identifier[msubtype] = identifier[self] . identifier[_split_media_type] ( identifier[media_type] )
keyword[for] identifier[x] keyword[in] identifier[self] . identifier[__iter__] ():
identifier[matched] = keyword[True]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[params] . identifier[items] ():
keyword[if] identifier[x] [ literal[int] ]. identifier[get] ( identifier[k] , keyword[None] )!= identifier[v] :
identifier[matched] = keyword[False]
keyword[break]
keyword[if] identifier[matched] :
keyword[if] identifier[x] [ literal[int] ][ literal[int] ]== literal[string] :
keyword[if] identifier[x] [ literal[int] ][ literal[int] ]== literal[string] :
keyword[yield] identifier[x]
keyword[elif] identifier[x] [ literal[int] ][ literal[int] ]== identifier[msubtype] :
keyword[yield] identifier[x]
keyword[elif] identifier[mtype] == literal[string] :
keyword[if] identifier[msubtype] == literal[string] :
keyword[yield] identifier[x]
keyword[elif] identifier[x] [ literal[int] ][ literal[int] ]== identifier[msubtype] :
keyword[yield] identifier[x]
keyword[elif] identifier[x] [ literal[int] ][ literal[int] ]== identifier[mtype] :
keyword[if] identifier[msubtype] == literal[string] :
keyword[yield] identifier[x]
keyword[elif] identifier[x] [ literal[int] ][ literal[int] ]== literal[string] :
keyword[yield] identifier[x]
keyword[elif] identifier[x] [ literal[int] ][ literal[int] ]== identifier[msubtype] :
keyword[yield] identifier[x] | def filter(self, media_type, **params):
"""
iterate all the accept media types that match media_type
media_type -- string -- the media type to filter by
**params -- dict -- further filter by key: val
return -- generator -- yields all matching media type info things
"""
(mtype, msubtype) = self._split_media_type(media_type)
for x in self.__iter__():
# all the params have to match to make the media type valid
matched = True
for (k, v) in params.items():
if x[2].get(k, None) != v:
matched = False
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if matched:
if x[0][0] == '*':
if x[0][1] == '*':
yield x # depends on [control=['if'], data=[]]
elif x[0][1] == msubtype:
yield x # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif mtype == '*':
if msubtype == '*':
yield x # depends on [control=['if'], data=[]]
elif x[0][1] == msubtype:
yield x # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif x[0][0] == mtype:
if msubtype == '*':
yield x # depends on [control=['if'], data=[]]
elif x[0][1] == '*':
yield x # depends on [control=['if'], data=[]]
elif x[0][1] == msubtype:
yield x # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] |
def getJobStore(cls, locator):
    """
    Create an instance of the concrete job store implementation that matches the given locator.
    :param str locator: The location of the job store to be represent by the instance
    :return: an instance of a concrete subclass of AbstractJobStore
    :rtype: toil.jobStores.abstractJobStore.AbstractJobStore
    """
    name, rest = cls.parseLocator(locator)
    # Imports are deliberately deferred so only the selected backend's
    # dependencies are loaded.
    if name == 'file':
        from toil.jobStores.fileJobStore import FileJobStore as storeClass
    elif name == 'aws':
        from toil.jobStores.aws.jobStore import AWSJobStore as storeClass
    elif name == 'azure':
        from toil.jobStores.azureJobStore import AzureJobStore as storeClass
    elif name == 'google':
        from toil.jobStores.googleJobStore import GoogleJobStore as storeClass
    else:
        raise RuntimeError("Unknown job store implementation '%s'" % name)
    return storeClass(rest)
constant[
Create an instance of the concrete job store implementation that matches the given locator.
:param str locator: The location of the job store to be represent by the instance
:return: an instance of a concrete subclass of AbstractJobStore
:rtype: toil.jobStores.abstractJobStore.AbstractJobStore
]
<ast.Tuple object at 0x7da20e74b1c0> assign[=] call[name[cls].parseLocator, parameter[name[locator]]]
if compare[name[name] equal[==] constant[file]] begin[:]
from relative_module[toil.jobStores.fileJobStore] import module[FileJobStore]
return[call[name[FileJobStore], parameter[name[rest]]]] | keyword[def] identifier[getJobStore] ( identifier[cls] , identifier[locator] ):
literal[string]
identifier[name] , identifier[rest] = identifier[cls] . identifier[parseLocator] ( identifier[locator] )
keyword[if] identifier[name] == literal[string] :
keyword[from] identifier[toil] . identifier[jobStores] . identifier[fileJobStore] keyword[import] identifier[FileJobStore]
keyword[return] identifier[FileJobStore] ( identifier[rest] )
keyword[elif] identifier[name] == literal[string] :
keyword[from] identifier[toil] . identifier[jobStores] . identifier[aws] . identifier[jobStore] keyword[import] identifier[AWSJobStore]
keyword[return] identifier[AWSJobStore] ( identifier[rest] )
keyword[elif] identifier[name] == literal[string] :
keyword[from] identifier[toil] . identifier[jobStores] . identifier[azureJobStore] keyword[import] identifier[AzureJobStore]
keyword[return] identifier[AzureJobStore] ( identifier[rest] )
keyword[elif] identifier[name] == literal[string] :
keyword[from] identifier[toil] . identifier[jobStores] . identifier[googleJobStore] keyword[import] identifier[GoogleJobStore]
keyword[return] identifier[GoogleJobStore] ( identifier[rest] )
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[name] ) | def getJobStore(cls, locator):
"""
Create an instance of the concrete job store implementation that matches the given locator.
:param str locator: The location of the job store to be represent by the instance
:return: an instance of a concrete subclass of AbstractJobStore
:rtype: toil.jobStores.abstractJobStore.AbstractJobStore
"""
(name, rest) = cls.parseLocator(locator)
if name == 'file':
from toil.jobStores.fileJobStore import FileJobStore
return FileJobStore(rest) # depends on [control=['if'], data=[]]
elif name == 'aws':
from toil.jobStores.aws.jobStore import AWSJobStore
return AWSJobStore(rest) # depends on [control=['if'], data=[]]
elif name == 'azure':
from toil.jobStores.azureJobStore import AzureJobStore
return AzureJobStore(rest) # depends on [control=['if'], data=[]]
elif name == 'google':
from toil.jobStores.googleJobStore import GoogleJobStore
return GoogleJobStore(rest) # depends on [control=['if'], data=[]]
else:
raise RuntimeError("Unknown job store implementation '%s'" % name) |
def modify_document(self, doc):
    ''' Execute the configured ``main.py`` or ``main.ipynb`` to modify the
    document.
    This method will also search the app directory for any theme or
    template files, and automatically configure the document with them
    if they are found.
    '''
    # Bail out early if the lifecycle hooks already failed.
    if self._lifecycle_handler.failed:
        return
    # Note: we do NOT copy self._theme, which assumes the Theme
    # class is immutable (has no setters)
    theme = self._theme
    if theme is not None:
        doc.theme = theme
    template = self._template
    if template is not None:
        doc.template = template
    # This internal handler should never add a template
    self._main_handler.modify_document(doc)
constant[ Execute the configured ``main.py`` or ``main.ipynb`` to modify the
document.
This method will also search the app directory for any theme or
template files, and automatically configure the document with them
if they are found.
]
if name[self]._lifecycle_handler.failed begin[:]
return[None]
if compare[name[self]._theme is_not constant[None]] begin[:]
name[doc].theme assign[=] name[self]._theme
if compare[name[self]._template is_not constant[None]] begin[:]
name[doc].template assign[=] name[self]._template
call[name[self]._main_handler.modify_document, parameter[name[doc]]] | keyword[def] identifier[modify_document] ( identifier[self] , identifier[doc] ):
literal[string]
keyword[if] identifier[self] . identifier[_lifecycle_handler] . identifier[failed] :
keyword[return]
keyword[if] identifier[self] . identifier[_theme] keyword[is] keyword[not] keyword[None] :
identifier[doc] . identifier[theme] = identifier[self] . identifier[_theme]
keyword[if] identifier[self] . identifier[_template] keyword[is] keyword[not] keyword[None] :
identifier[doc] . identifier[template] = identifier[self] . identifier[_template]
identifier[self] . identifier[_main_handler] . identifier[modify_document] ( identifier[doc] ) | def modify_document(self, doc):
""" Execute the configured ``main.py`` or ``main.ipynb`` to modify the
document.
This method will also search the app directory for any theme or
template files, and automatically configure the document with them
if they are found.
"""
if self._lifecycle_handler.failed:
return # depends on [control=['if'], data=[]]
# Note: we do NOT copy self._theme, which assumes the Theme
# class is immutable (has no setters)
if self._theme is not None:
doc.theme = self._theme # depends on [control=['if'], data=[]]
if self._template is not None:
doc.template = self._template # depends on [control=['if'], data=[]]
# This internal handler should never add a template
self._main_handler.modify_document(doc) |
def as_dict(self, *args, **kwargs):
    """Return ClinVarAllele data as dict object."""
    self_as_dict = super(ClinVarAllele, self).as_dict(*args, **kwargs)
    # Fold the ClinVar-specific attributes into the base dict, keyed by
    # the same names callers already rely on.
    for attr in ('hgvs', 'clnalleleid', 'clnsig',
                 'clndn', 'clndisdb', 'clnvi'):
        self_as_dict[attr] = getattr(self, attr)
    return self_as_dict
constant[Return ClinVarAllele data as dict object.]
variable[self_as_dict] assign[=] call[call[name[super], parameter[name[ClinVarAllele], name[self]]].as_dict, parameter[<ast.Starred object at 0x7da1afe7abc0>]]
call[name[self_as_dict]][constant[hgvs]] assign[=] name[self].hgvs
call[name[self_as_dict]][constant[clnalleleid]] assign[=] name[self].clnalleleid
call[name[self_as_dict]][constant[clnsig]] assign[=] name[self].clnsig
call[name[self_as_dict]][constant[clndn]] assign[=] name[self].clndn
call[name[self_as_dict]][constant[clndisdb]] assign[=] name[self].clndisdb
call[name[self_as_dict]][constant[clnvi]] assign[=] name[self].clnvi
return[name[self_as_dict]] | keyword[def] identifier[as_dict] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self_as_dict] = identifier[super] ( identifier[ClinVarAllele] , identifier[self] ). identifier[as_dict] (* identifier[args] ,** identifier[kwargs] )
identifier[self_as_dict] [ literal[string] ]= identifier[self] . identifier[hgvs]
identifier[self_as_dict] [ literal[string] ]= identifier[self] . identifier[clnalleleid]
identifier[self_as_dict] [ literal[string] ]= identifier[self] . identifier[clnsig]
identifier[self_as_dict] [ literal[string] ]= identifier[self] . identifier[clndn]
identifier[self_as_dict] [ literal[string] ]= identifier[self] . identifier[clndisdb]
identifier[self_as_dict] [ literal[string] ]= identifier[self] . identifier[clnvi]
keyword[return] identifier[self_as_dict] | def as_dict(self, *args, **kwargs):
"""Return ClinVarAllele data as dict object."""
self_as_dict = super(ClinVarAllele, self).as_dict(*args, **kwargs)
self_as_dict['hgvs'] = self.hgvs
self_as_dict['clnalleleid'] = self.clnalleleid
self_as_dict['clnsig'] = self.clnsig
self_as_dict['clndn'] = self.clndn
self_as_dict['clndisdb'] = self.clndisdb
self_as_dict['clnvi'] = self.clnvi
return self_as_dict |
def update_interface(self, other_interface, ignore_mgmt=True):
    """
    Update an existing interface by comparing values between two
    interfaces. If a VLAN interface is defined in the other interface
    and it doesn't exist on the existing interface, it will be created.
    :param other_interface Interface: an instance of an
    interface where values in this interface will be used to as the
    template to determine changes. This only has to provide attributes
    that need to change (or not).
    :param bool ignore_mgmt: ignore resetting management fields. These are
    generally better set after creation using `engine.interface_options`
    :raises UpdateElementFailed: Failed to update the element
    :return: 2-tuple of (interface, modified) where ``interface`` is this
        interface (refreshed by ``update()`` when anything changed) and
        ``modified`` is True if top-level or sub-interface values changed
    :rtype: tuple
    .. note:: Interfaces with multiple IP addresses are ignored
    """
    # First merge top-level attribute changes; True if anything changed.
    base_updated = self._update_interface(other_interface)
    # Management-related fields that are left untouched when ignore_mgmt
    # is True (they are better set via engine.interface_options).
    mgmt = ('auth_request', 'backup_heartbeat', 'backup_mgt',
            'primary_mgt', 'primary_heartbeat', 'outgoing')
    updated = False
    invalid_routes = []
    def process_interfaces(current, interface):
        # Merge CVI/NDI sub-interface values from `interface` (template)
        # into `current` (existing). Returns (updated, invalid_routes)
        # where invalid_routes lists nicids whose network value changed,
        # meaning their routes must be invalidated afterwards.
        updated = False
        invalid_routes = []
        # Ignore interfaces with multiple addresses
        if current.has_multiple_addresses:
            return updated, invalid_routes
        local_interfaces = current.interfaces # Existing interface
        for interface in interface.interfaces: # New values
            local_interface = None
            if not getattr(interface, 'nodeid', None): # CVI
                # A CVI carries no nodeid; match it against the existing
                # sub-interface that also lacks one (at most one expected).
                cvi = [itf for itf in local_interfaces if not getattr(itf, 'nodeid', None)]
                local_interface = cvi[0] if cvi else None
            else:
                local_interface = local_interfaces.get(nodeid=interface.nodeid)
            if local_interface: # CVI or NDI sub interfaces
                for name, value in interface.data.items():
                    if getattr(local_interface, name) != value:
                        if ignore_mgmt and name in mgmt:
                            # Skip management fields without marking updated
                            pass
                        else:
                            local_interface[name] = value
                            updated = True
                        if 'network_value' in name: # Only reset routes if network changed
                            invalid_routes.append(interface.nicid)
            else:
                # No matching sub-interface: append the new one wholesale.
                current.data.setdefault('interfaces', []).append(
                    {interface.typeof: interface.data})
                updated = True
        return updated, invalid_routes
    # Handle VLANs
    is_vlan = other_interface.has_vlan
    if is_vlan:
        vlan_interfaces = self.vlan_interface
        for pvlan in other_interface.vlan_interface:
            # Match template VLANs to existing ones by VLAN id.
            current = vlan_interfaces.get(pvlan.vlan_id)
            if current:
                # PhysicalVlanInterface, set any parent interface values
                if current._update_interface(pvlan):
                    updated = True
            else:
                # Create new interface
                self.data.setdefault('vlanInterfaces', []).append(pvlan.data)
                updated = True
                continue # Skip sub interface check
            _updated, routes = process_interfaces(current, pvlan)
            if _updated: updated = True
            invalid_routes.extend(routes)
    else:
        _updated, routes = process_interfaces(self, other_interface)
        if _updated: updated = True
        invalid_routes.extend(routes)
    interface = self
    if updated or base_updated:
        # Persist the merged data and refresh this element.
        interface = self.update()
        if invalid_routes: # Interface updated, check the routes
            del_invalid_routes(self._engine, invalid_routes)
    return interface, base_updated or updated
constant[
Update an existing interface by comparing values between two
interfaces. If a VLAN interface is defined in the other interface
and it doesn't exist on the existing interface, it will be created.
:param other_interface Interface: an instance of an
interface where values in this interface will be used to as the
template to determine changes. This only has to provide attributes
that need to change (or not).
:param bool ignore_mgmt: ignore resetting management fields. These are
generally better set after creation using `engine.interface_options`
:raises UpdateElementFailed: Failed to update the element
:return: (Interface, modified, created)
:rtype: tuple
.. note:: Interfaces with multiple IP addresses are ignored
]
variable[base_updated] assign[=] call[name[self]._update_interface, parameter[name[other_interface]]]
variable[mgmt] assign[=] tuple[[<ast.Constant object at 0x7da1b1b1a560>, <ast.Constant object at 0x7da1b1b1a500>, <ast.Constant object at 0x7da1b1b1b340>, <ast.Constant object at 0x7da1b1b19f30>, <ast.Constant object at 0x7da1b1a2d390>, <ast.Constant object at 0x7da1b1a2e620>]]
variable[updated] assign[=] constant[False]
variable[invalid_routes] assign[=] list[[]]
def function[process_interfaces, parameter[current, interface]]:
variable[updated] assign[=] constant[False]
variable[invalid_routes] assign[=] list[[]]
if name[current].has_multiple_addresses begin[:]
return[tuple[[<ast.Name object at 0x7da1b1a2ea40>, <ast.Name object at 0x7da1b1a2f190>]]]
variable[local_interfaces] assign[=] name[current].interfaces
for taget[name[interface]] in starred[name[interface].interfaces] begin[:]
variable[local_interface] assign[=] constant[None]
if <ast.UnaryOp object at 0x7da1b1ba9630> begin[:]
variable[cvi] assign[=] <ast.ListComp object at 0x7da1b1ba9c00>
variable[local_interface] assign[=] <ast.IfExp object at 0x7da1b1baa260>
if name[local_interface] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1babbb0>, <ast.Name object at 0x7da1b1baa830>]]] in starred[call[name[interface].data.items, parameter[]]] begin[:]
if compare[call[name[getattr], parameter[name[local_interface], name[name]]] not_equal[!=] name[value]] begin[:]
if <ast.BoolOp object at 0x7da1b1baa200> begin[:]
pass
if compare[constant[network_value] in name[name]] begin[:]
call[name[invalid_routes].append, parameter[name[interface].nicid]]
return[tuple[[<ast.Name object at 0x7da1b1bc1060>, <ast.Name object at 0x7da1b1bc0460>]]]
variable[is_vlan] assign[=] name[other_interface].has_vlan
if name[is_vlan] begin[:]
variable[vlan_interfaces] assign[=] name[self].vlan_interface
for taget[name[pvlan]] in starred[name[other_interface].vlan_interface] begin[:]
variable[current] assign[=] call[name[vlan_interfaces].get, parameter[name[pvlan].vlan_id]]
if name[current] begin[:]
if call[name[current]._update_interface, parameter[name[pvlan]]] begin[:]
variable[updated] assign[=] constant[True]
<ast.Tuple object at 0x7da1b1bc0bb0> assign[=] call[name[process_interfaces], parameter[name[current], name[pvlan]]]
if name[_updated] begin[:]
variable[updated] assign[=] constant[True]
call[name[invalid_routes].extend, parameter[name[routes]]]
variable[interface] assign[=] name[self]
if <ast.BoolOp object at 0x7da1b1bc1ab0> begin[:]
variable[interface] assign[=] call[name[self].update, parameter[]]
if name[invalid_routes] begin[:]
call[name[del_invalid_routes], parameter[name[self]._engine, name[invalid_routes]]]
return[tuple[[<ast.Name object at 0x7da1b1bc0b20>, <ast.BoolOp object at 0x7da1b1bc0d90>]]] | keyword[def] identifier[update_interface] ( identifier[self] , identifier[other_interface] , identifier[ignore_mgmt] = keyword[True] ):
literal[string]
identifier[base_updated] = identifier[self] . identifier[_update_interface] ( identifier[other_interface] )
identifier[mgmt] =( literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] )
identifier[updated] = keyword[False]
identifier[invalid_routes] =[]
keyword[def] identifier[process_interfaces] ( identifier[current] , identifier[interface] ):
identifier[updated] = keyword[False]
identifier[invalid_routes] =[]
keyword[if] identifier[current] . identifier[has_multiple_addresses] :
keyword[return] identifier[updated] , identifier[invalid_routes]
identifier[local_interfaces] = identifier[current] . identifier[interfaces]
keyword[for] identifier[interface] keyword[in] identifier[interface] . identifier[interfaces] :
identifier[local_interface] = keyword[None]
keyword[if] keyword[not] identifier[getattr] ( identifier[interface] , literal[string] , keyword[None] ):
identifier[cvi] =[ identifier[itf] keyword[for] identifier[itf] keyword[in] identifier[local_interfaces] keyword[if] keyword[not] identifier[getattr] ( identifier[itf] , literal[string] , keyword[None] )]
identifier[local_interface] = identifier[cvi] [ literal[int] ] keyword[if] identifier[cvi] keyword[else] keyword[None]
keyword[else] :
identifier[local_interface] = identifier[local_interfaces] . identifier[get] ( identifier[nodeid] = identifier[interface] . identifier[nodeid] )
keyword[if] identifier[local_interface] :
keyword[for] identifier[name] , identifier[value] keyword[in] identifier[interface] . identifier[data] . identifier[items] ():
keyword[if] identifier[getattr] ( identifier[local_interface] , identifier[name] )!= identifier[value] :
keyword[if] identifier[ignore_mgmt] keyword[and] identifier[name] keyword[in] identifier[mgmt] :
keyword[pass]
keyword[else] :
identifier[local_interface] [ identifier[name] ]= identifier[value]
identifier[updated] = keyword[True]
keyword[if] literal[string] keyword[in] identifier[name] :
identifier[invalid_routes] . identifier[append] ( identifier[interface] . identifier[nicid] )
keyword[else] :
identifier[current] . identifier[data] . identifier[setdefault] ( literal[string] ,[]). identifier[append] (
{ identifier[interface] . identifier[typeof] : identifier[interface] . identifier[data] })
identifier[updated] = keyword[True]
keyword[return] identifier[updated] , identifier[invalid_routes]
identifier[is_vlan] = identifier[other_interface] . identifier[has_vlan]
keyword[if] identifier[is_vlan] :
identifier[vlan_interfaces] = identifier[self] . identifier[vlan_interface]
keyword[for] identifier[pvlan] keyword[in] identifier[other_interface] . identifier[vlan_interface] :
identifier[current] = identifier[vlan_interfaces] . identifier[get] ( identifier[pvlan] . identifier[vlan_id] )
keyword[if] identifier[current] :
keyword[if] identifier[current] . identifier[_update_interface] ( identifier[pvlan] ):
identifier[updated] = keyword[True]
keyword[else] :
identifier[self] . identifier[data] . identifier[setdefault] ( literal[string] ,[]). identifier[append] ( identifier[pvlan] . identifier[data] )
identifier[updated] = keyword[True]
keyword[continue]
identifier[_updated] , identifier[routes] = identifier[process_interfaces] ( identifier[current] , identifier[pvlan] )
keyword[if] identifier[_updated] : identifier[updated] = keyword[True]
identifier[invalid_routes] . identifier[extend] ( identifier[routes] )
keyword[else] :
identifier[_updated] , identifier[routes] = identifier[process_interfaces] ( identifier[self] , identifier[other_interface] )
keyword[if] identifier[_updated] : identifier[updated] = keyword[True]
identifier[invalid_routes] . identifier[extend] ( identifier[routes] )
identifier[interface] = identifier[self]
keyword[if] identifier[updated] keyword[or] identifier[base_updated] :
identifier[interface] = identifier[self] . identifier[update] ()
keyword[if] identifier[invalid_routes] :
identifier[del_invalid_routes] ( identifier[self] . identifier[_engine] , identifier[invalid_routes] )
keyword[return] identifier[interface] , identifier[base_updated] keyword[or] identifier[updated] | def update_interface(self, other_interface, ignore_mgmt=True):
"""
Update an existing interface by comparing values between two
interfaces. If a VLAN interface is defined in the other interface
and it doesn't exist on the existing interface, it will be created.
:param other_interface Interface: an instance of an
interface where values in this interface will be used to as the
template to determine changes. This only has to provide attributes
that need to change (or not).
:param bool ignore_mgmt: ignore resetting management fields. These are
generally better set after creation using `engine.interface_options`
:raises UpdateElementFailed: Failed to update the element
:return: (Interface, modified, created)
:rtype: tuple
.. note:: Interfaces with multiple IP addresses are ignored
"""
base_updated = self._update_interface(other_interface)
mgmt = ('auth_request', 'backup_heartbeat', 'backup_mgt', 'primary_mgt', 'primary_heartbeat', 'outgoing')
updated = False
invalid_routes = []
def process_interfaces(current, interface):
updated = False
invalid_routes = []
# Ignore interfaces with multiple addresses
if current.has_multiple_addresses:
return (updated, invalid_routes) # depends on [control=['if'], data=[]]
local_interfaces = current.interfaces # Existing interface
for interface in interface.interfaces: # New values
local_interface = None
if not getattr(interface, 'nodeid', None): # CVI
cvi = [itf for itf in local_interfaces if not getattr(itf, 'nodeid', None)]
local_interface = cvi[0] if cvi else None # depends on [control=['if'], data=[]]
else:
local_interface = local_interfaces.get(nodeid=interface.nodeid)
if local_interface: # CVI or NDI sub interfaces
for (name, value) in interface.data.items():
if getattr(local_interface, name) != value:
if ignore_mgmt and name in mgmt:
pass # depends on [control=['if'], data=[]]
else:
local_interface[name] = value
updated = True
if 'network_value' in name: # Only reset routes if network changed
invalid_routes.append(interface.nicid) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['value']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
current.data.setdefault('interfaces', []).append({interface.typeof: interface.data})
updated = True # depends on [control=['for'], data=['interface']]
return (updated, invalid_routes)
# Handle VLANs
is_vlan = other_interface.has_vlan
if is_vlan:
vlan_interfaces = self.vlan_interface
for pvlan in other_interface.vlan_interface:
current = vlan_interfaces.get(pvlan.vlan_id)
if current:
# PhysicalVlanInterface, set any parent interface values
if current._update_interface(pvlan):
updated = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Create new interface
self.data.setdefault('vlanInterfaces', []).append(pvlan.data)
updated = True
continue # Skip sub interface check
(_updated, routes) = process_interfaces(current, pvlan)
if _updated:
updated = True # depends on [control=['if'], data=[]]
invalid_routes.extend(routes) # depends on [control=['for'], data=['pvlan']] # depends on [control=['if'], data=[]]
else:
(_updated, routes) = process_interfaces(self, other_interface)
if _updated:
updated = True # depends on [control=['if'], data=[]]
invalid_routes.extend(routes)
interface = self
if updated or base_updated:
interface = self.update()
if invalid_routes: # Interface updated, check the routes
del_invalid_routes(self._engine, invalid_routes) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return (interface, base_updated or updated) |
def relpath_to_modname(relpath):
    """Convert relative path to module name
    Within a project, a path to the source file is uniquely identified with a
    module name. Relative paths of the form 'foo/bar' are *not* converted to
    module names 'foo.bar', because (1) they identify directories, not regular
    files, and (2) already 'foo/bar/__init__.py' would claim that conversion.
    Args:
        relpath (str): Relative path from some location on sys.path
    Example:
        >>> relpath_to_modname('ballet/util/_util.py')
        'ballet.util._util'
    """
    # don't try to resolve!
    path = pathlib.Path(relpath)
    if path.name == '__init__.py':
        # Package marker: the module name is the containing directory.
        parts = path.parent.parts
    elif path.suffix == '.py':
        # Plain module: drop the .py extension.
        parts = path.with_suffix('').parts
    else:
        msg = 'Cannot convert a non-python file to a modname'
        msg_detail = 'The relpath given is: {}'.format(relpath)
        logger.error(msg + '\n' + msg_detail)
        raise ValueError(msg)
    return '.'.join(parts)
constant[Convert relative path to module name
Within a project, a path to the source file is uniquely identified with a
module name. Relative paths of the form 'foo/bar' are *not* converted to
module names 'foo.bar', because (1) they identify directories, not regular
files, and (2) already 'foo/bar/__init__.py' would claim that conversion.
Args:
relpath (str): Relative path from some location on sys.path
Example:
>>> relpath_to_modname('ballet/util/_util.py')
'ballet.util._util'
]
variable[p] assign[=] call[name[pathlib].Path, parameter[name[relpath]]]
if compare[name[p].name equal[==] constant[__init__.py]] begin[:]
variable[p] assign[=] name[p].parent
return[call[constant[.].join, parameter[name[p].parts]]] | keyword[def] identifier[relpath_to_modname] ( identifier[relpath] ):
literal[string]
identifier[p] = identifier[pathlib] . identifier[Path] ( identifier[relpath] )
keyword[if] identifier[p] . identifier[name] == literal[string] :
identifier[p] = identifier[p] . identifier[parent]
keyword[elif] identifier[p] . identifier[suffix] == literal[string] :
identifier[p] = identifier[p] . identifier[with_suffix] ( literal[string] )
keyword[else] :
identifier[msg] = literal[string]
identifier[msg_detail] = literal[string] . identifier[format] ( identifier[relpath] )
identifier[logger] . identifier[error] ( identifier[msg] + literal[string] + identifier[msg_detail] )
keyword[raise] identifier[ValueError] ( identifier[msg] )
keyword[return] literal[string] . identifier[join] ( identifier[p] . identifier[parts] ) | def relpath_to_modname(relpath):
"""Convert relative path to module name
Within a project, a path to the source file is uniquely identified with a
module name. Relative paths of the form 'foo/bar' are *not* converted to
module names 'foo.bar', because (1) they identify directories, not regular
files, and (2) already 'foo/bar/__init__.py' would claim that conversion.
Args:
relpath (str): Relative path from some location on sys.path
Example:
>>> relpath_to_modname('ballet/util/_util.py')
'ballet.util._util'
"""
# don't try to resolve!
p = pathlib.Path(relpath)
if p.name == '__init__.py':
p = p.parent # depends on [control=['if'], data=[]]
elif p.suffix == '.py':
p = p.with_suffix('') # depends on [control=['if'], data=[]]
else:
msg = 'Cannot convert a non-python file to a modname'
msg_detail = 'The relpath given is: {}'.format(relpath)
logger.error(msg + '\n' + msg_detail)
raise ValueError(msg)
return '.'.join(p.parts) |
def getEmpTraitCorrCoef(self):
    """
    Return the empirical trait correlation matrix, i.e. the empirical
    trait covariance normalised by the per-trait standard deviations.
    """
    covar = self.getEmpTraitCovar()
    # Column vector of standard deviations taken from the diagonal.
    std = SP.sqrt(covar.diagonal())[:, SP.newaxis]
    corr = covar / std / std.T
    return corr
return RV | def function[getEmpTraitCorrCoef, parameter[self]]:
constant[
Returns the empirical trait correlation matrix
]
variable[cov] assign[=] call[name[self].getEmpTraitCovar, parameter[]]
variable[stds] assign[=] call[call[name[SP].sqrt, parameter[call[name[cov].diagonal, parameter[]]]]][tuple[[<ast.Slice object at 0x7da18f811ba0>, <ast.Attribute object at 0x7da18f8109a0>]]]
variable[RV] assign[=] binary_operation[binary_operation[name[cov] / name[stds]] / name[stds].T]
return[name[RV]] | keyword[def] identifier[getEmpTraitCorrCoef] ( identifier[self] ):
literal[string]
identifier[cov] = identifier[self] . identifier[getEmpTraitCovar] ()
identifier[stds] = identifier[SP] . identifier[sqrt] ( identifier[cov] . identifier[diagonal] ())[:, identifier[SP] . identifier[newaxis] ]
identifier[RV] = identifier[cov] / identifier[stds] / identifier[stds] . identifier[T]
keyword[return] identifier[RV] | def getEmpTraitCorrCoef(self):
"""
Returns the empirical trait correlation matrix
"""
cov = self.getEmpTraitCovar()
stds = SP.sqrt(cov.diagonal())[:, SP.newaxis]
RV = cov / stds / stds.T
return RV |
def add_var_arg(self, arg):
    """
    Add a variable (or macro) argument to the condor job. The argument is
    added to the submit file and a different value of the argument can be set
    for each node in the DAG.
    @param arg: name of option to add.
    """
    # Register the argument under the next free index, then advance it.
    index = self.__arg_index
    self.__args.append(arg)
    self.__job.add_var_arg(index)
    self.__arg_index = index + 1
constant[
Add a variable (or macro) argument to the condor job. The argument is
added to the submit file and a different value of the argument can be set
for each node in the DAG.
@param arg: name of option to add.
]
call[name[self].__args.append, parameter[name[arg]]]
call[name[self].__job.add_var_arg, parameter[name[self].__arg_index]]
<ast.AugAssign object at 0x7da1b0bda140> | keyword[def] identifier[add_var_arg] ( identifier[self] , identifier[arg] ):
literal[string]
identifier[self] . identifier[__args] . identifier[append] ( identifier[arg] )
identifier[self] . identifier[__job] . identifier[add_var_arg] ( identifier[self] . identifier[__arg_index] )
identifier[self] . identifier[__arg_index] += literal[int] | def add_var_arg(self, arg):
"""
Add a variable (or macro) argument to the condor job. The argument is
added to the submit file and a different value of the argument can be set
for each node in the DAG.
@param arg: name of option to add.
"""
self.__args.append(arg)
self.__job.add_var_arg(self.__arg_index)
self.__arg_index += 1 |
def _unique_names():
    """Generates unique sequences of bytes.
    """
    alphabet = ("abcdefghijklmnopqrstuvwxyz"
                "0123456789")
    # Slice (rather than index) so each element stays a length-1 sequence
    # of the same type as the alphabet.
    pool = [alphabet[i:i + 1] for i in irange(len(alphabet))]
    rng = random.Random()
    while True:
        yield ''.join(rng.choice(pool) for _ in irange(10))
constant[Generates unique sequences of bytes.
]
variable[characters] assign[=] constant[abcdefghijklmnopqrstuvwxyz0123456789]
variable[characters] assign[=] <ast.ListComp object at 0x7da1b10229b0>
variable[rng] assign[=] call[name[random].Random, parameter[]]
while constant[True] begin[:]
variable[letters] assign[=] <ast.ListComp object at 0x7da1b10217e0>
<ast.Yield object at 0x7da1b10226b0> | keyword[def] identifier[_unique_names] ():
literal[string]
identifier[characters] =( literal[string]
literal[string] )
identifier[characters] =[ identifier[characters] [ identifier[i] : identifier[i] + literal[int] ] keyword[for] identifier[i] keyword[in] identifier[irange] ( identifier[len] ( identifier[characters] ))]
identifier[rng] = identifier[random] . identifier[Random] ()
keyword[while] keyword[True] :
identifier[letters] =[ identifier[rng] . identifier[choice] ( identifier[characters] ) keyword[for] identifier[i] keyword[in] identifier[irange] ( literal[int] )]
keyword[yield] literal[string] . identifier[join] ( identifier[letters] ) | def _unique_names():
"""Generates unique sequences of bytes.
"""
characters = 'abcdefghijklmnopqrstuvwxyz0123456789'
characters = [characters[i:i + 1] for i in irange(len(characters))]
rng = random.Random()
while True:
letters = [rng.choice(characters) for i in irange(10)]
yield ''.join(letters) # depends on [control=['while'], data=[]] |
def gaussian(shape, sd=1.0):
    """Sample a multivariate Gaussian pdf, normalised to have unit sum.
    Parameters
    ----------
    shape : tuple or int
        Shape of output array. An int is treated as a 1-tuple.
    sd : float, optional (default 1.0)
        Standard deviation of Gaussian pdf.
    Returns
    -------
    gc : ndarray
        Sampled Gaussian pdf.
    """
    # A named `def` instead of a lambda assignment (PEP 8 E731); the local
    # result variable is renamed so it no longer shadows the stdlib `gc`
    # module name.
    def _pdf(x):
        # 1D Gaussian pdf with standard deviation `sd`.
        return np.exp(-(x**2) / (2.0 * sd**2)) / (np.sqrt(2.0 * np.pi) * sd)
    if isinstance(shape, int):
        shape = (shape,)
    pdf = 1.0
    for axis, n in enumerate(shape):
        # Sample n points on [-3, 3] for this axis, reshaped so each
        # per-axis factor broadcasts across the full n-dimensional grid.
        x = np.linspace(-3.0, 3.0, n).reshape(
            (1,) * axis + (n,) + (1,) * (len(shape) - axis - 1))
        pdf = pdf * _pdf(x)
    pdf /= np.sum(pdf)
    return pdf
constant[Sample a multivariate Gaussian pdf, normalised to have unit sum.
Parameters
----------
shape : tuple
Shape of output array.
sd : float, optional (default 1.0)
Standard deviation of Gaussian pdf.
Returns
-------
gc : ndarray
Sampled Gaussian pdf.
]
variable[gfn] assign[=] <ast.Lambda object at 0x7da1b0686d70>
variable[gc] assign[=] constant[1.0]
if call[name[isinstance], parameter[name[shape], name[int]]] begin[:]
variable[shape] assign[=] tuple[[<ast.Name object at 0x7da1b0687880>]]
for taget[tuple[[<ast.Name object at 0x7da1b0687550>, <ast.Name object at 0x7da1b0687670>]]] in starred[call[name[enumerate], parameter[name[shape]]]] begin[:]
variable[x] assign[=] call[call[name[np].linspace, parameter[<ast.UnaryOp object at 0x7da1b0685c00>, constant[3.0], name[n]]].reshape, parameter[binary_operation[binary_operation[binary_operation[tuple[[<ast.Constant object at 0x7da1b0695210>]] * name[k]] + tuple[[<ast.Name object at 0x7da1b06952a0>]]] + binary_operation[tuple[[<ast.Constant object at 0x7da1b0695330>]] * binary_operation[binary_operation[call[name[len], parameter[name[shape]]] - name[k]] - constant[1]]]]]]
variable[gc] assign[=] binary_operation[name[gc] * call[name[gfn], parameter[name[x], name[sd]]]]
<ast.AugAssign object at 0x7da1b06d5c90>
return[name[gc]] | keyword[def] identifier[gaussian] ( identifier[shape] , identifier[sd] = literal[int] ):
literal[string]
identifier[gfn] = keyword[lambda] identifier[x] , identifier[sd] : identifier[np] . identifier[exp] (-( identifier[x] ** literal[int] )/( literal[int] * identifier[sd] ** literal[int] ))/( identifier[np] . identifier[sqrt] ( literal[int] * identifier[np] . identifier[pi] )* identifier[sd] )
identifier[gc] = literal[int]
keyword[if] identifier[isinstance] ( identifier[shape] , identifier[int] ):
identifier[shape] =( identifier[shape] ,)
keyword[for] identifier[k] , identifier[n] keyword[in] identifier[enumerate] ( identifier[shape] ):
identifier[x] = identifier[np] . identifier[linspace] (- literal[int] , literal[int] , identifier[n] ). identifier[reshape] (
( literal[int] ,)* identifier[k] +( identifier[n] ,)+( literal[int] ,)*( identifier[len] ( identifier[shape] )- identifier[k] - literal[int] ))
identifier[gc] = identifier[gc] * identifier[gfn] ( identifier[x] , identifier[sd] )
identifier[gc] /= identifier[np] . identifier[sum] ( identifier[gc] )
keyword[return] identifier[gc] | def gaussian(shape, sd=1.0):
"""Sample a multivariate Gaussian pdf, normalised to have unit sum.
Parameters
----------
shape : tuple
Shape of output array.
sd : float, optional (default 1.0)
Standard deviation of Gaussian pdf.
Returns
-------
gc : ndarray
Sampled Gaussian pdf.
"""
gfn = lambda x, sd: np.exp(-x ** 2 / (2.0 * sd ** 2)) / (np.sqrt(2.0 * np.pi) * sd)
gc = 1.0
if isinstance(shape, int):
shape = (shape,) # depends on [control=['if'], data=[]]
for (k, n) in enumerate(shape):
x = np.linspace(-3.0, 3.0, n).reshape((1,) * k + (n,) + (1,) * (len(shape) - k - 1))
gc = gc * gfn(x, sd) # depends on [control=['for'], data=[]]
gc /= np.sum(gc)
return gc |
def get_features_by_ids(self, ids=None, threshold=0.0001, func=np.mean,
get_weights=False):
''' Returns features for which the mean loading across all specified
studies (in ids) is >= threshold. '''
weights = self.data.ix[ids].apply(func, 0)
above_thresh = weights[weights >= threshold]
return above_thresh if get_weights else list(above_thresh.index) | def function[get_features_by_ids, parameter[self, ids, threshold, func, get_weights]]:
constant[ Returns features for which the mean loading across all specified
studies (in ids) is >= threshold. ]
variable[weights] assign[=] call[call[name[self].data.ix][name[ids]].apply, parameter[name[func], constant[0]]]
variable[above_thresh] assign[=] call[name[weights]][compare[name[weights] greater_or_equal[>=] name[threshold]]]
return[<ast.IfExp object at 0x7da20e9548b0>] | keyword[def] identifier[get_features_by_ids] ( identifier[self] , identifier[ids] = keyword[None] , identifier[threshold] = literal[int] , identifier[func] = identifier[np] . identifier[mean] ,
identifier[get_weights] = keyword[False] ):
literal[string]
identifier[weights] = identifier[self] . identifier[data] . identifier[ix] [ identifier[ids] ]. identifier[apply] ( identifier[func] , literal[int] )
identifier[above_thresh] = identifier[weights] [ identifier[weights] >= identifier[threshold] ]
keyword[return] identifier[above_thresh] keyword[if] identifier[get_weights] keyword[else] identifier[list] ( identifier[above_thresh] . identifier[index] ) | def get_features_by_ids(self, ids=None, threshold=0.0001, func=np.mean, get_weights=False):
""" Returns features for which the mean loading across all specified
studies (in ids) is >= threshold. """
weights = self.data.ix[ids].apply(func, 0)
above_thresh = weights[weights >= threshold]
return above_thresh if get_weights else list(above_thresh.index) |
def package(*args: str) -> str:
"""
Creates an absolute path to a file or folder within the cauldron package
using the relative path elements specified by the args.
:param args:
Zero or more relative path elements that describe a file or folder
within the reporting
"""
return clean(os.path.join(os.path.dirname(__file__), '..', *args)) | def function[package, parameter[]]:
constant[
Creates an absolute path to a file or folder within the cauldron package
using the relative path elements specified by the args.
:param args:
Zero or more relative path elements that describe a file or folder
within the reporting
]
return[call[name[clean], parameter[call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], constant[..], <ast.Starred object at 0x7da20e954730>]]]]] | keyword[def] identifier[package] (* identifier[args] : identifier[str] )-> identifier[str] :
literal[string]
keyword[return] identifier[clean] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), literal[string] ,* identifier[args] )) | def package(*args: str) -> str:
"""
Creates an absolute path to a file or folder within the cauldron package
using the relative path elements specified by the args.
:param args:
Zero or more relative path elements that describe a file or folder
within the reporting
"""
return clean(os.path.join(os.path.dirname(__file__), '..', *args)) |
def checksum(self):
"""Grab checksum string
"""
md5sum, md5sum64, = [], []
for line in self.SLACKBUILDS_TXT.splitlines():
if line.startswith(self.line_name):
sbo_name = line[17:].strip()
if line.startswith(self.line_md5_64):
if sbo_name == self.name and line[26:].strip():
md5sum64 = line[26:].strip().split()
if line.startswith(self.line_md5):
if sbo_name == self.name and line[19:].strip():
md5sum = line[19:].strip().split()
return self._select_md5sum_arch(md5sum, md5sum64) | def function[checksum, parameter[self]]:
constant[Grab checksum string
]
<ast.Tuple object at 0x7da204963070> assign[=] tuple[[<ast.List object at 0x7da204963dc0>, <ast.List object at 0x7da2049632e0>]]
for taget[name[line]] in starred[call[name[self].SLACKBUILDS_TXT.splitlines, parameter[]]] begin[:]
if call[name[line].startswith, parameter[name[self].line_name]] begin[:]
variable[sbo_name] assign[=] call[call[name[line]][<ast.Slice object at 0x7da20c6c4c40>].strip, parameter[]]
if call[name[line].startswith, parameter[name[self].line_md5_64]] begin[:]
if <ast.BoolOp object at 0x7da20c6c56f0> begin[:]
variable[md5sum64] assign[=] call[call[call[name[line]][<ast.Slice object at 0x7da20c6c65c0>].strip, parameter[]].split, parameter[]]
if call[name[line].startswith, parameter[name[self].line_md5]] begin[:]
if <ast.BoolOp object at 0x7da20c6c4f70> begin[:]
variable[md5sum] assign[=] call[call[call[name[line]][<ast.Slice object at 0x7da20c6c4f10>].strip, parameter[]].split, parameter[]]
return[call[name[self]._select_md5sum_arch, parameter[name[md5sum], name[md5sum64]]]] | keyword[def] identifier[checksum] ( identifier[self] ):
literal[string]
identifier[md5sum] , identifier[md5sum64] ,=[],[]
keyword[for] identifier[line] keyword[in] identifier[self] . identifier[SLACKBUILDS_TXT] . identifier[splitlines] ():
keyword[if] identifier[line] . identifier[startswith] ( identifier[self] . identifier[line_name] ):
identifier[sbo_name] = identifier[line] [ literal[int] :]. identifier[strip] ()
keyword[if] identifier[line] . identifier[startswith] ( identifier[self] . identifier[line_md5_64] ):
keyword[if] identifier[sbo_name] == identifier[self] . identifier[name] keyword[and] identifier[line] [ literal[int] :]. identifier[strip] ():
identifier[md5sum64] = identifier[line] [ literal[int] :]. identifier[strip] (). identifier[split] ()
keyword[if] identifier[line] . identifier[startswith] ( identifier[self] . identifier[line_md5] ):
keyword[if] identifier[sbo_name] == identifier[self] . identifier[name] keyword[and] identifier[line] [ literal[int] :]. identifier[strip] ():
identifier[md5sum] = identifier[line] [ literal[int] :]. identifier[strip] (). identifier[split] ()
keyword[return] identifier[self] . identifier[_select_md5sum_arch] ( identifier[md5sum] , identifier[md5sum64] ) | def checksum(self):
"""Grab checksum string
"""
(md5sum, md5sum64) = ([], [])
for line in self.SLACKBUILDS_TXT.splitlines():
if line.startswith(self.line_name):
sbo_name = line[17:].strip() # depends on [control=['if'], data=[]]
if line.startswith(self.line_md5_64):
if sbo_name == self.name and line[26:].strip():
md5sum64 = line[26:].strip().split() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if line.startswith(self.line_md5):
if sbo_name == self.name and line[19:].strip():
md5sum = line[19:].strip().split() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
return self._select_md5sum_arch(md5sum, md5sum64) |
def select_as_tabledata(
self, table_name, columns=None, where=None, extra=None, type_hints=None
):
"""
Get data in the database and return fetched data as a
:py:class:`tabledata.TableData` instance.
:param str table_name: |arg_select_table_name|
:param list columns: |arg_select_as_xx_columns|
:param where: |arg_select_where|
:type where: |arg_where_type|
:param str extra: |arg_select_extra|
:return: Table data as a :py:class:`tabledata.TableData` instance.
:rtype: tabledata.TableData
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
:raises simplesqlite.OperationalError: |raises_operational_error|
.. note::
``pandas`` package required to execute this method.
"""
if columns is None:
columns = self.fetch_attr_names(table_name)
result = self.select(
select=AttrList(columns), table_name=table_name, where=where, extra=extra
)
if result is None:
return TableData(None, [], [])
if type_hints is None:
type_hints = self.fetch_data_types(table_name)
return TableData(
table_name,
columns,
result.fetchall(),
type_hints=[type_hints.get(col) for col in columns],
) | def function[select_as_tabledata, parameter[self, table_name, columns, where, extra, type_hints]]:
constant[
Get data in the database and return fetched data as a
:py:class:`tabledata.TableData` instance.
:param str table_name: |arg_select_table_name|
:param list columns: |arg_select_as_xx_columns|
:param where: |arg_select_where|
:type where: |arg_where_type|
:param str extra: |arg_select_extra|
:return: Table data as a :py:class:`tabledata.TableData` instance.
:rtype: tabledata.TableData
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
:raises simplesqlite.OperationalError: |raises_operational_error|
.. note::
``pandas`` package required to execute this method.
]
if compare[name[columns] is constant[None]] begin[:]
variable[columns] assign[=] call[name[self].fetch_attr_names, parameter[name[table_name]]]
variable[result] assign[=] call[name[self].select, parameter[]]
if compare[name[result] is constant[None]] begin[:]
return[call[name[TableData], parameter[constant[None], list[[]], list[[]]]]]
if compare[name[type_hints] is constant[None]] begin[:]
variable[type_hints] assign[=] call[name[self].fetch_data_types, parameter[name[table_name]]]
return[call[name[TableData], parameter[name[table_name], name[columns], call[name[result].fetchall, parameter[]]]]] | keyword[def] identifier[select_as_tabledata] (
identifier[self] , identifier[table_name] , identifier[columns] = keyword[None] , identifier[where] = keyword[None] , identifier[extra] = keyword[None] , identifier[type_hints] = keyword[None]
):
literal[string]
keyword[if] identifier[columns] keyword[is] keyword[None] :
identifier[columns] = identifier[self] . identifier[fetch_attr_names] ( identifier[table_name] )
identifier[result] = identifier[self] . identifier[select] (
identifier[select] = identifier[AttrList] ( identifier[columns] ), identifier[table_name] = identifier[table_name] , identifier[where] = identifier[where] , identifier[extra] = identifier[extra]
)
keyword[if] identifier[result] keyword[is] keyword[None] :
keyword[return] identifier[TableData] ( keyword[None] ,[],[])
keyword[if] identifier[type_hints] keyword[is] keyword[None] :
identifier[type_hints] = identifier[self] . identifier[fetch_data_types] ( identifier[table_name] )
keyword[return] identifier[TableData] (
identifier[table_name] ,
identifier[columns] ,
identifier[result] . identifier[fetchall] (),
identifier[type_hints] =[ identifier[type_hints] . identifier[get] ( identifier[col] ) keyword[for] identifier[col] keyword[in] identifier[columns] ],
) | def select_as_tabledata(self, table_name, columns=None, where=None, extra=None, type_hints=None):
"""
Get data in the database and return fetched data as a
:py:class:`tabledata.TableData` instance.
:param str table_name: |arg_select_table_name|
:param list columns: |arg_select_as_xx_columns|
:param where: |arg_select_where|
:type where: |arg_where_type|
:param str extra: |arg_select_extra|
:return: Table data as a :py:class:`tabledata.TableData` instance.
:rtype: tabledata.TableData
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
:raises simplesqlite.OperationalError: |raises_operational_error|
.. note::
``pandas`` package required to execute this method.
"""
if columns is None:
columns = self.fetch_attr_names(table_name) # depends on [control=['if'], data=['columns']]
result = self.select(select=AttrList(columns), table_name=table_name, where=where, extra=extra)
if result is None:
return TableData(None, [], []) # depends on [control=['if'], data=[]]
if type_hints is None:
type_hints = self.fetch_data_types(table_name) # depends on [control=['if'], data=['type_hints']]
return TableData(table_name, columns, result.fetchall(), type_hints=[type_hints.get(col) for col in columns]) |
def start_monitor(redis_address,
stdout_file=None,
stderr_file=None,
autoscaling_config=None,
redis_password=None):
"""Run a process to monitor the other processes.
Args:
redis_address (str): The address that the Redis server is listening on.
stdout_file: A file handle opened for writing to redirect stdout to. If
no redirection should happen, then this should be None.
stderr_file: A file handle opened for writing to redirect stderr to. If
no redirection should happen, then this should be None.
autoscaling_config: path to autoscaling config file.
redis_password (str): The password of the redis server.
Returns:
ProcessInfo for the process that was started.
"""
monitor_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "monitor.py")
command = [
sys.executable, "-u", monitor_path,
"--redis-address=" + str(redis_address)
]
if autoscaling_config:
command.append("--autoscaling-config=" + str(autoscaling_config))
if redis_password:
command.append("--redis-password=" + redis_password)
process_info = start_ray_process(
command,
ray_constants.PROCESS_TYPE_MONITOR,
stdout_file=stdout_file,
stderr_file=stderr_file)
return process_info | def function[start_monitor, parameter[redis_address, stdout_file, stderr_file, autoscaling_config, redis_password]]:
constant[Run a process to monitor the other processes.
Args:
redis_address (str): The address that the Redis server is listening on.
stdout_file: A file handle opened for writing to redirect stdout to. If
no redirection should happen, then this should be None.
stderr_file: A file handle opened for writing to redirect stderr to. If
no redirection should happen, then this should be None.
autoscaling_config: path to autoscaling config file.
redis_password (str): The password of the redis server.
Returns:
ProcessInfo for the process that was started.
]
variable[monitor_path] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[call[name[os].path.abspath, parameter[name[__file__]]]]], constant[monitor.py]]]
variable[command] assign[=] list[[<ast.Attribute object at 0x7da20c7c8970>, <ast.Constant object at 0x7da20c7c94b0>, <ast.Name object at 0x7da20c7c91e0>, <ast.BinOp object at 0x7da20c7ca620>]]
if name[autoscaling_config] begin[:]
call[name[command].append, parameter[binary_operation[constant[--autoscaling-config=] + call[name[str], parameter[name[autoscaling_config]]]]]]
if name[redis_password] begin[:]
call[name[command].append, parameter[binary_operation[constant[--redis-password=] + name[redis_password]]]]
variable[process_info] assign[=] call[name[start_ray_process], parameter[name[command], name[ray_constants].PROCESS_TYPE_MONITOR]]
return[name[process_info]] | keyword[def] identifier[start_monitor] ( identifier[redis_address] ,
identifier[stdout_file] = keyword[None] ,
identifier[stderr_file] = keyword[None] ,
identifier[autoscaling_config] = keyword[None] ,
identifier[redis_password] = keyword[None] ):
literal[string]
identifier[monitor_path] = identifier[os] . identifier[path] . identifier[join] (
identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[__file__] )), literal[string] )
identifier[command] =[
identifier[sys] . identifier[executable] , literal[string] , identifier[monitor_path] ,
literal[string] + identifier[str] ( identifier[redis_address] )
]
keyword[if] identifier[autoscaling_config] :
identifier[command] . identifier[append] ( literal[string] + identifier[str] ( identifier[autoscaling_config] ))
keyword[if] identifier[redis_password] :
identifier[command] . identifier[append] ( literal[string] + identifier[redis_password] )
identifier[process_info] = identifier[start_ray_process] (
identifier[command] ,
identifier[ray_constants] . identifier[PROCESS_TYPE_MONITOR] ,
identifier[stdout_file] = identifier[stdout_file] ,
identifier[stderr_file] = identifier[stderr_file] )
keyword[return] identifier[process_info] | def start_monitor(redis_address, stdout_file=None, stderr_file=None, autoscaling_config=None, redis_password=None):
"""Run a process to monitor the other processes.
Args:
redis_address (str): The address that the Redis server is listening on.
stdout_file: A file handle opened for writing to redirect stdout to. If
no redirection should happen, then this should be None.
stderr_file: A file handle opened for writing to redirect stderr to. If
no redirection should happen, then this should be None.
autoscaling_config: path to autoscaling config file.
redis_password (str): The password of the redis server.
Returns:
ProcessInfo for the process that was started.
"""
monitor_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'monitor.py')
command = [sys.executable, '-u', monitor_path, '--redis-address=' + str(redis_address)]
if autoscaling_config:
command.append('--autoscaling-config=' + str(autoscaling_config)) # depends on [control=['if'], data=[]]
if redis_password:
command.append('--redis-password=' + redis_password) # depends on [control=['if'], data=[]]
process_info = start_ray_process(command, ray_constants.PROCESS_TYPE_MONITOR, stdout_file=stdout_file, stderr_file=stderr_file)
return process_info |
def append_all_agent_batch_to_update_buffer(self, key_list=None, batch_size=None, training_length=None):
"""
Appends the buffer of all agents to the update buffer.
:param key_list: The fields that must be added. If None: all fields will be appended.
:param batch_size: The number of elements that must be appended. If None: All of them will be.
:param training_length: The length of the samples that must be appended. If None: only takes one element.
"""
for agent_id in self.keys():
self.append_update_buffer(agent_id, key_list, batch_size, training_length) | def function[append_all_agent_batch_to_update_buffer, parameter[self, key_list, batch_size, training_length]]:
constant[
Appends the buffer of all agents to the update buffer.
:param key_list: The fields that must be added. If None: all fields will be appended.
:param batch_size: The number of elements that must be appended. If None: All of them will be.
:param training_length: The length of the samples that must be appended. If None: only takes one element.
]
for taget[name[agent_id]] in starred[call[name[self].keys, parameter[]]] begin[:]
call[name[self].append_update_buffer, parameter[name[agent_id], name[key_list], name[batch_size], name[training_length]]] | keyword[def] identifier[append_all_agent_batch_to_update_buffer] ( identifier[self] , identifier[key_list] = keyword[None] , identifier[batch_size] = keyword[None] , identifier[training_length] = keyword[None] ):
literal[string]
keyword[for] identifier[agent_id] keyword[in] identifier[self] . identifier[keys] ():
identifier[self] . identifier[append_update_buffer] ( identifier[agent_id] , identifier[key_list] , identifier[batch_size] , identifier[training_length] ) | def append_all_agent_batch_to_update_buffer(self, key_list=None, batch_size=None, training_length=None):
"""
Appends the buffer of all agents to the update buffer.
:param key_list: The fields that must be added. If None: all fields will be appended.
:param batch_size: The number of elements that must be appended. If None: All of them will be.
:param training_length: The length of the samples that must be appended. If None: only takes one element.
"""
for agent_id in self.keys():
self.append_update_buffer(agent_id, key_list, batch_size, training_length) # depends on [control=['for'], data=['agent_id']] |
def brpop(self, keys, timeout=0):
"""
RPOP a value off of the first non-empty list
named in the ``keys`` list.
If none of the lists in ``keys`` has a value to RPOP, then block
for ``timeout`` seconds, or until a value gets pushed on to one
of the lists.
If timeout is 0, then block indefinitely.
"""
if timeout is None:
timeout = 0
keys = list_or_args(keys, None)
keys.append(timeout)
return self.execute_command('BRPOP', *keys) | def function[brpop, parameter[self, keys, timeout]]:
constant[
RPOP a value off of the first non-empty list
named in the ``keys`` list.
If none of the lists in ``keys`` has a value to RPOP, then block
for ``timeout`` seconds, or until a value gets pushed on to one
of the lists.
If timeout is 0, then block indefinitely.
]
if compare[name[timeout] is constant[None]] begin[:]
variable[timeout] assign[=] constant[0]
variable[keys] assign[=] call[name[list_or_args], parameter[name[keys], constant[None]]]
call[name[keys].append, parameter[name[timeout]]]
return[call[name[self].execute_command, parameter[constant[BRPOP], <ast.Starred object at 0x7da18dc98f70>]]] | keyword[def] identifier[brpop] ( identifier[self] , identifier[keys] , identifier[timeout] = literal[int] ):
literal[string]
keyword[if] identifier[timeout] keyword[is] keyword[None] :
identifier[timeout] = literal[int]
identifier[keys] = identifier[list_or_args] ( identifier[keys] , keyword[None] )
identifier[keys] . identifier[append] ( identifier[timeout] )
keyword[return] identifier[self] . identifier[execute_command] ( literal[string] ,* identifier[keys] ) | def brpop(self, keys, timeout=0):
"""
RPOP a value off of the first non-empty list
named in the ``keys`` list.
If none of the lists in ``keys`` has a value to RPOP, then block
for ``timeout`` seconds, or until a value gets pushed on to one
of the lists.
If timeout is 0, then block indefinitely.
"""
if timeout is None:
timeout = 0 # depends on [control=['if'], data=['timeout']]
keys = list_or_args(keys, None)
keys.append(timeout)
return self.execute_command('BRPOP', *keys) |
def create(self):
"""
Calls various methods sequentially in order to fully build the
database.
"""
# Calls each of these methods in order. _populate_from_lines and
# _update_relations must be implemented in subclasses.
self._init_tables()
self._populate_from_lines(self.iterator)
self._update_relations()
self._finalize() | def function[create, parameter[self]]:
constant[
Calls various methods sequentially in order to fully build the
database.
]
call[name[self]._init_tables, parameter[]]
call[name[self]._populate_from_lines, parameter[name[self].iterator]]
call[name[self]._update_relations, parameter[]]
call[name[self]._finalize, parameter[]] | keyword[def] identifier[create] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_init_tables] ()
identifier[self] . identifier[_populate_from_lines] ( identifier[self] . identifier[iterator] )
identifier[self] . identifier[_update_relations] ()
identifier[self] . identifier[_finalize] () | def create(self):
"""
Calls various methods sequentially in order to fully build the
database.
"""
# Calls each of these methods in order. _populate_from_lines and
# _update_relations must be implemented in subclasses.
self._init_tables()
self._populate_from_lines(self.iterator)
self._update_relations()
self._finalize() |
def plot_heatmap(data, title='Heatmap', show_legend=True,
show_labels=True, label_fmt='.2f',
vmin=None, vmax=None,
figsize=None, label_color='w',
cmap='RdBu', **kwargs):
"""
Plot a heatmap using matplotlib's pcolor.
Args:
* data (DataFrame): DataFrame to plot. Usually small matrix (ex.
correlation matrix).
* title (string): Plot title
* show_legend (bool): Show color legend
* show_labels (bool): Show value labels
* label_fmt (str): Label format string
* vmin (float): Min value for scale
* vmax (float): Max value for scale
* cmap (string): Color map
* kwargs: Passed to matplotlib's pcolor
"""
fig, ax = plt.subplots(figsize=figsize)
heatmap = ax.pcolor(data, vmin=vmin, vmax=vmax, cmap=cmap)
# for some reason heatmap has the y values backwards....
ax.invert_yaxis()
if title is not None:
plt.title(title)
if show_legend:
fig.colorbar(heatmap)
if show_labels:
vals = data.values
for x in range(data.shape[0]):
for y in range(data.shape[1]):
plt.text(x + 0.5, y + 0.5, format(vals[y, x], label_fmt),
horizontalalignment='center',
verticalalignment='center',
color=label_color)
plt.yticks(np.arange(0.5, len(data.index), 1), data.index)
plt.xticks(np.arange(0.5, len(data.columns), 1), data.columns)
return plt | def function[plot_heatmap, parameter[data, title, show_legend, show_labels, label_fmt, vmin, vmax, figsize, label_color, cmap]]:
constant[
Plot a heatmap using matplotlib's pcolor.
Args:
* data (DataFrame): DataFrame to plot. Usually small matrix (ex.
correlation matrix).
* title (string): Plot title
* show_legend (bool): Show color legend
* show_labels (bool): Show value labels
* label_fmt (str): Label format string
* vmin (float): Min value for scale
* vmax (float): Max value for scale
* cmap (string): Color map
* kwargs: Passed to matplotlib's pcolor
]
<ast.Tuple object at 0x7da18f09cfa0> assign[=] call[name[plt].subplots, parameter[]]
variable[heatmap] assign[=] call[name[ax].pcolor, parameter[name[data]]]
call[name[ax].invert_yaxis, parameter[]]
if compare[name[title] is_not constant[None]] begin[:]
call[name[plt].title, parameter[name[title]]]
if name[show_legend] begin[:]
call[name[fig].colorbar, parameter[name[heatmap]]]
if name[show_labels] begin[:]
variable[vals] assign[=] name[data].values
for taget[name[x]] in starred[call[name[range], parameter[call[name[data].shape][constant[0]]]]] begin[:]
for taget[name[y]] in starred[call[name[range], parameter[call[name[data].shape][constant[1]]]]] begin[:]
call[name[plt].text, parameter[binary_operation[name[x] + constant[0.5]], binary_operation[name[y] + constant[0.5]], call[name[format], parameter[call[name[vals]][tuple[[<ast.Name object at 0x7da18f09f910>, <ast.Name object at 0x7da18f09e1d0>]]], name[label_fmt]]]]]
call[name[plt].yticks, parameter[call[name[np].arange, parameter[constant[0.5], call[name[len], parameter[name[data].index]], constant[1]]], name[data].index]]
call[name[plt].xticks, parameter[call[name[np].arange, parameter[constant[0.5], call[name[len], parameter[name[data].columns]], constant[1]]], name[data].columns]]
return[name[plt]] | keyword[def] identifier[plot_heatmap] ( identifier[data] , identifier[title] = literal[string] , identifier[show_legend] = keyword[True] ,
identifier[show_labels] = keyword[True] , identifier[label_fmt] = literal[string] ,
identifier[vmin] = keyword[None] , identifier[vmax] = keyword[None] ,
identifier[figsize] = keyword[None] , identifier[label_color] = literal[string] ,
identifier[cmap] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[fig] , identifier[ax] = identifier[plt] . identifier[subplots] ( identifier[figsize] = identifier[figsize] )
identifier[heatmap] = identifier[ax] . identifier[pcolor] ( identifier[data] , identifier[vmin] = identifier[vmin] , identifier[vmax] = identifier[vmax] , identifier[cmap] = identifier[cmap] )
identifier[ax] . identifier[invert_yaxis] ()
keyword[if] identifier[title] keyword[is] keyword[not] keyword[None] :
identifier[plt] . identifier[title] ( identifier[title] )
keyword[if] identifier[show_legend] :
identifier[fig] . identifier[colorbar] ( identifier[heatmap] )
keyword[if] identifier[show_labels] :
identifier[vals] = identifier[data] . identifier[values]
keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[data] . identifier[shape] [ literal[int] ]):
keyword[for] identifier[y] keyword[in] identifier[range] ( identifier[data] . identifier[shape] [ literal[int] ]):
identifier[plt] . identifier[text] ( identifier[x] + literal[int] , identifier[y] + literal[int] , identifier[format] ( identifier[vals] [ identifier[y] , identifier[x] ], identifier[label_fmt] ),
identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] ,
identifier[color] = identifier[label_color] )
identifier[plt] . identifier[yticks] ( identifier[np] . identifier[arange] ( literal[int] , identifier[len] ( identifier[data] . identifier[index] ), literal[int] ), identifier[data] . identifier[index] )
identifier[plt] . identifier[xticks] ( identifier[np] . identifier[arange] ( literal[int] , identifier[len] ( identifier[data] . identifier[columns] ), literal[int] ), identifier[data] . identifier[columns] )
keyword[return] identifier[plt] | def plot_heatmap(data, title='Heatmap', show_legend=True, show_labels=True, label_fmt='.2f', vmin=None, vmax=None, figsize=None, label_color='w', cmap='RdBu', **kwargs):
"""
Plot a heatmap using matplotlib's pcolor.
Args:
* data (DataFrame): DataFrame to plot. Usually small matrix (ex.
correlation matrix).
* title (string): Plot title
* show_legend (bool): Show color legend
* show_labels (bool): Show value labels
* label_fmt (str): Label format string
* vmin (float): Min value for scale
* vmax (float): Max value for scale
* cmap (string): Color map
* kwargs: Passed to matplotlib's pcolor
"""
(fig, ax) = plt.subplots(figsize=figsize)
heatmap = ax.pcolor(data, vmin=vmin, vmax=vmax, cmap=cmap)
# for some reason heatmap has the y values backwards....
ax.invert_yaxis()
if title is not None:
plt.title(title) # depends on [control=['if'], data=['title']]
if show_legend:
fig.colorbar(heatmap) # depends on [control=['if'], data=[]]
if show_labels:
vals = data.values
for x in range(data.shape[0]):
for y in range(data.shape[1]):
plt.text(x + 0.5, y + 0.5, format(vals[y, x], label_fmt), horizontalalignment='center', verticalalignment='center', color=label_color) # depends on [control=['for'], data=['y']] # depends on [control=['for'], data=['x']] # depends on [control=['if'], data=[]]
plt.yticks(np.arange(0.5, len(data.index), 1), data.index)
plt.xticks(np.arange(0.5, len(data.columns), 1), data.columns)
return plt |
def send(self, *args):
"""Sends a single raw message to the IRC server.
Arguments are automatically joined by spaces. No newlines are allowed.
"""
msg = " ".join(a.nick if isinstance(a, User) else str(a) for a in args)
if "\n" in msg:
raise ValueError("Cannot send() a newline. Args: %s" % repr(args))
_log.debug("%s <-- %s", self.server.host, msg)
self.socket.send(msg + "\r\n") | def function[send, parameter[self]]:
constant[Sends a single raw message to the IRC server.
Arguments are automatically joined by spaces. No newlines are allowed.
]
variable[msg] assign[=] call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da207f00760>]]
if compare[constant[
] in name[msg]] begin[:]
<ast.Raise object at 0x7da20c7cb100>
call[name[_log].debug, parameter[constant[%s <-- %s], name[self].server.host, name[msg]]]
call[name[self].socket.send, parameter[binary_operation[name[msg] + constant[
]]]] | keyword[def] identifier[send] ( identifier[self] ,* identifier[args] ):
literal[string]
identifier[msg] = literal[string] . identifier[join] ( identifier[a] . identifier[nick] keyword[if] identifier[isinstance] ( identifier[a] , identifier[User] ) keyword[else] identifier[str] ( identifier[a] ) keyword[for] identifier[a] keyword[in] identifier[args] )
keyword[if] literal[string] keyword[in] identifier[msg] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[repr] ( identifier[args] ))
identifier[_log] . identifier[debug] ( literal[string] , identifier[self] . identifier[server] . identifier[host] , identifier[msg] )
identifier[self] . identifier[socket] . identifier[send] ( identifier[msg] + literal[string] ) | def send(self, *args):
"""Sends a single raw message to the IRC server.
Arguments are automatically joined by spaces. No newlines are allowed.
"""
msg = ' '.join((a.nick if isinstance(a, User) else str(a) for a in args))
if '\n' in msg:
raise ValueError('Cannot send() a newline. Args: %s' % repr(args)) # depends on [control=['if'], data=[]]
_log.debug('%s <-- %s', self.server.host, msg)
self.socket.send(msg + '\r\n') |
def build_pub_dates(article, pub_dates):
"convert pub_dates into ArticleDate objects and add them to article"
for pub_date in pub_dates:
# always want a date type, take it from pub-type if must
if pub_date.get('date-type'):
date_instance = ea.ArticleDate(pub_date.get('date-type'),
pub_date.get('date'))
elif pub_date.get('pub-type'):
date_instance = ea.ArticleDate(pub_date.get('pub-type'),
pub_date.get('date'))
# Set more values
utils.set_attr_if_value(date_instance, 'pub_type', pub_date.get('pub-type'))
utils.set_attr_if_value(date_instance, 'publication_format',
pub_date.get('publication-format'))
utils.set_attr_if_value(date_instance, 'day', pub_date.get('day'))
utils.set_attr_if_value(date_instance, 'month', pub_date.get('month'))
utils.set_attr_if_value(date_instance, 'year', pub_date.get('year'))
article.add_date(date_instance) | def function[build_pub_dates, parameter[article, pub_dates]]:
constant[convert pub_dates into ArticleDate objects and add them to article]
for taget[name[pub_date]] in starred[name[pub_dates]] begin[:]
if call[name[pub_date].get, parameter[constant[date-type]]] begin[:]
variable[date_instance] assign[=] call[name[ea].ArticleDate, parameter[call[name[pub_date].get, parameter[constant[date-type]]], call[name[pub_date].get, parameter[constant[date]]]]]
call[name[utils].set_attr_if_value, parameter[name[date_instance], constant[pub_type], call[name[pub_date].get, parameter[constant[pub-type]]]]]
call[name[utils].set_attr_if_value, parameter[name[date_instance], constant[publication_format], call[name[pub_date].get, parameter[constant[publication-format]]]]]
call[name[utils].set_attr_if_value, parameter[name[date_instance], constant[day], call[name[pub_date].get, parameter[constant[day]]]]]
call[name[utils].set_attr_if_value, parameter[name[date_instance], constant[month], call[name[pub_date].get, parameter[constant[month]]]]]
call[name[utils].set_attr_if_value, parameter[name[date_instance], constant[year], call[name[pub_date].get, parameter[constant[year]]]]]
call[name[article].add_date, parameter[name[date_instance]]] | keyword[def] identifier[build_pub_dates] ( identifier[article] , identifier[pub_dates] ):
literal[string]
keyword[for] identifier[pub_date] keyword[in] identifier[pub_dates] :
keyword[if] identifier[pub_date] . identifier[get] ( literal[string] ):
identifier[date_instance] = identifier[ea] . identifier[ArticleDate] ( identifier[pub_date] . identifier[get] ( literal[string] ),
identifier[pub_date] . identifier[get] ( literal[string] ))
keyword[elif] identifier[pub_date] . identifier[get] ( literal[string] ):
identifier[date_instance] = identifier[ea] . identifier[ArticleDate] ( identifier[pub_date] . identifier[get] ( literal[string] ),
identifier[pub_date] . identifier[get] ( literal[string] ))
identifier[utils] . identifier[set_attr_if_value] ( identifier[date_instance] , literal[string] , identifier[pub_date] . identifier[get] ( literal[string] ))
identifier[utils] . identifier[set_attr_if_value] ( identifier[date_instance] , literal[string] ,
identifier[pub_date] . identifier[get] ( literal[string] ))
identifier[utils] . identifier[set_attr_if_value] ( identifier[date_instance] , literal[string] , identifier[pub_date] . identifier[get] ( literal[string] ))
identifier[utils] . identifier[set_attr_if_value] ( identifier[date_instance] , literal[string] , identifier[pub_date] . identifier[get] ( literal[string] ))
identifier[utils] . identifier[set_attr_if_value] ( identifier[date_instance] , literal[string] , identifier[pub_date] . identifier[get] ( literal[string] ))
identifier[article] . identifier[add_date] ( identifier[date_instance] ) | def build_pub_dates(article, pub_dates):
"""convert pub_dates into ArticleDate objects and add them to article"""
for pub_date in pub_dates:
# always want a date type, take it from pub-type if must
if pub_date.get('date-type'):
date_instance = ea.ArticleDate(pub_date.get('date-type'), pub_date.get('date')) # depends on [control=['if'], data=[]]
elif pub_date.get('pub-type'):
date_instance = ea.ArticleDate(pub_date.get('pub-type'), pub_date.get('date')) # depends on [control=['if'], data=[]]
# Set more values
utils.set_attr_if_value(date_instance, 'pub_type', pub_date.get('pub-type'))
utils.set_attr_if_value(date_instance, 'publication_format', pub_date.get('publication-format'))
utils.set_attr_if_value(date_instance, 'day', pub_date.get('day'))
utils.set_attr_if_value(date_instance, 'month', pub_date.get('month'))
utils.set_attr_if_value(date_instance, 'year', pub_date.get('year'))
article.add_date(date_instance) # depends on [control=['for'], data=['pub_date']] |
def live_neighbours(self, i, j):
""" Count the number of live neighbours around point (i, j). """
s = 0 # The total number of live neighbours.
# Loop over all the neighbours.
for x in [i - 1, i, i + 1]:
for y in [j - 1, j, j + 1]:
if (x == i and y == j):
continue # Skip the current point itself - we only want to count the neighbours!
if (x != self.N and y != self.N):
s += self.old_grid[x][y]
# The remaining branches handle the case where the neighbour is off the end of the grid.
# In this case, we loop back round such that the grid becomes a "toroidal array".
elif (x == self.N and y != self.N):
s += self.old_grid[0][y]
elif (x != self.N and y == self.N):
s += self.old_grid[x][0]
else:
s += self.old_grid[0][0]
return s | def function[live_neighbours, parameter[self, i, j]]:
constant[ Count the number of live neighbours around point (i, j). ]
variable[s] assign[=] constant[0]
for taget[name[x]] in starred[list[[<ast.BinOp object at 0x7da20c990fd0>, <ast.Name object at 0x7da20c9900d0>, <ast.BinOp object at 0x7da20c992170>]]] begin[:]
for taget[name[y]] in starred[list[[<ast.BinOp object at 0x7da20c993340>, <ast.Name object at 0x7da20c993c40>, <ast.BinOp object at 0x7da20c990c70>]]] begin[:]
if <ast.BoolOp object at 0x7da20c992680> begin[:]
continue
if <ast.BoolOp object at 0x7da20c9921a0> begin[:]
<ast.AugAssign object at 0x7da20c990b50>
return[name[s]] | keyword[def] identifier[live_neighbours] ( identifier[self] , identifier[i] , identifier[j] ):
literal[string]
identifier[s] = literal[int]
keyword[for] identifier[x] keyword[in] [ identifier[i] - literal[int] , identifier[i] , identifier[i] + literal[int] ]:
keyword[for] identifier[y] keyword[in] [ identifier[j] - literal[int] , identifier[j] , identifier[j] + literal[int] ]:
keyword[if] ( identifier[x] == identifier[i] keyword[and] identifier[y] == identifier[j] ):
keyword[continue]
keyword[if] ( identifier[x] != identifier[self] . identifier[N] keyword[and] identifier[y] != identifier[self] . identifier[N] ):
identifier[s] += identifier[self] . identifier[old_grid] [ identifier[x] ][ identifier[y] ]
keyword[elif] ( identifier[x] == identifier[self] . identifier[N] keyword[and] identifier[y] != identifier[self] . identifier[N] ):
identifier[s] += identifier[self] . identifier[old_grid] [ literal[int] ][ identifier[y] ]
keyword[elif] ( identifier[x] != identifier[self] . identifier[N] keyword[and] identifier[y] == identifier[self] . identifier[N] ):
identifier[s] += identifier[self] . identifier[old_grid] [ identifier[x] ][ literal[int] ]
keyword[else] :
identifier[s] += identifier[self] . identifier[old_grid] [ literal[int] ][ literal[int] ]
keyword[return] identifier[s] | def live_neighbours(self, i, j):
""" Count the number of live neighbours around point (i, j). """
s = 0 # The total number of live neighbours.
# Loop over all the neighbours.
for x in [i - 1, i, i + 1]:
for y in [j - 1, j, j + 1]:
if x == i and y == j:
continue # Skip the current point itself - we only want to count the neighbours! # depends on [control=['if'], data=[]]
if x != self.N and y != self.N:
s += self.old_grid[x][y] # depends on [control=['if'], data=[]]
# The remaining branches handle the case where the neighbour is off the end of the grid.
# In this case, we loop back round such that the grid becomes a "toroidal array".
elif x == self.N and y != self.N:
s += self.old_grid[0][y] # depends on [control=['if'], data=[]]
elif x != self.N and y == self.N:
s += self.old_grid[x][0] # depends on [control=['if'], data=[]]
else:
s += self.old_grid[0][0] # depends on [control=['for'], data=['y']] # depends on [control=['for'], data=['x']]
return s |
def mark_data_dirty(self):
""" Called from item to indicate its data or metadata has changed."""
self.__cache.set_cached_value_dirty(self.__display_item, self.__cache_property_name)
self.__initialize_cache()
self.__cached_value_dirty = True | def function[mark_data_dirty, parameter[self]]:
constant[ Called from item to indicate its data or metadata has changed.]
call[name[self].__cache.set_cached_value_dirty, parameter[name[self].__display_item, name[self].__cache_property_name]]
call[name[self].__initialize_cache, parameter[]]
name[self].__cached_value_dirty assign[=] constant[True] | keyword[def] identifier[mark_data_dirty] ( identifier[self] ):
literal[string]
identifier[self] . identifier[__cache] . identifier[set_cached_value_dirty] ( identifier[self] . identifier[__display_item] , identifier[self] . identifier[__cache_property_name] )
identifier[self] . identifier[__initialize_cache] ()
identifier[self] . identifier[__cached_value_dirty] = keyword[True] | def mark_data_dirty(self):
""" Called from item to indicate its data or metadata has changed."""
self.__cache.set_cached_value_dirty(self.__display_item, self.__cache_property_name)
self.__initialize_cache()
self.__cached_value_dirty = True |
def generate_datasets_summary(self, catalog, export_path=None):
"""Genera un informe sobre los datasets presentes en un catálogo,
indicando para cada uno:
- Índice en la lista catalog["dataset"]
- Título
- Identificador
- Cantidad de distribuciones
- Estado de sus metadatos ["OK"|"ERROR"]
Es utilizada por la rutina diaria de `libreria-catalogos` para reportar
sobre los datasets de los catálogos mantenidos.
Args:
catalog (str o dict): Path a un catálogo en cualquier formato,
JSON, XLSX, o diccionario de python.
export_path (str): Path donde exportar el informe generado (en
formato XLSX o CSV). Si se especifica, el método no devolverá
nada.
Returns:
list: Contiene tantos dicts como datasets estén presentes en
`catalogs`, con los datos antes mencionados.
"""
catalog = readers.read_catalog(catalog)
# Trato de leer todos los datasets bien formados de la lista
# catalog["dataset"], si existe.
if "dataset" in catalog and isinstance(catalog["dataset"], list):
datasets = [d if isinstance(d, dict) else {} for d in
catalog["dataset"]]
else:
# Si no, considero que no hay datasets presentes
datasets = []
validation = self.validate_catalog(catalog)["error"]["dataset"]
def info_dataset(index, dataset):
"""Recolecta información básica de un dataset."""
info = OrderedDict()
info["indice"] = index
info["titulo"] = dataset.get("title")
info["identificador"] = dataset.get("identifier")
info["estado_metadatos"] = validation[index]["status"]
info["cant_errores"] = len(validation[index]["errors"])
info["cant_distribuciones"] = len(dataset["distribution"])
return info
summary = [info_dataset(i, ds) for i, ds in enumerate(datasets)]
if export_path:
writers.write_table(summary, export_path)
else:
return summary | def function[generate_datasets_summary, parameter[self, catalog, export_path]]:
constant[Genera un informe sobre los datasets presentes en un catálogo,
indicando para cada uno:
- Índice en la lista catalog["dataset"]
- Título
- Identificador
- Cantidad de distribuciones
- Estado de sus metadatos ["OK"|"ERROR"]
Es utilizada por la rutina diaria de `libreria-catalogos` para reportar
sobre los datasets de los catálogos mantenidos.
Args:
catalog (str o dict): Path a un catálogo en cualquier formato,
JSON, XLSX, o diccionario de python.
export_path (str): Path donde exportar el informe generado (en
formato XLSX o CSV). Si se especifica, el método no devolverá
nada.
Returns:
list: Contiene tantos dicts como datasets estén presentes en
`catalogs`, con los datos antes mencionados.
]
variable[catalog] assign[=] call[name[readers].read_catalog, parameter[name[catalog]]]
if <ast.BoolOp object at 0x7da18f58c6a0> begin[:]
variable[datasets] assign[=] <ast.ListComp object at 0x7da18f58f250>
variable[validation] assign[=] call[call[call[name[self].validate_catalog, parameter[name[catalog]]]][constant[error]]][constant[dataset]]
def function[info_dataset, parameter[index, dataset]]:
constant[Recolecta información básica de un dataset.]
variable[info] assign[=] call[name[OrderedDict], parameter[]]
call[name[info]][constant[indice]] assign[=] name[index]
call[name[info]][constant[titulo]] assign[=] call[name[dataset].get, parameter[constant[title]]]
call[name[info]][constant[identificador]] assign[=] call[name[dataset].get, parameter[constant[identifier]]]
call[name[info]][constant[estado_metadatos]] assign[=] call[call[name[validation]][name[index]]][constant[status]]
call[name[info]][constant[cant_errores]] assign[=] call[name[len], parameter[call[call[name[validation]][name[index]]][constant[errors]]]]
call[name[info]][constant[cant_distribuciones]] assign[=] call[name[len], parameter[call[name[dataset]][constant[distribution]]]]
return[name[info]]
variable[summary] assign[=] <ast.ListComp object at 0x7da1b04fd1b0>
if name[export_path] begin[:]
call[name[writers].write_table, parameter[name[summary], name[export_path]]] | keyword[def] identifier[generate_datasets_summary] ( identifier[self] , identifier[catalog] , identifier[export_path] = keyword[None] ):
literal[string]
identifier[catalog] = identifier[readers] . identifier[read_catalog] ( identifier[catalog] )
keyword[if] literal[string] keyword[in] identifier[catalog] keyword[and] identifier[isinstance] ( identifier[catalog] [ literal[string] ], identifier[list] ):
identifier[datasets] =[ identifier[d] keyword[if] identifier[isinstance] ( identifier[d] , identifier[dict] ) keyword[else] {} keyword[for] identifier[d] keyword[in]
identifier[catalog] [ literal[string] ]]
keyword[else] :
identifier[datasets] =[]
identifier[validation] = identifier[self] . identifier[validate_catalog] ( identifier[catalog] )[ literal[string] ][ literal[string] ]
keyword[def] identifier[info_dataset] ( identifier[index] , identifier[dataset] ):
literal[string]
identifier[info] = identifier[OrderedDict] ()
identifier[info] [ literal[string] ]= identifier[index]
identifier[info] [ literal[string] ]= identifier[dataset] . identifier[get] ( literal[string] )
identifier[info] [ literal[string] ]= identifier[dataset] . identifier[get] ( literal[string] )
identifier[info] [ literal[string] ]= identifier[validation] [ identifier[index] ][ literal[string] ]
identifier[info] [ literal[string] ]= identifier[len] ( identifier[validation] [ identifier[index] ][ literal[string] ])
identifier[info] [ literal[string] ]= identifier[len] ( identifier[dataset] [ literal[string] ])
keyword[return] identifier[info]
identifier[summary] =[ identifier[info_dataset] ( identifier[i] , identifier[ds] ) keyword[for] identifier[i] , identifier[ds] keyword[in] identifier[enumerate] ( identifier[datasets] )]
keyword[if] identifier[export_path] :
identifier[writers] . identifier[write_table] ( identifier[summary] , identifier[export_path] )
keyword[else] :
keyword[return] identifier[summary] | def generate_datasets_summary(self, catalog, export_path=None):
"""Genera un informe sobre los datasets presentes en un catálogo,
indicando para cada uno:
- Índice en la lista catalog["dataset"]
- Título
- Identificador
- Cantidad de distribuciones
- Estado de sus metadatos ["OK"|"ERROR"]
Es utilizada por la rutina diaria de `libreria-catalogos` para reportar
sobre los datasets de los catálogos mantenidos.
Args:
catalog (str o dict): Path a un catálogo en cualquier formato,
JSON, XLSX, o diccionario de python.
export_path (str): Path donde exportar el informe generado (en
formato XLSX o CSV). Si se especifica, el método no devolverá
nada.
Returns:
list: Contiene tantos dicts como datasets estén presentes en
`catalogs`, con los datos antes mencionados.
"""
catalog = readers.read_catalog(catalog)
# Trato de leer todos los datasets bien formados de la lista
# catalog["dataset"], si existe.
if 'dataset' in catalog and isinstance(catalog['dataset'], list):
datasets = [d if isinstance(d, dict) else {} for d in catalog['dataset']] # depends on [control=['if'], data=[]]
else:
# Si no, considero que no hay datasets presentes
datasets = []
validation = self.validate_catalog(catalog)['error']['dataset']
def info_dataset(index, dataset):
"""Recolecta información básica de un dataset."""
info = OrderedDict()
info['indice'] = index
info['titulo'] = dataset.get('title')
info['identificador'] = dataset.get('identifier')
info['estado_metadatos'] = validation[index]['status']
info['cant_errores'] = len(validation[index]['errors'])
info['cant_distribuciones'] = len(dataset['distribution'])
return info
summary = [info_dataset(i, ds) for (i, ds) in enumerate(datasets)]
if export_path:
writers.write_table(summary, export_path) # depends on [control=['if'], data=[]]
else:
return summary |
def isgood(name):
""" Whether name should be installed """
if not isbad(name):
if name.endswith('.py') or name.endswith('.json') or name.endswith('.tar'):
return True
return False | def function[isgood, parameter[name]]:
constant[ Whether name should be installed ]
if <ast.UnaryOp object at 0x7da18dc9af80> begin[:]
if <ast.BoolOp object at 0x7da18dc99330> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[isgood] ( identifier[name] ):
literal[string]
keyword[if] keyword[not] identifier[isbad] ( identifier[name] ):
keyword[if] identifier[name] . identifier[endswith] ( literal[string] ) keyword[or] identifier[name] . identifier[endswith] ( literal[string] ) keyword[or] identifier[name] . identifier[endswith] ( literal[string] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def isgood(name):
""" Whether name should be installed """
if not isbad(name):
if name.endswith('.py') or name.endswith('.json') or name.endswith('.tar'):
return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return False |
def Levenshtein_Distance(w1, w2):
"""
Computes Levenshtein Distance between two words
Args:
:param w1: str
:param w2: str
:return: int
Examples:
>>> Levenshtein.Levenshtein_Distance('noctis', 'noctem')
2
>>> Levenshtein.Levenshtein_Distance('nox', 'nochem')
4
>>> Levenshtein.Levenshtein_Distance('orbis', 'robis')
2
"""
m, n = len(w1), len(w2)
v1 = [i for i in range(n + 1)]
v2 = [0 for i in range(n + 1)]
for i in range(m):
v2[0] = i + 1
for j in range(n):
delCost = v1[j + 1] + 1
insCost = v2[j] + 1
subCost = v1[j]
if w1[i] != w2[j]: subCost += 1
v2[j + 1] = min(delCost, insCost, subCost)
v1, v2 = v2, v1
return v1[-1] | def function[Levenshtein_Distance, parameter[w1, w2]]:
constant[
Computes Levenshtein Distance between two words
Args:
:param w1: str
:param w2: str
:return: int
Examples:
>>> Levenshtein.Levenshtein_Distance('noctis', 'noctem')
2
>>> Levenshtein.Levenshtein_Distance('nox', 'nochem')
4
>>> Levenshtein.Levenshtein_Distance('orbis', 'robis')
2
]
<ast.Tuple object at 0x7da20e9579a0> assign[=] tuple[[<ast.Call object at 0x7da20e955b70>, <ast.Call object at 0x7da20e956b60>]]
variable[v1] assign[=] <ast.ListComp object at 0x7da20e957040>
variable[v2] assign[=] <ast.ListComp object at 0x7da20e955540>
for taget[name[i]] in starred[call[name[range], parameter[name[m]]]] begin[:]
call[name[v2]][constant[0]] assign[=] binary_operation[name[i] + constant[1]]
for taget[name[j]] in starred[call[name[range], parameter[name[n]]]] begin[:]
variable[delCost] assign[=] binary_operation[call[name[v1]][binary_operation[name[j] + constant[1]]] + constant[1]]
variable[insCost] assign[=] binary_operation[call[name[v2]][name[j]] + constant[1]]
variable[subCost] assign[=] call[name[v1]][name[j]]
if compare[call[name[w1]][name[i]] not_equal[!=] call[name[w2]][name[j]]] begin[:]
<ast.AugAssign object at 0x7da20e954e80>
call[name[v2]][binary_operation[name[j] + constant[1]]] assign[=] call[name[min], parameter[name[delCost], name[insCost], name[subCost]]]
<ast.Tuple object at 0x7da20c7c8340> assign[=] tuple[[<ast.Name object at 0x7da20c7c8130>, <ast.Name object at 0x7da20c7c81f0>]]
return[call[name[v1]][<ast.UnaryOp object at 0x7da20c7c9750>]] | keyword[def] identifier[Levenshtein_Distance] ( identifier[w1] , identifier[w2] ):
literal[string]
identifier[m] , identifier[n] = identifier[len] ( identifier[w1] ), identifier[len] ( identifier[w2] )
identifier[v1] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] + literal[int] )]
identifier[v2] =[ literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] + literal[int] )]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[m] ):
identifier[v2] [ literal[int] ]= identifier[i] + literal[int]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[n] ):
identifier[delCost] = identifier[v1] [ identifier[j] + literal[int] ]+ literal[int]
identifier[insCost] = identifier[v2] [ identifier[j] ]+ literal[int]
identifier[subCost] = identifier[v1] [ identifier[j] ]
keyword[if] identifier[w1] [ identifier[i] ]!= identifier[w2] [ identifier[j] ]: identifier[subCost] += literal[int]
identifier[v2] [ identifier[j] + literal[int] ]= identifier[min] ( identifier[delCost] , identifier[insCost] , identifier[subCost] )
identifier[v1] , identifier[v2] = identifier[v2] , identifier[v1]
keyword[return] identifier[v1] [- literal[int] ] | def Levenshtein_Distance(w1, w2):
"""
Computes Levenshtein Distance between two words
Args:
:param w1: str
:param w2: str
:return: int
Examples:
>>> Levenshtein.Levenshtein_Distance('noctis', 'noctem')
2
>>> Levenshtein.Levenshtein_Distance('nox', 'nochem')
4
>>> Levenshtein.Levenshtein_Distance('orbis', 'robis')
2
"""
(m, n) = (len(w1), len(w2))
v1 = [i for i in range(n + 1)]
v2 = [0 for i in range(n + 1)]
for i in range(m):
v2[0] = i + 1
for j in range(n):
delCost = v1[j + 1] + 1
insCost = v2[j] + 1
subCost = v1[j]
if w1[i] != w2[j]:
subCost += 1 # depends on [control=['if'], data=[]]
v2[j + 1] = min(delCost, insCost, subCost) # depends on [control=['for'], data=['j']]
(v1, v2) = (v2, v1) # depends on [control=['for'], data=['i']]
return v1[-1] |
def _normalize_server_url(self, server):
"""
Check if the server URL starts with a HTTP or HTTPS scheme, fall back to http if not present
"""
server = server if server.startswith(("http://", "https://")) else "http://{}".format(server)
return server | def function[_normalize_server_url, parameter[self, server]]:
constant[
Check if the server URL starts with a HTTP or HTTPS scheme, fall back to http if not present
]
variable[server] assign[=] <ast.IfExp object at 0x7da20c7ca5c0>
return[name[server]] | keyword[def] identifier[_normalize_server_url] ( identifier[self] , identifier[server] ):
literal[string]
identifier[server] = identifier[server] keyword[if] identifier[server] . identifier[startswith] (( literal[string] , literal[string] )) keyword[else] literal[string] . identifier[format] ( identifier[server] )
keyword[return] identifier[server] | def _normalize_server_url(self, server):
"""
Check if the server URL starts with a HTTP or HTTPS scheme, fall back to http if not present
"""
server = server if server.startswith(('http://', 'https://')) else 'http://{}'.format(server)
return server |
def get_fields_with_environment_context(self, db_name, table_name, environment_context):
"""
Parameters:
- db_name
- table_name
- environment_context
"""
self.send_get_fields_with_environment_context(db_name, table_name, environment_context)
return self.recv_get_fields_with_environment_context() | def function[get_fields_with_environment_context, parameter[self, db_name, table_name, environment_context]]:
constant[
Parameters:
- db_name
- table_name
- environment_context
]
call[name[self].send_get_fields_with_environment_context, parameter[name[db_name], name[table_name], name[environment_context]]]
return[call[name[self].recv_get_fields_with_environment_context, parameter[]]] | keyword[def] identifier[get_fields_with_environment_context] ( identifier[self] , identifier[db_name] , identifier[table_name] , identifier[environment_context] ):
literal[string]
identifier[self] . identifier[send_get_fields_with_environment_context] ( identifier[db_name] , identifier[table_name] , identifier[environment_context] )
keyword[return] identifier[self] . identifier[recv_get_fields_with_environment_context] () | def get_fields_with_environment_context(self, db_name, table_name, environment_context):
"""
Parameters:
- db_name
- table_name
- environment_context
"""
self.send_get_fields_with_environment_context(db_name, table_name, environment_context)
return self.recv_get_fields_with_environment_context() |
def satisfaction_rating_show(self, id, **kwargs):
"https://developer.zendesk.com/rest_api/docs/core/satisfaction_ratings#show-satisfaction-rating"
api_path = "/api/v2/satisfaction_ratings/{id}.json"
api_path = api_path.format(id=id)
return self.call(api_path, **kwargs) | def function[satisfaction_rating_show, parameter[self, id]]:
constant[https://developer.zendesk.com/rest_api/docs/core/satisfaction_ratings#show-satisfaction-rating]
variable[api_path] assign[=] constant[/api/v2/satisfaction_ratings/{id}.json]
variable[api_path] assign[=] call[name[api_path].format, parameter[]]
return[call[name[self].call, parameter[name[api_path]]]] | keyword[def] identifier[satisfaction_rating_show] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[api_path] = literal[string]
identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[id] = identifier[id] )
keyword[return] identifier[self] . identifier[call] ( identifier[api_path] ,** identifier[kwargs] ) | def satisfaction_rating_show(self, id, **kwargs):
"""https://developer.zendesk.com/rest_api/docs/core/satisfaction_ratings#show-satisfaction-rating"""
api_path = '/api/v2/satisfaction_ratings/{id}.json'
api_path = api_path.format(id=id)
return self.call(api_path, **kwargs) |
def main(argv=None):
"""Main command line interface."""
if argv is None:
argv = sys.argv[1:]
cli = CommandLineTool()
return cli.run(argv) | def function[main, parameter[argv]]:
constant[Main command line interface.]
if compare[name[argv] is constant[None]] begin[:]
variable[argv] assign[=] call[name[sys].argv][<ast.Slice object at 0x7da18c4cf280>]
variable[cli] assign[=] call[name[CommandLineTool], parameter[]]
return[call[name[cli].run, parameter[name[argv]]]] | keyword[def] identifier[main] ( identifier[argv] = keyword[None] ):
literal[string]
keyword[if] identifier[argv] keyword[is] keyword[None] :
identifier[argv] = identifier[sys] . identifier[argv] [ literal[int] :]
identifier[cli] = identifier[CommandLineTool] ()
keyword[return] identifier[cli] . identifier[run] ( identifier[argv] ) | def main(argv=None):
"""Main command line interface."""
if argv is None:
argv = sys.argv[1:] # depends on [control=['if'], data=['argv']]
cli = CommandLineTool()
return cli.run(argv) |
def _get_set(self, key, operation, create=False):
"""
Get (and maybe create) a set by name.
"""
return self._get_by_type(key, operation, create, b'set', set()) | def function[_get_set, parameter[self, key, operation, create]]:
constant[
Get (and maybe create) a set by name.
]
return[call[name[self]._get_by_type, parameter[name[key], name[operation], name[create], constant[b'set'], call[name[set], parameter[]]]]] | keyword[def] identifier[_get_set] ( identifier[self] , identifier[key] , identifier[operation] , identifier[create] = keyword[False] ):
literal[string]
keyword[return] identifier[self] . identifier[_get_by_type] ( identifier[key] , identifier[operation] , identifier[create] , literal[string] , identifier[set] ()) | def _get_set(self, key, operation, create=False):
"""
Get (and maybe create) a set by name.
"""
return self._get_by_type(key, operation, create, b'set', set()) |
def _mcmc_fit(self, scale=1.0, nsims=10000, printer=True, method="M-H",
cov_matrix=None, map_start=True, quiet_progress=False, **kwargs):
""" Performs random walk Metropolis-Hastings
Parameters
----------
scale : float
Default starting scale
nsims : int
Number of simulations
printer : Boolean
Whether to print results or not
method : str
What type of MCMC
cov_matrix: None or np.array
Can optionally provide a covariance matrix for M-H.
"""
scale = 2.38/np.sqrt(self.z_no)
# Get Mode and Inverse Hessian information
if self.model_type in ['GPNARX', 'GPR', 'GP'] or map_start is True:
y = self.fit(method='PML', printer=False)
starting_values = y.z.get_z_values()
# TODO: Bad use of a try/except - remove in future
try:
ses = np.abs(np.diag(y.ihessian))
if len(ses[np.isnan(ses)]) != 0:
ses = np.ones(ses.shape[0])
cov_matrix = np.zeros((len(ses), len(ses)))
np.fill_diagonal(cov_matrix, ses)
except:
pass
else:
starting_values = self.latent_variables.get_z_starting_values()
if method == "M-H":
sampler = MetropolisHastings(self.neg_logposterior, scale, nsims, starting_values,
cov_matrix=cov_matrix, model_object=None, quiet_progress=quiet_progress)
chain, mean_est, median_est, upper_95_est, lower_95_est = sampler.sample()
else:
raise Exception("Method not recognized!")
if len(self.latent_variables.z_list) == 1:
self.latent_variables.set_z_values(mean_est,'M-H',None,chain)
mean_est = self.latent_variables.z_list[0].prior.transform(mean_est)
median_est = self.latent_variables.z_list[0].prior.transform(median_est)
upper_95_est = self.latent_variables.z_list[0].prior.transform(upper_95_est)
lower_95_est = self.latent_variables.z_list[0].prior.transform(lower_95_est)
else:
self.latent_variables.set_z_values(mean_est, 'M-H', None, chain)
for k in range(len(chain)):
mean_est[k] = self.latent_variables.z_list[k].prior.transform(mean_est[k])
median_est[k] = self.latent_variables.z_list[k].prior.transform(median_est[k])
upper_95_est[k] = self.latent_variables.z_list[k].prior.transform(upper_95_est[k])
lower_95_est[k] = self.latent_variables.z_list[k].prior.transform(lower_95_est[k])
self.latent_variables.estimation_method = 'M-H'
theta, Y, scores, states, states_var, X_names = self._categorize_model_output(mean_est)
# Change this in future
try:
latent_variables_store = self.latent_variables.copy()
except:
latent_variables_store = self.latent_variables
return MCMCResults(data_name=self.data_name,X_names=X_names,model_name=self.model_name,
model_type=self.model_type, latent_variables=latent_variables_store,data=Y,index=self.index,
multivariate_model=self.multivariate_model,objective_object=self.neg_logposterior,
method='Metropolis Hastings',samples=chain,mean_est=mean_est,median_est=median_est,lower_95_est=lower_95_est,
upper_95_est=upper_95_est,signal=theta,scores=scores, z_hide=self._z_hide,max_lag=self.max_lag,
states=states,states_var=states_var) | def function[_mcmc_fit, parameter[self, scale, nsims, printer, method, cov_matrix, map_start, quiet_progress]]:
constant[ Performs random walk Metropolis-Hastings
Parameters
----------
scale : float
Default starting scale
nsims : int
Number of simulations
printer : Boolean
Whether to print results or not
method : str
What type of MCMC
cov_matrix: None or np.array
Can optionally provide a covariance matrix for M-H.
]
variable[scale] assign[=] binary_operation[constant[2.38] / call[name[np].sqrt, parameter[name[self].z_no]]]
if <ast.BoolOp object at 0x7da18bc704f0> begin[:]
variable[y] assign[=] call[name[self].fit, parameter[]]
variable[starting_values] assign[=] call[name[y].z.get_z_values, parameter[]]
<ast.Try object at 0x7da18bc72230>
if compare[name[method] equal[==] constant[M-H]] begin[:]
variable[sampler] assign[=] call[name[MetropolisHastings], parameter[name[self].neg_logposterior, name[scale], name[nsims], name[starting_values]]]
<ast.Tuple object at 0x7da18f720bb0> assign[=] call[name[sampler].sample, parameter[]]
if compare[call[name[len], parameter[name[self].latent_variables.z_list]] equal[==] constant[1]] begin[:]
call[name[self].latent_variables.set_z_values, parameter[name[mean_est], constant[M-H], constant[None], name[chain]]]
variable[mean_est] assign[=] call[call[name[self].latent_variables.z_list][constant[0]].prior.transform, parameter[name[mean_est]]]
variable[median_est] assign[=] call[call[name[self].latent_variables.z_list][constant[0]].prior.transform, parameter[name[median_est]]]
variable[upper_95_est] assign[=] call[call[name[self].latent_variables.z_list][constant[0]].prior.transform, parameter[name[upper_95_est]]]
variable[lower_95_est] assign[=] call[call[name[self].latent_variables.z_list][constant[0]].prior.transform, parameter[name[lower_95_est]]]
name[self].latent_variables.estimation_method assign[=] constant[M-H]
<ast.Tuple object at 0x7da2044c1990> assign[=] call[name[self]._categorize_model_output, parameter[name[mean_est]]]
<ast.Try object at 0x7da2044c3a00>
return[call[name[MCMCResults], parameter[]]] | keyword[def] identifier[_mcmc_fit] ( identifier[self] , identifier[scale] = literal[int] , identifier[nsims] = literal[int] , identifier[printer] = keyword[True] , identifier[method] = literal[string] ,
identifier[cov_matrix] = keyword[None] , identifier[map_start] = keyword[True] , identifier[quiet_progress] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[scale] = literal[int] / identifier[np] . identifier[sqrt] ( identifier[self] . identifier[z_no] )
keyword[if] identifier[self] . identifier[model_type] keyword[in] [ literal[string] , literal[string] , literal[string] ] keyword[or] identifier[map_start] keyword[is] keyword[True] :
identifier[y] = identifier[self] . identifier[fit] ( identifier[method] = literal[string] , identifier[printer] = keyword[False] )
identifier[starting_values] = identifier[y] . identifier[z] . identifier[get_z_values] ()
keyword[try] :
identifier[ses] = identifier[np] . identifier[abs] ( identifier[np] . identifier[diag] ( identifier[y] . identifier[ihessian] ))
keyword[if] identifier[len] ( identifier[ses] [ identifier[np] . identifier[isnan] ( identifier[ses] )])!= literal[int] :
identifier[ses] = identifier[np] . identifier[ones] ( identifier[ses] . identifier[shape] [ literal[int] ])
identifier[cov_matrix] = identifier[np] . identifier[zeros] (( identifier[len] ( identifier[ses] ), identifier[len] ( identifier[ses] )))
identifier[np] . identifier[fill_diagonal] ( identifier[cov_matrix] , identifier[ses] )
keyword[except] :
keyword[pass]
keyword[else] :
identifier[starting_values] = identifier[self] . identifier[latent_variables] . identifier[get_z_starting_values] ()
keyword[if] identifier[method] == literal[string] :
identifier[sampler] = identifier[MetropolisHastings] ( identifier[self] . identifier[neg_logposterior] , identifier[scale] , identifier[nsims] , identifier[starting_values] ,
identifier[cov_matrix] = identifier[cov_matrix] , identifier[model_object] = keyword[None] , identifier[quiet_progress] = identifier[quiet_progress] )
identifier[chain] , identifier[mean_est] , identifier[median_est] , identifier[upper_95_est] , identifier[lower_95_est] = identifier[sampler] . identifier[sample] ()
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] identifier[len] ( identifier[self] . identifier[latent_variables] . identifier[z_list] )== literal[int] :
identifier[self] . identifier[latent_variables] . identifier[set_z_values] ( identifier[mean_est] , literal[string] , keyword[None] , identifier[chain] )
identifier[mean_est] = identifier[self] . identifier[latent_variables] . identifier[z_list] [ literal[int] ]. identifier[prior] . identifier[transform] ( identifier[mean_est] )
identifier[median_est] = identifier[self] . identifier[latent_variables] . identifier[z_list] [ literal[int] ]. identifier[prior] . identifier[transform] ( identifier[median_est] )
identifier[upper_95_est] = identifier[self] . identifier[latent_variables] . identifier[z_list] [ literal[int] ]. identifier[prior] . identifier[transform] ( identifier[upper_95_est] )
identifier[lower_95_est] = identifier[self] . identifier[latent_variables] . identifier[z_list] [ literal[int] ]. identifier[prior] . identifier[transform] ( identifier[lower_95_est] )
keyword[else] :
identifier[self] . identifier[latent_variables] . identifier[set_z_values] ( identifier[mean_est] , literal[string] , keyword[None] , identifier[chain] )
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[chain] )):
identifier[mean_est] [ identifier[k] ]= identifier[self] . identifier[latent_variables] . identifier[z_list] [ identifier[k] ]. identifier[prior] . identifier[transform] ( identifier[mean_est] [ identifier[k] ])
identifier[median_est] [ identifier[k] ]= identifier[self] . identifier[latent_variables] . identifier[z_list] [ identifier[k] ]. identifier[prior] . identifier[transform] ( identifier[median_est] [ identifier[k] ])
identifier[upper_95_est] [ identifier[k] ]= identifier[self] . identifier[latent_variables] . identifier[z_list] [ identifier[k] ]. identifier[prior] . identifier[transform] ( identifier[upper_95_est] [ identifier[k] ])
identifier[lower_95_est] [ identifier[k] ]= identifier[self] . identifier[latent_variables] . identifier[z_list] [ identifier[k] ]. identifier[prior] . identifier[transform] ( identifier[lower_95_est] [ identifier[k] ])
identifier[self] . identifier[latent_variables] . identifier[estimation_method] = literal[string]
identifier[theta] , identifier[Y] , identifier[scores] , identifier[states] , identifier[states_var] , identifier[X_names] = identifier[self] . identifier[_categorize_model_output] ( identifier[mean_est] )
keyword[try] :
identifier[latent_variables_store] = identifier[self] . identifier[latent_variables] . identifier[copy] ()
keyword[except] :
identifier[latent_variables_store] = identifier[self] . identifier[latent_variables]
keyword[return] identifier[MCMCResults] ( identifier[data_name] = identifier[self] . identifier[data_name] , identifier[X_names] = identifier[X_names] , identifier[model_name] = identifier[self] . identifier[model_name] ,
identifier[model_type] = identifier[self] . identifier[model_type] , identifier[latent_variables] = identifier[latent_variables_store] , identifier[data] = identifier[Y] , identifier[index] = identifier[self] . identifier[index] ,
identifier[multivariate_model] = identifier[self] . identifier[multivariate_model] , identifier[objective_object] = identifier[self] . identifier[neg_logposterior] ,
identifier[method] = literal[string] , identifier[samples] = identifier[chain] , identifier[mean_est] = identifier[mean_est] , identifier[median_est] = identifier[median_est] , identifier[lower_95_est] = identifier[lower_95_est] ,
identifier[upper_95_est] = identifier[upper_95_est] , identifier[signal] = identifier[theta] , identifier[scores] = identifier[scores] , identifier[z_hide] = identifier[self] . identifier[_z_hide] , identifier[max_lag] = identifier[self] . identifier[max_lag] ,
identifier[states] = identifier[states] , identifier[states_var] = identifier[states_var] ) | def _mcmc_fit(self, scale=1.0, nsims=10000, printer=True, method='M-H', cov_matrix=None, map_start=True, quiet_progress=False, **kwargs):
""" Performs random walk Metropolis-Hastings
Parameters
----------
scale : float
Default starting scale
nsims : int
Number of simulations
printer : Boolean
Whether to print results or not
method : str
What type of MCMC
cov_matrix: None or np.array
Can optionally provide a covariance matrix for M-H.
"""
scale = 2.38 / np.sqrt(self.z_no)
# Get Mode and Inverse Hessian information
if self.model_type in ['GPNARX', 'GPR', 'GP'] or map_start is True:
y = self.fit(method='PML', printer=False)
starting_values = y.z.get_z_values()
# TODO: Bad use of a try/except - remove in future
try:
ses = np.abs(np.diag(y.ihessian))
if len(ses[np.isnan(ses)]) != 0:
ses = np.ones(ses.shape[0]) # depends on [control=['if'], data=[]]
cov_matrix = np.zeros((len(ses), len(ses)))
np.fill_diagonal(cov_matrix, ses) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
starting_values = self.latent_variables.get_z_starting_values()
if method == 'M-H':
sampler = MetropolisHastings(self.neg_logposterior, scale, nsims, starting_values, cov_matrix=cov_matrix, model_object=None, quiet_progress=quiet_progress)
(chain, mean_est, median_est, upper_95_est, lower_95_est) = sampler.sample() # depends on [control=['if'], data=[]]
else:
raise Exception('Method not recognized!')
if len(self.latent_variables.z_list) == 1:
self.latent_variables.set_z_values(mean_est, 'M-H', None, chain)
mean_est = self.latent_variables.z_list[0].prior.transform(mean_est)
median_est = self.latent_variables.z_list[0].prior.transform(median_est)
upper_95_est = self.latent_variables.z_list[0].prior.transform(upper_95_est)
lower_95_est = self.latent_variables.z_list[0].prior.transform(lower_95_est) # depends on [control=['if'], data=[]]
else:
self.latent_variables.set_z_values(mean_est, 'M-H', None, chain)
for k in range(len(chain)):
mean_est[k] = self.latent_variables.z_list[k].prior.transform(mean_est[k])
median_est[k] = self.latent_variables.z_list[k].prior.transform(median_est[k])
upper_95_est[k] = self.latent_variables.z_list[k].prior.transform(upper_95_est[k])
lower_95_est[k] = self.latent_variables.z_list[k].prior.transform(lower_95_est[k]) # depends on [control=['for'], data=['k']]
self.latent_variables.estimation_method = 'M-H'
(theta, Y, scores, states, states_var, X_names) = self._categorize_model_output(mean_est)
# Change this in future
try:
latent_variables_store = self.latent_variables.copy() # depends on [control=['try'], data=[]]
except:
latent_variables_store = self.latent_variables # depends on [control=['except'], data=[]]
return MCMCResults(data_name=self.data_name, X_names=X_names, model_name=self.model_name, model_type=self.model_type, latent_variables=latent_variables_store, data=Y, index=self.index, multivariate_model=self.multivariate_model, objective_object=self.neg_logposterior, method='Metropolis Hastings', samples=chain, mean_est=mean_est, median_est=median_est, lower_95_est=lower_95_est, upper_95_est=upper_95_est, signal=theta, scores=scores, z_hide=self._z_hide, max_lag=self.max_lag, states=states, states_var=states_var) |
def bus_remove_match_action(self, rule, func, user_data, error = None) :
"removes a message filter previously installed with bus_add_match_action."
rulekey = format_rule(rule)
rule = unformat_rule(rule)
self._match_actions[rulekey].actions.remove(_MatchActionEntry._Action(func, user_data))
if len(self._match_actions[rulekey].actions) == 0 :
self.bus_remove_match(rulekey, error) # shouldn’t fail!
del self._match_actions[rulekey]
if len(self._match_actions) == 0 :
self.remove_filter(self._rule_action_match, None) | def function[bus_remove_match_action, parameter[self, rule, func, user_data, error]]:
constant[removes a message filter previously installed with bus_add_match_action.]
variable[rulekey] assign[=] call[name[format_rule], parameter[name[rule]]]
variable[rule] assign[=] call[name[unformat_rule], parameter[name[rule]]]
call[call[name[self]._match_actions][name[rulekey]].actions.remove, parameter[call[name[_MatchActionEntry]._Action, parameter[name[func], name[user_data]]]]]
if compare[call[name[len], parameter[call[name[self]._match_actions][name[rulekey]].actions]] equal[==] constant[0]] begin[:]
call[name[self].bus_remove_match, parameter[name[rulekey], name[error]]]
<ast.Delete object at 0x7da20c992170>
if compare[call[name[len], parameter[name[self]._match_actions]] equal[==] constant[0]] begin[:]
call[name[self].remove_filter, parameter[name[self]._rule_action_match, constant[None]]] | keyword[def] identifier[bus_remove_match_action] ( identifier[self] , identifier[rule] , identifier[func] , identifier[user_data] , identifier[error] = keyword[None] ):
literal[string]
identifier[rulekey] = identifier[format_rule] ( identifier[rule] )
identifier[rule] = identifier[unformat_rule] ( identifier[rule] )
identifier[self] . identifier[_match_actions] [ identifier[rulekey] ]. identifier[actions] . identifier[remove] ( identifier[_MatchActionEntry] . identifier[_Action] ( identifier[func] , identifier[user_data] ))
keyword[if] identifier[len] ( identifier[self] . identifier[_match_actions] [ identifier[rulekey] ]. identifier[actions] )== literal[int] :
identifier[self] . identifier[bus_remove_match] ( identifier[rulekey] , identifier[error] )
keyword[del] identifier[self] . identifier[_match_actions] [ identifier[rulekey] ]
keyword[if] identifier[len] ( identifier[self] . identifier[_match_actions] )== literal[int] :
identifier[self] . identifier[remove_filter] ( identifier[self] . identifier[_rule_action_match] , keyword[None] ) | def bus_remove_match_action(self, rule, func, user_data, error=None):
"""removes a message filter previously installed with bus_add_match_action."""
rulekey = format_rule(rule)
rule = unformat_rule(rule)
self._match_actions[rulekey].actions.remove(_MatchActionEntry._Action(func, user_data))
if len(self._match_actions[rulekey].actions) == 0:
self.bus_remove_match(rulekey, error) # shouldn’t fail!
del self._match_actions[rulekey]
if len(self._match_actions) == 0:
self.remove_filter(self._rule_action_match, None) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def memory_usage(method):
"""Log memory usage before and after a method."""
def wrapper(*args, **kwargs):
logging.info('Memory before method %s is %s.',
method.__name__, runtime.memory_usage().current())
result = method(*args, **kwargs)
logging.info('Memory after method %s is %s',
method.__name__, runtime.memory_usage().current())
return result
return wrapper | def function[memory_usage, parameter[method]]:
constant[Log memory usage before and after a method.]
def function[wrapper, parameter[]]:
call[name[logging].info, parameter[constant[Memory before method %s is %s.], name[method].__name__, call[call[name[runtime].memory_usage, parameter[]].current, parameter[]]]]
variable[result] assign[=] call[name[method], parameter[<ast.Starred object at 0x7da1b0508160>]]
call[name[logging].info, parameter[constant[Memory after method %s is %s], name[method].__name__, call[call[name[runtime].memory_usage, parameter[]].current, parameter[]]]]
return[name[result]]
return[name[wrapper]] | keyword[def] identifier[memory_usage] ( identifier[method] ):
literal[string]
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[logging] . identifier[info] ( literal[string] ,
identifier[method] . identifier[__name__] , identifier[runtime] . identifier[memory_usage] (). identifier[current] ())
identifier[result] = identifier[method] (* identifier[args] ,** identifier[kwargs] )
identifier[logging] . identifier[info] ( literal[string] ,
identifier[method] . identifier[__name__] , identifier[runtime] . identifier[memory_usage] (). identifier[current] ())
keyword[return] identifier[result]
keyword[return] identifier[wrapper] | def memory_usage(method):
"""Log memory usage before and after a method."""
def wrapper(*args, **kwargs):
logging.info('Memory before method %s is %s.', method.__name__, runtime.memory_usage().current())
result = method(*args, **kwargs)
logging.info('Memory after method %s is %s', method.__name__, runtime.memory_usage().current())
return result
return wrapper |
def managed(name, passphrase, entries, force_remove=False):
'''
Create or manage a java keystore.
name
The path to the keystore file
passphrase
The password to the keystore
entries
A list containing an alias, certificate, and optional private_key.
The certificate and private_key can be a file or a string
.. code-block:: yaml
- entries:
- alias: hostname2
certificate: /path/to/cert.crt
private_key: /path/to/key.key
- alias: stringhost
certificate: |
-----BEGIN CERTIFICATE-----
MIICEjCCAXsCAg36MA0GCSqGSIb3DQEBBQUAMIGbMQswCQYDVQQGEwJKUDEOMAwG
...
2VguKv4SWjRFoRkIfIlHX0qVviMhSlNy2ioFLy7JcPZb+v3ftDGywUqcBiVDoea0
-----END CERTIFICATE-----
force_remove
If True will cause the state to remove any entries found in the keystore which are not
defined in the state. The default is False.
Example
.. code-block:: yaml
define_keystore:
keystore.managed:
- name: /path/to/keystore
- passphrase: changeit
- force_remove: True
- entries:
- alias: hostname1
certificate: /path/to/cert.crt
- alias: remotehost
certificate: /path/to/cert2.crt
private_key: /path/to/key2.key
- alias: pillarhost
certificate: {{ salt.pillar.get('path:to:cert') }}
'''
ret = {'changes': {},
'comment': '',
'name': name,
'result': True}
keep_list = []
old_aliases = []
if force_remove:
if os.path.exists(name):
existing_entries = __salt__['keystore.list'](name, passphrase)
for entry in existing_entries:
old_aliases.append(entry.get('alias'))
log.debug("Existing aliases list: %s", old_aliases)
for entry in entries:
update_entry = True
existing_entry = None
if os.path.exists(name):
if force_remove:
keep_list.append(entry['alias'])
existing_entry = __salt__['keystore.list'](name, passphrase, entry['alias'])
if existing_entry:
existing_sha1 = existing_entry[0]['sha1']
new_sha1 = __salt__['x509.read_certificate'](entry['certificate'])['SHA1 Finger Print']
if existing_sha1 == new_sha1:
update_entry = False
if update_entry:
if __opts__['test']:
ret['result'] = None
if existing_entry:
ret['comment'] += "Alias {0} would have been updated\n".format(entry['alias'])
else:
ret['comment'] += "Alias {0} would have been added\n".format(entry['alias'])
else:
if existing_entry:
result = __salt__['keystore.remove'](entry['alias'], name, passphrase)
result = __salt__['keystore.add'](entry['alias'],
name,
passphrase,
entry['certificate'],
private_key=entry.get('private_key', None)
)
if result:
ret['changes'][entry['alias']] = "Updated"
ret['comment'] += "Alias {0} updated.\n".format(entry['alias'])
else:
result = __salt__['keystore.add'](entry['alias'],
name,
passphrase,
entry['certificate'],
private_key=entry.get('private_key', None)
)
if result:
ret['changes'][entry['alias']] = "Added"
ret['comment'] += "Alias {0} added.\n".format(entry['alias'])
if force_remove:
# Determine which aliases need to be removed
remove_list = list(set(old_aliases) - set(keep_list))
log.debug("Will remove: %s", remove_list)
for alias_name in remove_list:
if __opts__['test']:
ret['comment'] += "Alias {0} would have been removed".format(alias_name)
ret['result'] = None
else:
__salt__['keystore.remove'](alias_name, name, passphrase)
ret['changes'][alias_name] = "Removed"
ret['comment'] += "Alias {0} removed.\n".format(alias_name)
if not ret['changes'] and not ret['comment']:
ret['comment'] = "No changes made.\n"
return ret | def function[managed, parameter[name, passphrase, entries, force_remove]]:
constant[
Create or manage a java keystore.
name
The path to the keystore file
passphrase
The password to the keystore
entries
A list containing an alias, certificate, and optional private_key.
The certificate and private_key can be a file or a string
.. code-block:: yaml
- entries:
- alias: hostname2
certificate: /path/to/cert.crt
private_key: /path/to/key.key
- alias: stringhost
certificate: |
-----BEGIN CERTIFICATE-----
MIICEjCCAXsCAg36MA0GCSqGSIb3DQEBBQUAMIGbMQswCQYDVQQGEwJKUDEOMAwG
...
2VguKv4SWjRFoRkIfIlHX0qVviMhSlNy2ioFLy7JcPZb+v3ftDGywUqcBiVDoea0
-----END CERTIFICATE-----
force_remove
If True will cause the state to remove any entries found in the keystore which are not
defined in the state. The default is False.
Example
.. code-block:: yaml
define_keystore:
keystore.managed:
- name: /path/to/keystore
- passphrase: changeit
- force_remove: True
- entries:
- alias: hostname1
certificate: /path/to/cert.crt
- alias: remotehost
certificate: /path/to/cert2.crt
private_key: /path/to/key2.key
- alias: pillarhost
certificate: {{ salt.pillar.get('path:to:cert') }}
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b2046e30>, <ast.Constant object at 0x7da1b2047400>, <ast.Constant object at 0x7da1b2045600>, <ast.Constant object at 0x7da1b2047730>], [<ast.Dict object at 0x7da1b2044940>, <ast.Constant object at 0x7da1b20466b0>, <ast.Name object at 0x7da1b2046a10>, <ast.Constant object at 0x7da1b2044a90>]]
variable[keep_list] assign[=] list[[]]
variable[old_aliases] assign[=] list[[]]
if name[force_remove] begin[:]
if call[name[os].path.exists, parameter[name[name]]] begin[:]
variable[existing_entries] assign[=] call[call[name[__salt__]][constant[keystore.list]], parameter[name[name], name[passphrase]]]
for taget[name[entry]] in starred[name[existing_entries]] begin[:]
call[name[old_aliases].append, parameter[call[name[entry].get, parameter[constant[alias]]]]]
call[name[log].debug, parameter[constant[Existing aliases list: %s], name[old_aliases]]]
for taget[name[entry]] in starred[name[entries]] begin[:]
variable[update_entry] assign[=] constant[True]
variable[existing_entry] assign[=] constant[None]
if call[name[os].path.exists, parameter[name[name]]] begin[:]
if name[force_remove] begin[:]
call[name[keep_list].append, parameter[call[name[entry]][constant[alias]]]]
variable[existing_entry] assign[=] call[call[name[__salt__]][constant[keystore.list]], parameter[name[name], name[passphrase], call[name[entry]][constant[alias]]]]
if name[existing_entry] begin[:]
variable[existing_sha1] assign[=] call[call[name[existing_entry]][constant[0]]][constant[sha1]]
variable[new_sha1] assign[=] call[call[call[name[__salt__]][constant[x509.read_certificate]], parameter[call[name[entry]][constant[certificate]]]]][constant[SHA1 Finger Print]]
if compare[name[existing_sha1] equal[==] name[new_sha1]] begin[:]
variable[update_entry] assign[=] constant[False]
if name[update_entry] begin[:]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[None]
if name[existing_entry] begin[:]
<ast.AugAssign object at 0x7da1b210c340>
if name[force_remove] begin[:]
variable[remove_list] assign[=] call[name[list], parameter[binary_operation[call[name[set], parameter[name[old_aliases]]] - call[name[set], parameter[name[keep_list]]]]]]
call[name[log].debug, parameter[constant[Will remove: %s], name[remove_list]]]
for taget[name[alias_name]] in starred[name[remove_list]] begin[:]
if call[name[__opts__]][constant[test]] begin[:]
<ast.AugAssign object at 0x7da1b2012830>
call[name[ret]][constant[result]] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b20138e0> begin[:]
call[name[ret]][constant[comment]] assign[=] constant[No changes made.
]
return[name[ret]] | keyword[def] identifier[managed] ( identifier[name] , identifier[passphrase] , identifier[entries] , identifier[force_remove] = keyword[False] ):
literal[string]
identifier[ret] ={ literal[string] :{},
literal[string] : literal[string] ,
literal[string] : identifier[name] ,
literal[string] : keyword[True] }
identifier[keep_list] =[]
identifier[old_aliases] =[]
keyword[if] identifier[force_remove] :
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[name] ):
identifier[existing_entries] = identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[passphrase] )
keyword[for] identifier[entry] keyword[in] identifier[existing_entries] :
identifier[old_aliases] . identifier[append] ( identifier[entry] . identifier[get] ( literal[string] ))
identifier[log] . identifier[debug] ( literal[string] , identifier[old_aliases] )
keyword[for] identifier[entry] keyword[in] identifier[entries] :
identifier[update_entry] = keyword[True]
identifier[existing_entry] = keyword[None]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[name] ):
keyword[if] identifier[force_remove] :
identifier[keep_list] . identifier[append] ( identifier[entry] [ literal[string] ])
identifier[existing_entry] = identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[passphrase] , identifier[entry] [ literal[string] ])
keyword[if] identifier[existing_entry] :
identifier[existing_sha1] = identifier[existing_entry] [ literal[int] ][ literal[string] ]
identifier[new_sha1] = identifier[__salt__] [ literal[string] ]( identifier[entry] [ literal[string] ])[ literal[string] ]
keyword[if] identifier[existing_sha1] == identifier[new_sha1] :
identifier[update_entry] = keyword[False]
keyword[if] identifier[update_entry] :
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= keyword[None]
keyword[if] identifier[existing_entry] :
identifier[ret] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[entry] [ literal[string] ])
keyword[else] :
identifier[ret] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[entry] [ literal[string] ])
keyword[else] :
keyword[if] identifier[existing_entry] :
identifier[result] = identifier[__salt__] [ literal[string] ]( identifier[entry] [ literal[string] ], identifier[name] , identifier[passphrase] )
identifier[result] = identifier[__salt__] [ literal[string] ]( identifier[entry] [ literal[string] ],
identifier[name] ,
identifier[passphrase] ,
identifier[entry] [ literal[string] ],
identifier[private_key] = identifier[entry] . identifier[get] ( literal[string] , keyword[None] )
)
keyword[if] identifier[result] :
identifier[ret] [ literal[string] ][ identifier[entry] [ literal[string] ]]= literal[string]
identifier[ret] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[entry] [ literal[string] ])
keyword[else] :
identifier[result] = identifier[__salt__] [ literal[string] ]( identifier[entry] [ literal[string] ],
identifier[name] ,
identifier[passphrase] ,
identifier[entry] [ literal[string] ],
identifier[private_key] = identifier[entry] . identifier[get] ( literal[string] , keyword[None] )
)
keyword[if] identifier[result] :
identifier[ret] [ literal[string] ][ identifier[entry] [ literal[string] ]]= literal[string]
identifier[ret] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[entry] [ literal[string] ])
keyword[if] identifier[force_remove] :
identifier[remove_list] = identifier[list] ( identifier[set] ( identifier[old_aliases] )- identifier[set] ( identifier[keep_list] ))
identifier[log] . identifier[debug] ( literal[string] , identifier[remove_list] )
keyword[for] identifier[alias_name] keyword[in] identifier[remove_list] :
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[alias_name] )
identifier[ret] [ literal[string] ]= keyword[None]
keyword[else] :
identifier[__salt__] [ literal[string] ]( identifier[alias_name] , identifier[name] , identifier[passphrase] )
identifier[ret] [ literal[string] ][ identifier[alias_name] ]= literal[string]
identifier[ret] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[alias_name] )
keyword[if] keyword[not] identifier[ret] [ literal[string] ] keyword[and] keyword[not] identifier[ret] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret] | def managed(name, passphrase, entries, force_remove=False):
"""
Create or manage a java keystore.
name
The path to the keystore file
passphrase
The password to the keystore
entries
A list containing an alias, certificate, and optional private_key.
The certificate and private_key can be a file or a string
.. code-block:: yaml
- entries:
- alias: hostname2
certificate: /path/to/cert.crt
private_key: /path/to/key.key
- alias: stringhost
certificate: |
-----BEGIN CERTIFICATE-----
MIICEjCCAXsCAg36MA0GCSqGSIb3DQEBBQUAMIGbMQswCQYDVQQGEwJKUDEOMAwG
...
2VguKv4SWjRFoRkIfIlHX0qVviMhSlNy2ioFLy7JcPZb+v3ftDGywUqcBiVDoea0
-----END CERTIFICATE-----
force_remove
If True will cause the state to remove any entries found in the keystore which are not
defined in the state. The default is False.
Example
.. code-block:: yaml
define_keystore:
keystore.managed:
- name: /path/to/keystore
- passphrase: changeit
- force_remove: True
- entries:
- alias: hostname1
certificate: /path/to/cert.crt
- alias: remotehost
certificate: /path/to/cert2.crt
private_key: /path/to/key2.key
- alias: pillarhost
certificate: {{ salt.pillar.get('path:to:cert') }}
"""
ret = {'changes': {}, 'comment': '', 'name': name, 'result': True}
keep_list = []
old_aliases = []
if force_remove:
if os.path.exists(name):
existing_entries = __salt__['keystore.list'](name, passphrase)
for entry in existing_entries:
old_aliases.append(entry.get('alias')) # depends on [control=['for'], data=['entry']]
log.debug('Existing aliases list: %s', old_aliases) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
for entry in entries:
update_entry = True
existing_entry = None
if os.path.exists(name):
if force_remove:
keep_list.append(entry['alias']) # depends on [control=['if'], data=[]]
existing_entry = __salt__['keystore.list'](name, passphrase, entry['alias'])
if existing_entry:
existing_sha1 = existing_entry[0]['sha1']
new_sha1 = __salt__['x509.read_certificate'](entry['certificate'])['SHA1 Finger Print']
if existing_sha1 == new_sha1:
update_entry = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if update_entry:
if __opts__['test']:
ret['result'] = None
if existing_entry:
ret['comment'] += 'Alias {0} would have been updated\n'.format(entry['alias']) # depends on [control=['if'], data=[]]
else:
ret['comment'] += 'Alias {0} would have been added\n'.format(entry['alias']) # depends on [control=['if'], data=[]]
elif existing_entry:
result = __salt__['keystore.remove'](entry['alias'], name, passphrase)
result = __salt__['keystore.add'](entry['alias'], name, passphrase, entry['certificate'], private_key=entry.get('private_key', None))
if result:
ret['changes'][entry['alias']] = 'Updated'
ret['comment'] += 'Alias {0} updated.\n'.format(entry['alias']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
result = __salt__['keystore.add'](entry['alias'], name, passphrase, entry['certificate'], private_key=entry.get('private_key', None))
if result:
ret['changes'][entry['alias']] = 'Added'
ret['comment'] += 'Alias {0} added.\n'.format(entry['alias']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['entry']]
if force_remove:
# Determine which aliases need to be removed
remove_list = list(set(old_aliases) - set(keep_list))
log.debug('Will remove: %s', remove_list)
for alias_name in remove_list:
if __opts__['test']:
ret['comment'] += 'Alias {0} would have been removed'.format(alias_name)
ret['result'] = None # depends on [control=['if'], data=[]]
else:
__salt__['keystore.remove'](alias_name, name, passphrase)
ret['changes'][alias_name] = 'Removed'
ret['comment'] += 'Alias {0} removed.\n'.format(alias_name) # depends on [control=['for'], data=['alias_name']] # depends on [control=['if'], data=[]]
if not ret['changes'] and (not ret['comment']):
ret['comment'] = 'No changes made.\n' # depends on [control=['if'], data=[]]
return ret |
def same_guid(post, parameter=DEFAULT_SIMILARITY_TIMESPAN):
	'''Skip posts with exactly same GUID.
		Parameter: comparison timespan, seconds (int, 0 = inf, default: {0}).'''
	from feedjack.models import Post
	# Parameter may arrive as a string from configuration; normalize to int.
	# NOTE(review): types.StringTypes is Python-2-only — kept for compatibility
	# with the rest of this module.
	if isinstance(parameter, types.StringTypes): parameter = int(parameter.strip())
	# Any other post with the same GUID (displayable or not) counts as a twin.
	similar = Post.objects.filtered(for_display=False)\
		.exclude(id=post.id).filter(guid=post.guid)
	if parameter:
		# A zero/empty timespan means "compare against all history".
		similar = similar.filter(date_updated__gt=timezone.now() - timedelta(seconds=parameter))
	# exists() already returns a bool — no need to wrap it; True = keep the post.
	return not similar.exists()
constant[Skip posts with exactly same GUID.
Parameter: comparison timespan, seconds (int, 0 = inf, default: {0}).]
from relative_module[feedjack.models] import module[Post]
if call[name[isinstance], parameter[name[parameter], name[types].StringTypes]] begin[:]
variable[parameter] assign[=] call[name[int], parameter[call[name[parameter].strip, parameter[]]]]
variable[similar] assign[=] call[call[call[name[Post].objects.filtered, parameter[]].exclude, parameter[]].filter, parameter[]]
if name[parameter] begin[:]
variable[similar] assign[=] call[name[similar].filter, parameter[]]
return[<ast.UnaryOp object at 0x7da20e960d90>] | keyword[def] identifier[same_guid] ( identifier[post] , identifier[parameter] = identifier[DEFAULT_SIMILARITY_TIMESPAN] ):
literal[string]
keyword[from] identifier[feedjack] . identifier[models] keyword[import] identifier[Post]
keyword[if] identifier[isinstance] ( identifier[parameter] , identifier[types] . identifier[StringTypes] ): identifier[parameter] = identifier[int] ( identifier[parameter] . identifier[strip] ())
identifier[similar] = identifier[Post] . identifier[objects] . identifier[filtered] ( identifier[for_display] = keyword[False] ). identifier[exclude] ( identifier[id] = identifier[post] . identifier[id] ). identifier[filter] ( identifier[guid] = identifier[post] . identifier[guid] )
keyword[if] identifier[parameter] :
identifier[similar] = identifier[similar] . identifier[filter] ( identifier[date_updated__gt] = identifier[timezone] . identifier[now] ()- identifier[timedelta] ( identifier[seconds] = identifier[parameter] ))
keyword[return] keyword[not] identifier[bool] ( identifier[similar] . identifier[exists] ()) | def same_guid(post, parameter=DEFAULT_SIMILARITY_TIMESPAN):
"""Skip posts with exactly same GUID.
Parameter: comparison timespan, seconds (int, 0 = inf, default: {0})."""
from feedjack.models import Post
if isinstance(parameter, types.StringTypes):
parameter = int(parameter.strip()) # depends on [control=['if'], data=[]]
similar = Post.objects.filtered(for_display=False).exclude(id=post.id).filter(guid=post.guid)
if parameter:
similar = similar.filter(date_updated__gt=timezone.now() - timedelta(seconds=parameter)) # depends on [control=['if'], data=[]]
return not bool(similar.exists()) |
def _update_viewport_margins(self):
        """Update the editor's viewport margins from the visible panels.

        For each zone (left/right/top/bottom), the margin is the sum of the
        size hints of the visible panels docked in that zone.  The computed
        sizes are cached in ``self._margin_sizes`` and applied to the editor.
        """
        def _zone_extent(position, horizontal):
            # Sum widths (for horizontal zones) or heights of visible panels.
            total = 0
            for panel in self.panels_for_zone(position):
                if panel.isVisible():
                    hint = panel.sizeHint()
                    total += hint.width() if horizontal else hint.height()
            return total

        left = _zone_extent(Panel.Position.LEFT, True)
        right = _zone_extent(Panel.Position.RIGHT, True)
        top = _zone_extent(Panel.Position.TOP, False)
        bottom = _zone_extent(Panel.Position.BOTTOM, False)
        self._margin_sizes = (top, left, right, bottom)
        self.editor.setViewportMargins(left, top, right, bottom)
constant[Update viewport margins.]
variable[top] assign[=] constant[0]
variable[left] assign[=] constant[0]
variable[right] assign[=] constant[0]
variable[bottom] assign[=] constant[0]
for taget[name[panel]] in starred[call[name[self].panels_for_zone, parameter[name[Panel].Position.LEFT]]] begin[:]
if call[name[panel].isVisible, parameter[]] begin[:]
variable[width] assign[=] call[call[name[panel].sizeHint, parameter[]].width, parameter[]]
<ast.AugAssign object at 0x7da20c76e140>
for taget[name[panel]] in starred[call[name[self].panels_for_zone, parameter[name[Panel].Position.RIGHT]]] begin[:]
if call[name[panel].isVisible, parameter[]] begin[:]
variable[width] assign[=] call[call[name[panel].sizeHint, parameter[]].width, parameter[]]
<ast.AugAssign object at 0x7da20c76f880>
for taget[name[panel]] in starred[call[name[self].panels_for_zone, parameter[name[Panel].Position.TOP]]] begin[:]
if call[name[panel].isVisible, parameter[]] begin[:]
variable[height] assign[=] call[call[name[panel].sizeHint, parameter[]].height, parameter[]]
<ast.AugAssign object at 0x7da20c76c130>
for taget[name[panel]] in starred[call[name[self].panels_for_zone, parameter[name[Panel].Position.BOTTOM]]] begin[:]
if call[name[panel].isVisible, parameter[]] begin[:]
variable[height] assign[=] call[call[name[panel].sizeHint, parameter[]].height, parameter[]]
<ast.AugAssign object at 0x7da20e954a30>
name[self]._margin_sizes assign[=] tuple[[<ast.Name object at 0x7da20e956da0>, <ast.Name object at 0x7da20e957b20>, <ast.Name object at 0x7da20e957880>, <ast.Name object at 0x7da20e956e30>]]
call[name[self].editor.setViewportMargins, parameter[name[left], name[top], name[right], name[bottom]]] | keyword[def] identifier[_update_viewport_margins] ( identifier[self] ):
literal[string]
identifier[top] = literal[int]
identifier[left] = literal[int]
identifier[right] = literal[int]
identifier[bottom] = literal[int]
keyword[for] identifier[panel] keyword[in] identifier[self] . identifier[panels_for_zone] ( identifier[Panel] . identifier[Position] . identifier[LEFT] ):
keyword[if] identifier[panel] . identifier[isVisible] ():
identifier[width] = identifier[panel] . identifier[sizeHint] (). identifier[width] ()
identifier[left] += identifier[width]
keyword[for] identifier[panel] keyword[in] identifier[self] . identifier[panels_for_zone] ( identifier[Panel] . identifier[Position] . identifier[RIGHT] ):
keyword[if] identifier[panel] . identifier[isVisible] ():
identifier[width] = identifier[panel] . identifier[sizeHint] (). identifier[width] ()
identifier[right] += identifier[width]
keyword[for] identifier[panel] keyword[in] identifier[self] . identifier[panels_for_zone] ( identifier[Panel] . identifier[Position] . identifier[TOP] ):
keyword[if] identifier[panel] . identifier[isVisible] ():
identifier[height] = identifier[panel] . identifier[sizeHint] (). identifier[height] ()
identifier[top] += identifier[height]
keyword[for] identifier[panel] keyword[in] identifier[self] . identifier[panels_for_zone] ( identifier[Panel] . identifier[Position] . identifier[BOTTOM] ):
keyword[if] identifier[panel] . identifier[isVisible] ():
identifier[height] = identifier[panel] . identifier[sizeHint] (). identifier[height] ()
identifier[bottom] += identifier[height]
identifier[self] . identifier[_margin_sizes] =( identifier[top] , identifier[left] , identifier[right] , identifier[bottom] )
identifier[self] . identifier[editor] . identifier[setViewportMargins] ( identifier[left] , identifier[top] , identifier[right] , identifier[bottom] ) | def _update_viewport_margins(self):
"""Update viewport margins."""
top = 0
left = 0
right = 0
bottom = 0
for panel in self.panels_for_zone(Panel.Position.LEFT):
if panel.isVisible():
width = panel.sizeHint().width()
left += width # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['panel']]
for panel in self.panels_for_zone(Panel.Position.RIGHT):
if panel.isVisible():
width = panel.sizeHint().width()
right += width # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['panel']]
for panel in self.panels_for_zone(Panel.Position.TOP):
if panel.isVisible():
height = panel.sizeHint().height()
top += height # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['panel']]
for panel in self.panels_for_zone(Panel.Position.BOTTOM):
if panel.isVisible():
height = panel.sizeHint().height()
bottom += height # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['panel']]
self._margin_sizes = (top, left, right, bottom)
self.editor.setViewportMargins(left, top, right, bottom) |
def load_operator(self, operator):
        """|coro|

        Loads the players stats for the operator

        Parameters
        ----------
        operator : str
            the name of the operator

        Returns
        -------
        :class:`Operator`
            the operator object found

        Raises
        ------
        ValueError
            if the operator name is unknown
        InvalidRequest
            if the API response is missing the expected result keys
        """
        location = yield from self.auth.get_operator_index(operator)
        if location is None:
            raise ValueError("invalid operator %s" % operator)
        # Some operators have an extra unique statistic appended to the query.
        operator_key = yield from self.auth.get_operator_statistic(operator)
        if operator_key is not None:
            operator_key = "," + operator_key
        else:
            operator_key = ""
        data = yield from self.auth.get("https://public-ubiservices.ubi.com/v1/spaces/%s/sandboxes/%s/playerstats2/statistics?populations=%s&statistics=operatorpvp_kills,operatorpvp_death,operatorpvp_roundwon,operatorpvp_roundlost,operatorpvp_meleekills,operatorpvp_totalxp,operatorpvp_headshot,operatorpvp_timeplayed,operatorpvp_dbno%s" % (self.spaceid, self.platform_url, self.id, operator_key))
        if "results" not in data or self.id not in data["results"]:
            raise InvalidRequest("Missing results key in returned JSON object %s" % str(data))
        data = data["results"][self.id]
        # Keep only this operator's entries; the key becomes the bare statistic
        # name, e.g. "operatorpvp_kills:..." -> "kills".
        data = {x.split(":")[0].split("_")[1]: data[x] for x in data if x is not None and location in x}
        if operator_key:
            # e.g. ",operatorpvp_meleekills" -> "meleekills"
            data["__statistic_name"] = operator_key.split("_")[1]
        oper = Operator(operator, data)
        # Cache the built operator so later lookups skip the HTTP round trip.
        self.operators[operator] = oper
        return oper
constant[|coro|
Loads the players stats for the operator
Parameters
----------
operator : str
the name of the operator
Returns
-------
:class:`Operator`
the operator object found]
variable[location] assign[=] <ast.YieldFrom object at 0x7da1b1354d60>
if compare[name[location] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b1354370>
variable[operator_key] assign[=] <ast.YieldFrom object at 0x7da1b1354fd0>
if compare[name[operator_key] is_not constant[None]] begin[:]
variable[operator_key] assign[=] binary_operation[constant[,] + name[operator_key]]
variable[data] assign[=] <ast.YieldFrom object at 0x7da1b1354d90>
if <ast.BoolOp object at 0x7da1b1357970> begin[:]
<ast.Raise object at 0x7da1b1354640>
variable[data] assign[=] call[call[name[data]][constant[results]]][name[self].id]
variable[data] assign[=] <ast.DictComp object at 0x7da1b1356aa0>
if name[operator_key] begin[:]
call[name[data]][constant[__statistic_name]] assign[=] call[call[name[operator_key].split, parameter[constant[_]]]][constant[1]]
variable[oper] assign[=] call[name[Operator], parameter[name[operator], name[data]]]
call[name[self].operators][name[operator]] assign[=] name[oper]
return[name[oper]] | keyword[def] identifier[load_operator] ( identifier[self] , identifier[operator] ):
literal[string]
identifier[location] = keyword[yield] keyword[from] identifier[self] . identifier[auth] . identifier[get_operator_index] ( identifier[operator] )
keyword[if] identifier[location] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[operator] )
identifier[operator_key] = keyword[yield] keyword[from] identifier[self] . identifier[auth] . identifier[get_operator_statistic] ( identifier[operator] )
keyword[if] identifier[operator_key] keyword[is] keyword[not] keyword[None] :
identifier[operator_key] = literal[string] + identifier[operator_key]
keyword[else] :
identifier[operator_key] = literal[string]
identifier[data] = keyword[yield] keyword[from] identifier[self] . identifier[auth] . identifier[get] ( literal[string] %( identifier[self] . identifier[spaceid] , identifier[self] . identifier[platform_url] , identifier[self] . identifier[id] , identifier[operator_key] ))
keyword[if] keyword[not] literal[string] keyword[in] identifier[data] keyword[or] keyword[not] identifier[self] . identifier[id] keyword[in] identifier[data] [ literal[string] ]:
keyword[raise] identifier[InvalidRequest] ( literal[string] % identifier[str] ( identifier[data] ))
identifier[data] = identifier[data] [ literal[string] ][ identifier[self] . identifier[id] ]
identifier[data] ={ identifier[x] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]: identifier[data] [ identifier[x] ] keyword[for] identifier[x] keyword[in] identifier[data] keyword[if] identifier[x] keyword[is] keyword[not] keyword[None] keyword[and] identifier[location] keyword[in] identifier[x] }
keyword[if] identifier[operator_key] :
identifier[data] [ literal[string] ]= identifier[operator_key] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[oper] = identifier[Operator] ( identifier[operator] , identifier[data] )
identifier[self] . identifier[operators] [ identifier[operator] ]= identifier[oper]
keyword[return] identifier[oper] | def load_operator(self, operator):
"""|coro|
Loads the players stats for the operator
Parameters
----------
operator : str
the name of the operator
Returns
-------
:class:`Operator`
the operator object found"""
location = (yield from self.auth.get_operator_index(operator))
if location is None:
raise ValueError('invalid operator %s' % operator) # depends on [control=['if'], data=[]]
operator_key = (yield from self.auth.get_operator_statistic(operator))
if operator_key is not None:
operator_key = ',' + operator_key # depends on [control=['if'], data=['operator_key']]
else:
operator_key = ''
data = (yield from self.auth.get('https://public-ubiservices.ubi.com/v1/spaces/%s/sandboxes/%s/playerstats2/statistics?populations=%s&statistics=operatorpvp_kills,operatorpvp_death,operatorpvp_roundwon,operatorpvp_roundlost,operatorpvp_meleekills,operatorpvp_totalxp,operatorpvp_headshot,operatorpvp_timeplayed,operatorpvp_dbno%s' % (self.spaceid, self.platform_url, self.id, operator_key)))
if not 'results' in data or not self.id in data['results']:
raise InvalidRequest('Missing results key in returned JSON object %s' % str(data)) # depends on [control=['if'], data=[]]
data = data['results'][self.id]
data = {x.split(':')[0].split('_')[1]: data[x] for x in data if x is not None and location in x}
if operator_key:
data['__statistic_name'] = operator_key.split('_')[1] # depends on [control=['if'], data=[]]
#if len(data) < 5:
# raise InvalidRequest("invalid number of results for operator in JSON object %s" % data)
oper = Operator(operator, data)
self.operators[operator] = oper
return oper |
def host_info_getter(func, name=None):
    """
    Register the decorated function as a host-info gatherer.

    The function is stored in the global
    ``sacred.host_info.host_info_gatherers`` dictionary, whose entries are
    invoked by :py:func:`~sacred.host_info.get_host_info` when the host
    information is collected.

    Parameters
    ----------
    func : callable
        A function that can be called without arguments and returns some
        json-serializable information.
    name : str, optional
        The name of the corresponding entry in host_info.
        Defaults to the name of the function.

    Returns
    -------
    The function itself.
    """
    entry_name = name or func.__name__
    host_info_gatherers[entry_name] = func
    return func
constant[
The decorated function is added to the process of collecting the host_info.
This just adds the decorated function to the global
``sacred.host_info.host_info_gatherers`` dictionary.
The functions from that dictionary are used when collecting the host info
using :py:func:`~sacred.host_info.get_host_info`.
Parameters
----------
func : callable
A function that can be called without arguments and returns some
json-serializable information.
name : str, optional
The name of the corresponding entry in host_info.
Defaults to the name of the function.
Returns
-------
The function itself.
]
variable[name] assign[=] <ast.BoolOp object at 0x7da1b18955a0>
call[name[host_info_gatherers]][name[name]] assign[=] name[func]
return[name[func]] | keyword[def] identifier[host_info_getter] ( identifier[func] , identifier[name] = keyword[None] ):
literal[string]
identifier[name] = identifier[name] keyword[or] identifier[func] . identifier[__name__]
identifier[host_info_gatherers] [ identifier[name] ]= identifier[func]
keyword[return] identifier[func] | def host_info_getter(func, name=None):
"""
The decorated function is added to the process of collecting the host_info.
This just adds the decorated function to the global
``sacred.host_info.host_info_gatherers`` dictionary.
The functions from that dictionary are used when collecting the host info
using :py:func:`~sacred.host_info.get_host_info`.
Parameters
----------
func : callable
A function that can be called without arguments and returns some
json-serializable information.
name : str, optional
The name of the corresponding entry in host_info.
Defaults to the name of the function.
Returns
-------
The function itself.
"""
name = name or func.__name__
host_info_gatherers[name] = func
return func |
def connect(self, timeout=None):
        """ Returns a new :class:`~plexapi.client.PlexClient` or :class:`~plexapi.server.PlexServer`

            Sometimes there is more than one address specified for a server or client.
            After trying to connect to all available addresses for this client and assuming
            at least one connection was successful, the PlexClient object is built and returned.

            Raises:
                :class:`plexapi.exceptions.NotFound`: When unable to connect to any addresses for this device.
        """
        # Servers and clients share this code path; pick the matching wrapper.
        if 'server' in self.provides:
            cls = PlexServer
        else:
            cls = PlexClient
        # One argument list per candidate address, tried concurrently below.
        listargs = []
        for url in self.connections:
            listargs.append([cls, url, self.token, timeout])
        log.info('Testing %s device connections..', len(listargs))
        results = utils.threaded(_connect, listargs)
        return _chooseConnection('Device', self.name, results)
constant[ Returns a new :class:`~plexapi.client.PlexClient` or :class:`~plexapi.server.PlexServer`
Sometimes there is more than one address specified for a server or client.
After trying to connect to all available addresses for this client and assuming
at least one connection was successful, the PlexClient object is built and returned.
Raises:
:class:`plexapi.exceptions.NotFound`: When unable to connect to any addresses for this device.
]
variable[cls] assign[=] <ast.IfExp object at 0x7da1b0696410>
variable[listargs] assign[=] <ast.ListComp object at 0x7da1b0696200>
call[name[log].info, parameter[constant[Testing %s device connections..], call[name[len], parameter[name[listargs]]]]]
variable[results] assign[=] call[name[utils].threaded, parameter[name[_connect], name[listargs]]]
return[call[name[_chooseConnection], parameter[constant[Device], name[self].name, name[results]]]] | keyword[def] identifier[connect] ( identifier[self] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[cls] = identifier[PlexServer] keyword[if] literal[string] keyword[in] identifier[self] . identifier[provides] keyword[else] identifier[PlexClient]
identifier[listargs] =[[ identifier[cls] , identifier[url] , identifier[self] . identifier[token] , identifier[timeout] ] keyword[for] identifier[url] keyword[in] identifier[self] . identifier[connections] ]
identifier[log] . identifier[info] ( literal[string] , identifier[len] ( identifier[listargs] ))
identifier[results] = identifier[utils] . identifier[threaded] ( identifier[_connect] , identifier[listargs] )
keyword[return] identifier[_chooseConnection] ( literal[string] , identifier[self] . identifier[name] , identifier[results] ) | def connect(self, timeout=None):
""" Returns a new :class:`~plexapi.client.PlexClient` or :class:`~plexapi.server.PlexServer`
Sometimes there is more than one address specified for a server or client.
After trying to connect to all available addresses for this client and assuming
at least one connection was successful, the PlexClient object is built and returned.
Raises:
:class:`plexapi.exceptions.NotFound`: When unable to connect to any addresses for this device.
"""
cls = PlexServer if 'server' in self.provides else PlexClient
listargs = [[cls, url, self.token, timeout] for url in self.connections]
log.info('Testing %s device connections..', len(listargs))
results = utils.threaded(_connect, listargs)
return _chooseConnection('Device', self.name, results) |
def sort_columns(self, column, key=None, reverse=False):
        """
        Sort the DataFrame by one of the columns. The sort modifies the DataFrame inplace. The key and reverse
        parameters have the same meaning as for the built-in sort() function.

        :param column: column name to use for the sort
        :param key: if not None then a function of one argument that is used to extract a comparison key from each
            list element
        :param reverse: if True then the list elements are sort as if each comparison were reversed.
        :return: nothing
        :raises TypeError: if more than one column name is given
        """
        if isinstance(column, (list, blist)):
            raise TypeError('Can only sort by a single column ')
        # Row permutation that sorts the chosen column.
        sort = sorted_list_indexes(self._data[self._columns.index(column)], key, reverse)
        # Hoist the container choice once instead of repeating the blist/list
        # conditional for the index and for every column.
        container = blist if self._blist else list
        # reorder the index
        self._index = container(self._index[i] for i in sort)
        # reorder each column with the same permutation
        for c in range(len(self._data)):
            col = self._data[c]
            self._data[c] = container(col[i] for i in sort)
constant[
Sort the DataFrame by one of the columns. The sort modifies the DataFrame inplace. The key and reverse
parameters have the same meaning as for the built-in sort() function.
:param column: column name to use for the sort
:param key: if not None then a function of one argument that is used to extract a comparison key from each
list element
:param reverse: if True then the list elements are sort as if each comparison were reversed.
:return: nothing
]
if call[name[isinstance], parameter[name[column], tuple[[<ast.Name object at 0x7da207f02b60>, <ast.Name object at 0x7da207f03c10>]]]] begin[:]
<ast.Raise object at 0x7da207f03340>
variable[sort] assign[=] call[name[sorted_list_indexes], parameter[call[name[self]._data][call[name[self]._columns.index, parameter[name[column]]]], name[key], name[reverse]]]
name[self]._index assign[=] <ast.IfExp object at 0x7da207f03760>
for taget[name[c]] in starred[call[name[range], parameter[call[name[len], parameter[name[self]._data]]]]] begin[:]
call[name[self]._data][name[c]] assign[=] <ast.IfExp object at 0x7da207f01540> | keyword[def] identifier[sort_columns] ( identifier[self] , identifier[column] , identifier[key] = keyword[None] , identifier[reverse] = keyword[False] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[column] ,( identifier[list] , identifier[blist] )):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[sort] = identifier[sorted_list_indexes] ( identifier[self] . identifier[_data] [ identifier[self] . identifier[_columns] . identifier[index] ( identifier[column] )], identifier[key] , identifier[reverse] )
identifier[self] . identifier[_index] = identifier[blist] ([ identifier[self] . identifier[_index] [ identifier[x] ] keyword[for] identifier[x] keyword[in] identifier[sort] ]) keyword[if] identifier[self] . identifier[_blist] keyword[else] [ identifier[self] . identifier[_index] [ identifier[x] ] keyword[for] identifier[x] keyword[in] identifier[sort] ]
keyword[for] identifier[c] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[_data] )):
identifier[self] . identifier[_data] [ identifier[c] ]= identifier[blist] ([ identifier[self] . identifier[_data] [ identifier[c] ][ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[sort] ]) keyword[if] identifier[self] . identifier[_blist] keyword[else] [ identifier[self] . identifier[_data] [ identifier[c] ][ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[sort] ] | def sort_columns(self, column, key=None, reverse=False):
"""
Sort the DataFrame by one of the columns. The sort modifies the DataFrame inplace. The key and reverse
parameters have the same meaning as for the built-in sort() function.
:param column: column name to use for the sort
:param key: if not None then a function of one argument that is used to extract a comparison key from each
list element
:param reverse: if True then the list elements are sort as if each comparison were reversed.
:return: nothing
"""
if isinstance(column, (list, blist)):
raise TypeError('Can only sort by a single column ') # depends on [control=['if'], data=[]]
sort = sorted_list_indexes(self._data[self._columns.index(column)], key, reverse)
# sort index
self._index = blist([self._index[x] for x in sort]) if self._blist else [self._index[x] for x in sort]
# each column
for c in range(len(self._data)):
self._data[c] = blist([self._data[c][i] for i in sort]) if self._blist else [self._data[c][i] for i in sort] # depends on [control=['for'], data=['c']] |
def filter_indices(self, options, verbosity, *args, **kwargs):
        """Filter indices and execute an action for each index."""
        index_name_map = {}
        for index in index_builder.indexes:
            index_name_map[index.__class__.__name__] = index
        # Start from the explicit include list, or from every known index.
        if options['index']:
            selected = set(options['index'])
        else:
            selected = set(index_name_map)
        # Remove the excluded indices, aborting on unknown names.
        for excluded in options['exclude']:
            if excluded not in index_name_map:
                self.invalid_index(excluded)
                return
            selected.discard(excluded)
        # Run the action on whatever remains (includes may also be unknown).
        for chosen in selected:
            if chosen not in index_name_map:
                self.invalid_index(chosen)
                return
            if verbosity > 0:
                self.stdout.write("Processing index '{}'...".format(chosen))
            self.handle_index(index_name_map[chosen], *args, **kwargs)
constant[Filter indices and execute an action for each index.]
variable[index_name_map] assign[=] <ast.DictComp object at 0x7da1b1a29330>
if call[name[options]][constant[index]] begin[:]
variable[indices] assign[=] call[name[set], parameter[call[name[options]][constant[index]]]]
for taget[name[index_name]] in starred[call[name[options]][constant[exclude]]] begin[:]
if compare[name[index_name] <ast.NotIn object at 0x7da2590d7190> name[index_name_map]] begin[:]
call[name[self].invalid_index, parameter[name[index_name]]]
return[None]
call[name[indices].discard, parameter[name[index_name]]]
for taget[name[index_name]] in starred[name[indices]] begin[:]
<ast.Try object at 0x7da1b1b84670>
if compare[name[verbosity] greater[>] constant[0]] begin[:]
call[name[self].stdout.write, parameter[call[constant[Processing index '{}'...].format, parameter[name[index_name]]]]]
call[name[self].handle_index, parameter[name[index], <ast.Starred object at 0x7da18c4cf1c0>]] | keyword[def] identifier[filter_indices] ( identifier[self] , identifier[options] , identifier[verbosity] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[index_name_map] ={
identifier[index] . identifier[__class__] . identifier[__name__] : identifier[index]
keyword[for] identifier[index] keyword[in] identifier[index_builder] . identifier[indexes]
}
keyword[if] identifier[options] [ literal[string] ]:
identifier[indices] = identifier[set] ( identifier[options] [ literal[string] ])
keyword[else] :
identifier[indices] = identifier[set] ( identifier[index_name_map] . identifier[keys] ())
keyword[for] identifier[index_name] keyword[in] identifier[options] [ literal[string] ]:
keyword[if] identifier[index_name] keyword[not] keyword[in] identifier[index_name_map] :
identifier[self] . identifier[invalid_index] ( identifier[index_name] )
keyword[return]
identifier[indices] . identifier[discard] ( identifier[index_name] )
keyword[for] identifier[index_name] keyword[in] identifier[indices] :
keyword[try] :
identifier[index] = identifier[index_name_map] [ identifier[index_name] ]
keyword[except] identifier[KeyError] :
identifier[self] . identifier[invalid_index] ( identifier[index_name] )
keyword[return]
keyword[if] identifier[verbosity] > literal[int] :
identifier[self] . identifier[stdout] . identifier[write] ( literal[string] . identifier[format] ( identifier[index_name] ))
identifier[self] . identifier[handle_index] ( identifier[index] ,* identifier[args] ,** identifier[kwargs] ) | def filter_indices(self, options, verbosity, *args, **kwargs):
"""Filter indices and execute an action for each index."""
index_name_map = {index.__class__.__name__: index for index in index_builder.indexes}
# Process includes.
if options['index']:
indices = set(options['index']) # depends on [control=['if'], data=[]]
else:
indices = set(index_name_map.keys())
# Process excludes.
for index_name in options['exclude']:
if index_name not in index_name_map:
self.invalid_index(index_name)
return # depends on [control=['if'], data=['index_name']]
indices.discard(index_name) # depends on [control=['for'], data=['index_name']]
# Execute action for each remaining index.
for index_name in indices:
try:
index = index_name_map[index_name] # depends on [control=['try'], data=[]]
except KeyError:
self.invalid_index(index_name)
return # depends on [control=['except'], data=[]]
if verbosity > 0:
self.stdout.write("Processing index '{}'...".format(index_name)) # depends on [control=['if'], data=[]]
self.handle_index(index, *args, **kwargs) # depends on [control=['for'], data=['index_name']] |
def recv(self, bfd_pkt):
    """
    BFD packet receiver.

    Processes one received BFD Control packet for this session:
    records the peer's advertised parameters, drives the session state
    machine (RFC5880 Page 35), (re)arms the transmit/detection timers
    and tracks the remote authentication sequence number.

    :param bfd_pkt: Parsed BFD Control packet received from the peer.
    """
    LOG.debug("[BFD][%s][RECV] BFD Control received: %s",
              hex(self._local_discr), six.binary_type(bfd_pkt))
    # Record the peer's discriminator, state and Demand-mode flag.
    # NOTE(review): this writes self._remote_state, but the demand-mode
    # checks below read self._remote_session_state -- confirm the two
    # attributes are kept in sync elsewhere in this class.
    self._remote_discr = bfd_pkt.my_discr
    self._remote_state = bfd_pkt.state
    self._remote_demand_mode = bfd_pkt.flags & bfd.BFD_FLAG_DEMAND
    if self._remote_min_rx_interval != bfd_pkt.required_min_rx_interval:
        self._remote_min_rx_interval = bfd_pkt.required_min_rx_interval
        # Update transmit interval (RFC5880 Section 6.8.2.)
        self._update_xmit_period()
    # TODO: Echo function (RFC5880 Page 35)
    # A Final flag from the peer terminates our Poll Sequence.
    if bfd_pkt.flags & bfd.BFD_FLAG_FINAL and self._is_polling:
        self._is_polling = False
    # Check and update the session state (RFC5880 Page 35)
    if self._session_state == bfd.BFD_STATE_ADMIN_DOWN:
        # Administratively down: received packets are ignored entirely.
        return
    if bfd_pkt.state == bfd.BFD_STATE_ADMIN_DOWN:
        if self._session_state != bfd.BFD_STATE_DOWN:
            self._set_state(bfd.BFD_STATE_DOWN,
                            bfd.BFD_DIAG_NEIG_SIG_SESS_DOWN)
    else:
        if self._session_state == bfd.BFD_STATE_DOWN:
            if bfd_pkt.state == bfd.BFD_STATE_DOWN:
                self._set_state(bfd.BFD_STATE_INIT)
            elif bfd_pkt.state == bfd.BFD_STATE_INIT:
                self._set_state(bfd.BFD_STATE_UP)
        elif self._session_state == bfd.BFD_STATE_INIT:
            if bfd_pkt.state in [bfd.BFD_STATE_INIT, bfd.BFD_STATE_UP]:
                self._set_state(bfd.BFD_STATE_UP)
        else:
            if bfd_pkt.state == bfd.BFD_STATE_DOWN:
                self._set_state(bfd.BFD_STATE_DOWN,
                                bfd.BFD_DIAG_NEIG_SIG_SESS_DOWN)
    # TODO: Demand mode support.
    # While both ends are Up and the peer requests Demand mode, stop
    # our periodic transmission; otherwise make sure the send loop runs.
    if self._remote_demand_mode and \
            self._session_state == bfd.BFD_STATE_UP and \
            self._remote_session_state == bfd.BFD_STATE_UP:
        self._enable_send = False
    if not self._remote_demand_mode or \
            self._session_state != bfd.BFD_STATE_UP or \
            self._remote_session_state != bfd.BFD_STATE_UP:
        if not self._enable_send:
            self._enable_send = True
            hub.spawn(self._send_loop)
    # Update the detection time (RFC5880 Section 6.8.4.)
    # A zero detection time means the timeout loop was never armed.
    if self._detect_time == 0:
        self._detect_time = bfd_pkt.desired_min_tx_interval * \
            bfd_pkt.detect_mult / 1000000.0
        # Start the timeout loop.
        hub.spawn(self._recv_timeout_loop)
    # A Poll flag obliges us to answer with Final and refresh the
    # detection time from the newly advertised intervals.
    if bfd_pkt.flags & bfd.BFD_FLAG_POLL:
        self._pending_final = True
        self._detect_time = bfd_pkt.desired_min_tx_interval * \
            bfd_pkt.detect_mult / 1000000.0
    # Update the remote authentication sequence number.
    if self._auth_type in [bfd.BFD_AUTH_KEYED_MD5,
                           bfd.BFD_AUTH_METICULOUS_KEYED_MD5,
                           bfd.BFD_AUTH_KEYED_SHA1,
                           bfd.BFD_AUTH_METICULOUS_KEYED_SHA1]:
        self._rcv_auth_seq = bfd_pkt.auth_cls.seq
        self._auth_seq_known = 1
    # Set the lock.
    if self._lock is not None:
        self._lock.set()
constant[
BFD packet receiver.
]
call[name[LOG].debug, parameter[constant[[BFD][%s][RECV] BFD Control received: %s], call[name[hex], parameter[name[self]._local_discr]], call[name[six].binary_type, parameter[name[bfd_pkt]]]]]
name[self]._remote_discr assign[=] name[bfd_pkt].my_discr
name[self]._remote_state assign[=] name[bfd_pkt].state
name[self]._remote_demand_mode assign[=] binary_operation[name[bfd_pkt].flags <ast.BitAnd object at 0x7da2590d6b60> name[bfd].BFD_FLAG_DEMAND]
if compare[name[self]._remote_min_rx_interval not_equal[!=] name[bfd_pkt].required_min_rx_interval] begin[:]
name[self]._remote_min_rx_interval assign[=] name[bfd_pkt].required_min_rx_interval
call[name[self]._update_xmit_period, parameter[]]
if <ast.BoolOp object at 0x7da1b1a20730> begin[:]
name[self]._is_polling assign[=] constant[False]
if compare[name[self]._session_state equal[==] name[bfd].BFD_STATE_ADMIN_DOWN] begin[:]
return[None]
if compare[name[bfd_pkt].state equal[==] name[bfd].BFD_STATE_ADMIN_DOWN] begin[:]
if compare[name[self]._session_state not_equal[!=] name[bfd].BFD_STATE_DOWN] begin[:]
call[name[self]._set_state, parameter[name[bfd].BFD_STATE_DOWN, name[bfd].BFD_DIAG_NEIG_SIG_SESS_DOWN]]
if <ast.BoolOp object at 0x7da1b1a22ec0> begin[:]
name[self]._enable_send assign[=] constant[False]
if <ast.BoolOp object at 0x7da1b1a21060> begin[:]
if <ast.UnaryOp object at 0x7da1b1a21ea0> begin[:]
name[self]._enable_send assign[=] constant[True]
call[name[hub].spawn, parameter[name[self]._send_loop]]
if compare[name[self]._detect_time equal[==] constant[0]] begin[:]
name[self]._detect_time assign[=] binary_operation[binary_operation[name[bfd_pkt].desired_min_tx_interval * name[bfd_pkt].detect_mult] / constant[1000000.0]]
call[name[hub].spawn, parameter[name[self]._recv_timeout_loop]]
if binary_operation[name[bfd_pkt].flags <ast.BitAnd object at 0x7da2590d6b60> name[bfd].BFD_FLAG_POLL] begin[:]
name[self]._pending_final assign[=] constant[True]
name[self]._detect_time assign[=] binary_operation[binary_operation[name[bfd_pkt].desired_min_tx_interval * name[bfd_pkt].detect_mult] / constant[1000000.0]]
if compare[name[self]._auth_type in list[[<ast.Attribute object at 0x7da1b1a3d6c0>, <ast.Attribute object at 0x7da1b1a3d570>, <ast.Attribute object at 0x7da1b1a3d3c0>, <ast.Attribute object at 0x7da1b1a3d090>]]] begin[:]
name[self]._rcv_auth_seq assign[=] name[bfd_pkt].auth_cls.seq
name[self]._auth_seq_known assign[=] constant[1]
if compare[name[self]._lock is_not constant[None]] begin[:]
call[name[self]._lock.set, parameter[]] | keyword[def] identifier[recv] ( identifier[self] , identifier[bfd_pkt] ):
literal[string]
identifier[LOG] . identifier[debug] ( literal[string] ,
identifier[hex] ( identifier[self] . identifier[_local_discr] ), identifier[six] . identifier[binary_type] ( identifier[bfd_pkt] ))
identifier[self] . identifier[_remote_discr] = identifier[bfd_pkt] . identifier[my_discr]
identifier[self] . identifier[_remote_state] = identifier[bfd_pkt] . identifier[state]
identifier[self] . identifier[_remote_demand_mode] = identifier[bfd_pkt] . identifier[flags] & identifier[bfd] . identifier[BFD_FLAG_DEMAND]
keyword[if] identifier[self] . identifier[_remote_min_rx_interval] != identifier[bfd_pkt] . identifier[required_min_rx_interval] :
identifier[self] . identifier[_remote_min_rx_interval] = identifier[bfd_pkt] . identifier[required_min_rx_interval]
identifier[self] . identifier[_update_xmit_period] ()
keyword[if] identifier[bfd_pkt] . identifier[flags] & identifier[bfd] . identifier[BFD_FLAG_FINAL] keyword[and] identifier[self] . identifier[_is_polling] :
identifier[self] . identifier[_is_polling] = keyword[False]
keyword[if] identifier[self] . identifier[_session_state] == identifier[bfd] . identifier[BFD_STATE_ADMIN_DOWN] :
keyword[return]
keyword[if] identifier[bfd_pkt] . identifier[state] == identifier[bfd] . identifier[BFD_STATE_ADMIN_DOWN] :
keyword[if] identifier[self] . identifier[_session_state] != identifier[bfd] . identifier[BFD_STATE_DOWN] :
identifier[self] . identifier[_set_state] ( identifier[bfd] . identifier[BFD_STATE_DOWN] ,
identifier[bfd] . identifier[BFD_DIAG_NEIG_SIG_SESS_DOWN] )
keyword[else] :
keyword[if] identifier[self] . identifier[_session_state] == identifier[bfd] . identifier[BFD_STATE_DOWN] :
keyword[if] identifier[bfd_pkt] . identifier[state] == identifier[bfd] . identifier[BFD_STATE_DOWN] :
identifier[self] . identifier[_set_state] ( identifier[bfd] . identifier[BFD_STATE_INIT] )
keyword[elif] identifier[bfd_pkt] . identifier[state] == identifier[bfd] . identifier[BFD_STATE_INIT] :
identifier[self] . identifier[_set_state] ( identifier[bfd] . identifier[BFD_STATE_UP] )
keyword[elif] identifier[self] . identifier[_session_state] == identifier[bfd] . identifier[BFD_STATE_INIT] :
keyword[if] identifier[bfd_pkt] . identifier[state] keyword[in] [ identifier[bfd] . identifier[BFD_STATE_INIT] , identifier[bfd] . identifier[BFD_STATE_UP] ]:
identifier[self] . identifier[_set_state] ( identifier[bfd] . identifier[BFD_STATE_UP] )
keyword[else] :
keyword[if] identifier[bfd_pkt] . identifier[state] == identifier[bfd] . identifier[BFD_STATE_DOWN] :
identifier[self] . identifier[_set_state] ( identifier[bfd] . identifier[BFD_STATE_DOWN] ,
identifier[bfd] . identifier[BFD_DIAG_NEIG_SIG_SESS_DOWN] )
keyword[if] identifier[self] . identifier[_remote_demand_mode] keyword[and] identifier[self] . identifier[_session_state] == identifier[bfd] . identifier[BFD_STATE_UP] keyword[and] identifier[self] . identifier[_remote_session_state] == identifier[bfd] . identifier[BFD_STATE_UP] :
identifier[self] . identifier[_enable_send] = keyword[False]
keyword[if] keyword[not] identifier[self] . identifier[_remote_demand_mode] keyword[or] identifier[self] . identifier[_session_state] != identifier[bfd] . identifier[BFD_STATE_UP] keyword[or] identifier[self] . identifier[_remote_session_state] != identifier[bfd] . identifier[BFD_STATE_UP] :
keyword[if] keyword[not] identifier[self] . identifier[_enable_send] :
identifier[self] . identifier[_enable_send] = keyword[True]
identifier[hub] . identifier[spawn] ( identifier[self] . identifier[_send_loop] )
keyword[if] identifier[self] . identifier[_detect_time] == literal[int] :
identifier[self] . identifier[_detect_time] = identifier[bfd_pkt] . identifier[desired_min_tx_interval] * identifier[bfd_pkt] . identifier[detect_mult] / literal[int]
identifier[hub] . identifier[spawn] ( identifier[self] . identifier[_recv_timeout_loop] )
keyword[if] identifier[bfd_pkt] . identifier[flags] & identifier[bfd] . identifier[BFD_FLAG_POLL] :
identifier[self] . identifier[_pending_final] = keyword[True]
identifier[self] . identifier[_detect_time] = identifier[bfd_pkt] . identifier[desired_min_tx_interval] * identifier[bfd_pkt] . identifier[detect_mult] / literal[int]
keyword[if] identifier[self] . identifier[_auth_type] keyword[in] [ identifier[bfd] . identifier[BFD_AUTH_KEYED_MD5] ,
identifier[bfd] . identifier[BFD_AUTH_METICULOUS_KEYED_MD5] ,
identifier[bfd] . identifier[BFD_AUTH_KEYED_SHA1] ,
identifier[bfd] . identifier[BFD_AUTH_METICULOUS_KEYED_SHA1] ]:
identifier[self] . identifier[_rcv_auth_seq] = identifier[bfd_pkt] . identifier[auth_cls] . identifier[seq]
identifier[self] . identifier[_auth_seq_known] = literal[int]
keyword[if] identifier[self] . identifier[_lock] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_lock] . identifier[set] () | def recv(self, bfd_pkt):
"""
BFD packet receiver.
"""
LOG.debug('[BFD][%s][RECV] BFD Control received: %s', hex(self._local_discr), six.binary_type(bfd_pkt))
self._remote_discr = bfd_pkt.my_discr
self._remote_state = bfd_pkt.state
self._remote_demand_mode = bfd_pkt.flags & bfd.BFD_FLAG_DEMAND
if self._remote_min_rx_interval != bfd_pkt.required_min_rx_interval:
self._remote_min_rx_interval = bfd_pkt.required_min_rx_interval
# Update transmit interval (RFC5880 Section 6.8.2.)
self._update_xmit_period() # depends on [control=['if'], data=[]]
# TODO: Echo function (RFC5880 Page 35)
if bfd_pkt.flags & bfd.BFD_FLAG_FINAL and self._is_polling:
self._is_polling = False # depends on [control=['if'], data=[]]
# Check and update the session state (RFC5880 Page 35)
if self._session_state == bfd.BFD_STATE_ADMIN_DOWN:
return # depends on [control=['if'], data=[]]
if bfd_pkt.state == bfd.BFD_STATE_ADMIN_DOWN:
if self._session_state != bfd.BFD_STATE_DOWN:
self._set_state(bfd.BFD_STATE_DOWN, bfd.BFD_DIAG_NEIG_SIG_SESS_DOWN) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif self._session_state == bfd.BFD_STATE_DOWN:
if bfd_pkt.state == bfd.BFD_STATE_DOWN:
self._set_state(bfd.BFD_STATE_INIT) # depends on [control=['if'], data=[]]
elif bfd_pkt.state == bfd.BFD_STATE_INIT:
self._set_state(bfd.BFD_STATE_UP) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif self._session_state == bfd.BFD_STATE_INIT:
if bfd_pkt.state in [bfd.BFD_STATE_INIT, bfd.BFD_STATE_UP]:
self._set_state(bfd.BFD_STATE_UP) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif bfd_pkt.state == bfd.BFD_STATE_DOWN:
self._set_state(bfd.BFD_STATE_DOWN, bfd.BFD_DIAG_NEIG_SIG_SESS_DOWN) # depends on [control=['if'], data=[]]
# TODO: Demand mode support.
if self._remote_demand_mode and self._session_state == bfd.BFD_STATE_UP and (self._remote_session_state == bfd.BFD_STATE_UP):
self._enable_send = False # depends on [control=['if'], data=[]]
if not self._remote_demand_mode or self._session_state != bfd.BFD_STATE_UP or self._remote_session_state != bfd.BFD_STATE_UP:
if not self._enable_send:
self._enable_send = True
hub.spawn(self._send_loop) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Update the detection time (RFC5880 Section 6.8.4.)
if self._detect_time == 0:
self._detect_time = bfd_pkt.desired_min_tx_interval * bfd_pkt.detect_mult / 1000000.0
# Start the timeout loop.
hub.spawn(self._recv_timeout_loop) # depends on [control=['if'], data=[]]
if bfd_pkt.flags & bfd.BFD_FLAG_POLL:
self._pending_final = True
self._detect_time = bfd_pkt.desired_min_tx_interval * bfd_pkt.detect_mult / 1000000.0 # depends on [control=['if'], data=[]]
# Update the remote authentication sequence number.
if self._auth_type in [bfd.BFD_AUTH_KEYED_MD5, bfd.BFD_AUTH_METICULOUS_KEYED_MD5, bfd.BFD_AUTH_KEYED_SHA1, bfd.BFD_AUTH_METICULOUS_KEYED_SHA1]:
self._rcv_auth_seq = bfd_pkt.auth_cls.seq
self._auth_seq_known = 1 # depends on [control=['if'], data=[]]
# Set the lock.
if self._lock is not None:
self._lock.set() # depends on [control=['if'], data=[]] |
def postmap(self, path, valuemap=None, query=None):
    """
    POST to *path* (as with post()) and decode the response as JSON.

    The response content type must be "application/json"; the body is
    parsed and returned as with getmap(). Raises HttpError when the
    content type is wrong or the body cannot be decoded.
    """
    status, body, content_type = self.post(path, valuemap, query)
    if content_type != 'application/json':
        self.log.error("Expecting JSON from POST of '%s', got '%s'", self.lastpath, content_type)
        raise HttpError(code=400, content_type='text/plain',
                        content='Remote returned invalid content type: ' + content_type)
    try:
        return json.loads(body)
    except Exception as exc:  # pragma: no cover
        self.log.error("Could not load JSON content from POST %r -- %s", self.lastpath, exc)
        raise HttpError(code=400, content_type='text/plain',
                        content='Could not load JSON content')
constant[
Performs a POST request as per post() but the response content type
is required to be "application/json" and is processed as with getmap().
]
<ast.Tuple object at 0x7da18bc72380> assign[=] call[name[self].post, parameter[name[path], name[valuemap], name[query]]]
if compare[name[ctype] not_equal[!=] constant[application/json]] begin[:]
call[name[self].log.error, parameter[constant[Expecting JSON from POST of '%s', got '%s'], name[self].lastpath, name[ctype]]]
<ast.Raise object at 0x7da204565e10>
<ast.Try object at 0x7da204567640>
return[name[result]] | keyword[def] identifier[postmap] ( identifier[self] , identifier[path] , identifier[valuemap] = keyword[None] , identifier[query] = keyword[None] ):
literal[string]
identifier[code] , identifier[data] , identifier[ctype] = identifier[self] . identifier[post] ( identifier[path] , identifier[valuemap] , identifier[query] )
keyword[if] identifier[ctype] != literal[string] :
identifier[self] . identifier[log] . identifier[error] ( literal[string] , identifier[self] . identifier[lastpath] , identifier[ctype] )
keyword[raise] identifier[HttpError] ( identifier[code] = literal[int] , identifier[content_type] = literal[string] , identifier[content] = literal[string] + identifier[ctype] )
keyword[try] :
identifier[result] = identifier[json] . identifier[loads] ( identifier[data] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[log] . identifier[error] ( literal[string] , identifier[self] . identifier[lastpath] , identifier[e] )
keyword[raise] identifier[HttpError] ( identifier[code] = literal[int] , identifier[content_type] = literal[string] , identifier[content] = literal[string] )
keyword[return] identifier[result] | def postmap(self, path, valuemap=None, query=None):
"""
Performs a POST request as per post() but the response content type
is required to be "application/json" and is processed as with getmap().
"""
(code, data, ctype) = self.post(path, valuemap, query)
if ctype != 'application/json':
self.log.error("Expecting JSON from POST of '%s', got '%s'", self.lastpath, ctype)
raise HttpError(code=400, content_type='text/plain', content='Remote returned invalid content type: ' + ctype) # depends on [control=['if'], data=['ctype']]
try:
result = json.loads(data) # depends on [control=['try'], data=[]]
except Exception as e: # pragma: no cover
self.log.error('Could not load JSON content from POST %r -- %s', self.lastpath, e)
raise HttpError(code=400, content_type='text/plain', content='Could not load JSON content') # depends on [control=['except'], data=['e']]
return result |
def granted(self, lock):
    '''Return True if a previously requested lock has been granted'''
    unit = hookenv.local_unit()
    requested_ts = self.requests[unit].get(lock)
    # No outstanding request for this lock means nothing can be granted.
    if not requested_ts:
        return False
    # Granted only when the grant timestamp matches our request's.
    return self.grants.get(unit, {}).get(lock) == requested_ts
constant[Return True if a previously requested lock has been granted]
variable[unit] assign[=] call[name[hookenv].local_unit, parameter[]]
variable[ts] assign[=] call[call[name[self].requests][name[unit]].get, parameter[name[lock]]]
if <ast.BoolOp object at 0x7da1b1219d50> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[granted] ( identifier[self] , identifier[lock] ):
literal[string]
identifier[unit] = identifier[hookenv] . identifier[local_unit] ()
identifier[ts] = identifier[self] . identifier[requests] [ identifier[unit] ]. identifier[get] ( identifier[lock] )
keyword[if] identifier[ts] keyword[and] identifier[self] . identifier[grants] . identifier[get] ( identifier[unit] ,{}). identifier[get] ( identifier[lock] )== identifier[ts] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def granted(self, lock):
"""Return True if a previously requested lock has been granted"""
unit = hookenv.local_unit()
ts = self.requests[unit].get(lock)
if ts and self.grants.get(unit, {}).get(lock) == ts:
return True # depends on [control=['if'], data=[]]
return False |
def write_layout(_path):
    """
    Write a valid gentoo layout file to :path:.

    Creates the portage metadata directory and layout.conf inside the
    uchroot, then writes the master-repository declaration to *_path*.

    Args:
        _path - The output path of the layout.conf
    """
    path.mkdir_uchroot("/etc/portage/metadata")
    path.mkfile_uchroot("/etc/portage/metadata/layout.conf")
    with open(_path, 'w') as conf_file:
        conf_file.write('masters = gentoo')
constant[
Write a valid gentoo layout file to :path:.
Args:
path - The output path of the layout.conf
]
call[name[path].mkdir_uchroot, parameter[constant[/etc/portage/metadata]]]
call[name[path].mkfile_uchroot, parameter[constant[/etc/portage/metadata/layout.conf]]]
with call[name[open], parameter[name[_path], constant[w]]] begin[:]
variable[lines] assign[=] constant[masters = gentoo]
call[name[layoutconf].write, parameter[name[lines]]] | keyword[def] identifier[write_layout] ( identifier[_path] ):
literal[string]
identifier[path] . identifier[mkdir_uchroot] ( literal[string] )
identifier[path] . identifier[mkfile_uchroot] ( literal[string] )
keyword[with] identifier[open] ( identifier[_path] , literal[string] ) keyword[as] identifier[layoutconf] :
identifier[lines] = literal[string]
identifier[layoutconf] . identifier[write] ( identifier[lines] ) | def write_layout(_path):
"""
Write a valid gentoo layout file to :path:.
Args:
path - The output path of the layout.conf
"""
path.mkdir_uchroot('/etc/portage/metadata')
path.mkfile_uchroot('/etc/portage/metadata/layout.conf')
with open(_path, 'w') as layoutconf:
lines = 'masters = gentoo'
layoutconf.write(lines) # depends on [control=['with'], data=['layoutconf']] |
def end_y(self):
    """
    Return the Y-position of the end point of this connector, in English
    Metric Units (as a |Length| object).
    """
    cxnSp = self._element
    # A vertically flipped shape has its end point at the top edge (y);
    # otherwise the end point sits at the bottom edge (y + cy).
    if cxnSp.flipV:
        y_pos = cxnSp.y
    else:
        y_pos = cxnSp.y + cxnSp.cy
    return Emu(y_pos)
constant[
Return the Y-position of the end point of this connector, in English
Metric Units (as a |Length| object).
]
variable[cxnSp] assign[=] name[self]._element
<ast.Tuple object at 0x7da204963bb0> assign[=] tuple[[<ast.Attribute object at 0x7da2049617b0>, <ast.Attribute object at 0x7da204963130>, <ast.Attribute object at 0x7da2049630a0>]]
variable[end_y] assign[=] <ast.IfExp object at 0x7da204962ef0>
return[call[name[Emu], parameter[name[end_y]]]] | keyword[def] identifier[end_y] ( identifier[self] ):
literal[string]
identifier[cxnSp] = identifier[self] . identifier[_element]
identifier[y] , identifier[cy] , identifier[flipV] = identifier[cxnSp] . identifier[y] , identifier[cxnSp] . identifier[cy] , identifier[cxnSp] . identifier[flipV]
identifier[end_y] = identifier[y] keyword[if] identifier[flipV] keyword[else] identifier[y] + identifier[cy]
keyword[return] identifier[Emu] ( identifier[end_y] ) | def end_y(self):
"""
Return the Y-position of the end point of this connector, in English
Metric Units (as a |Length| object).
"""
cxnSp = self._element
(y, cy, flipV) = (cxnSp.y, cxnSp.cy, cxnSp.flipV)
end_y = y if flipV else y + cy
return Emu(end_y) |
def read_element_using_argtuple(self, argtuple):
    """
    Look up a node in cfg_dict using a tuple of keys.

    Walks cfg_dict by applying each key from *argtuple* in turn and
    returns the node reached. Returns None for the delimited format,
    which has no nested dict structure to traverse.
    """
    # doesn't support DELIMITED, only dict-based formats
    if self.format == FMT_DELIMITED:
        return None
    current = self.cfg_dict
    for step in argtuple:
        current = current[step]
    return current
constant[
takes a tuple of keys
returns node found in cfg_dict
found by traversing cfg_dict by successive
application of keys from element_path
]
if compare[name[self].format equal[==] name[FMT_DELIMITED]] begin[:]
return[constant[None]]
variable[node] assign[=] name[self].cfg_dict
for taget[name[key]] in starred[name[argtuple]] begin[:]
variable[node] assign[=] call[name[node]][name[key]]
return[name[node]] | keyword[def] identifier[read_element_using_argtuple] ( identifier[self] , identifier[argtuple] ):
literal[string]
keyword[if] identifier[self] . identifier[format] == identifier[FMT_DELIMITED] :
keyword[return] keyword[None]
identifier[node] = identifier[self] . identifier[cfg_dict]
keyword[for] identifier[key] keyword[in] identifier[argtuple] :
identifier[node] = identifier[node] [ identifier[key] ]
keyword[return] identifier[node] | def read_element_using_argtuple(self, argtuple):
"""
takes a tuple of keys
returns node found in cfg_dict
found by traversing cfg_dict by successive
application of keys from element_path
""" # doesn't support DELIMITED, only dict-based formats
if self.format == FMT_DELIMITED:
return None # depends on [control=['if'], data=[]]
node = self.cfg_dict
for key in argtuple:
node = node[key] # depends on [control=['for'], data=['key']]
return node |
def with_config(loop=None):
    """
    :return: an instance of the txaio API with the given
    configuration. This won't affect anything using the 'gloabl'
    config nor other instances created using this function.
    If you need to customize txaio configuration separately (e.g. to
    use multiple event-loops in asyncio), you can take code like this:
    import txaio
    class FunTimes(object):
    def something_async(self):
    return txaio.call_later(1, lambda: 'some result')
    and instead do this:
    import txaio
    class FunTimes(object):
    txaio = txaio
    def something_async(self):
    # this will run in the local/new event loop created in the constructor
    return self.txaio.call_later(1, lambda: 'some result')
    fun0 = FunTimes()
    fun1 = FunTimes()
    fun1.txaio = txaio.with_config(loop=asyncio.new_event_loop())
    So `fun1` will run its futures on the newly-created event loop,
    while `fun0` will work just as it did before this `with_config`
    method was introduced (after 2.6.2).
    """
    # Build a fresh, private configuration so the global config is untouched.
    config = _Config()
    if loop is not None:
        config.loop = loop
    return _AsyncioApi(config)
constant[
:return: an instance of the txaio API with the given
configuration. This won't affect anything using the 'gloabl'
config nor other instances created using this function.
If you need to customize txaio configuration separately (e.g. to
use multiple event-loops in asyncio), you can take code like this:
import txaio
class FunTimes(object):
def something_async(self):
return txaio.call_later(1, lambda: 'some result')
and instead do this:
import txaio
class FunTimes(object):
txaio = txaio
def something_async(self):
# this will run in the local/new event loop created in the constructor
return self.txaio.call_later(1, lambda: 'some result')
fun0 = FunTimes()
fun1 = FunTimes()
fun1.txaio = txaio.with_config(loop=asyncio.new_event_loop())
So `fun1` will run its futures on the newly-created event loop,
while `fun0` will work just as it did before this `with_config`
method was introduced (after 2.6.2).
]
variable[cfg] assign[=] call[name[_Config], parameter[]]
if compare[name[loop] is_not constant[None]] begin[:]
name[cfg].loop assign[=] name[loop]
return[call[name[_AsyncioApi], parameter[name[cfg]]]] | keyword[def] identifier[with_config] ( identifier[loop] = keyword[None] ):
literal[string]
identifier[cfg] = identifier[_Config] ()
keyword[if] identifier[loop] keyword[is] keyword[not] keyword[None] :
identifier[cfg] . identifier[loop] = identifier[loop]
keyword[return] identifier[_AsyncioApi] ( identifier[cfg] ) | def with_config(loop=None):
"""
:return: an instance of the txaio API with the given
configuration. This won't affect anything using the 'gloabl'
config nor other instances created using this function.
If you need to customize txaio configuration separately (e.g. to
use multiple event-loops in asyncio), you can take code like this:
import txaio
class FunTimes(object):
def something_async(self):
return txaio.call_later(1, lambda: 'some result')
and instead do this:
import txaio
class FunTimes(object):
txaio = txaio
def something_async(self):
# this will run in the local/new event loop created in the constructor
return self.txaio.call_later(1, lambda: 'some result')
fun0 = FunTimes()
fun1 = FunTimes()
fun1.txaio = txaio.with_config(loop=asyncio.new_event_loop())
So `fun1` will run its futures on the newly-created event loop,
while `fun0` will work just as it did before this `with_config`
method was introduced (after 2.6.2).
"""
cfg = _Config()
if loop is not None:
cfg.loop = loop # depends on [control=['if'], data=['loop']]
return _AsyncioApi(cfg) |
def rhyme_scheme(self):
    """
    Calculates the rhyme scheme of a given stanza. It doesn't yet support
    phonetical rhyming (homophones) and thus is still error-prone
    Example:
    >>> stanza = ['Ein rîchiu küneginne, frou Uote ir muoter hiez.', 'ir vater der hiez Dancrât, der in diu erbe liez', 'sît nâch sîme lebene, ein ellens rîcher man,', 'der ouch in sîner jugende grôzer êren vil gewan.']
    >>> S = Verse(stanza)
    >>> S.rhyme_scheme()
    'AABB'
    """
    suffix_to_letter = dict()
    scheme = []
    # Letter codes start one past chr(64) ('@'), i.e. at 'A'.
    next_code = 64
    for verse_line in self.syllabified:
        # Last three characters of the final syllable of the final word.
        tail = verse_line[-1][-1][-3:]
        for known in suffix_to_letter.keys():
            if known.endswith(tail) or tail.endswith(known):
                suffix_to_letter[tail] = suffix_to_letter[known]
                break
        if tail in suffix_to_letter:
            scheme.append(suffix_to_letter[tail])
        else:
            next_code += 1
            letter = chr(next_code)
            suffix_to_letter[tail] = letter
            scheme.append(letter)
    return "".join(scheme)
constant[
Calculates the rhyme scheme of a given stanza. It doesn't yet support
phonetical rhyming (homophones) and thus is still error-prone
Example:
>>> stanza = ['Ein rîchiu küneginne, frou Uote ir muoter hiez.', 'ir vater der hiez Dancrât, der in diu erbe liez', 'sît nâch sîme lebene, ein ellens rîcher man,', 'der ouch in sîner jugende grôzer êren vil gewan.']
>>> S = Verse(stanza)
>>> S.rhyme_scheme()
'AABB'
]
variable[rhymes] assign[=] call[name[dict], parameter[]]
variable[i] assign[=] constant[64]
variable[strs] assign[=] constant[]
for taget[name[line]] in starred[name[self].syllabified] begin[:]
variable[w] assign[=] call[call[call[name[line]][<ast.UnaryOp object at 0x7da204623fa0>]][<ast.UnaryOp object at 0x7da204622ce0>]][<ast.Slice object at 0x7da204623d00>]
for taget[name[r]] in starred[call[name[rhymes].keys, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da204623640> begin[:]
call[name[rhymes]][name[w]] assign[=] call[name[rhymes]][name[r]]
break
if compare[name[w] in name[rhymes]] begin[:]
<ast.AugAssign object at 0x7da20e955750>
return[name[strs]] | keyword[def] identifier[rhyme_scheme] ( identifier[self] ):
literal[string]
identifier[rhymes] = identifier[dict] ()
identifier[i] = literal[int]
identifier[strs] = literal[string]
keyword[for] identifier[line] keyword[in] identifier[self] . identifier[syllabified] :
identifier[w] = identifier[line] [- literal[int] ][- literal[int] ][- literal[int] :]
keyword[for] identifier[r] keyword[in] identifier[rhymes] . identifier[keys] ():
keyword[if] identifier[r] . identifier[endswith] ( identifier[w] ) keyword[or] identifier[w] . identifier[endswith] ( identifier[r] ):
identifier[rhymes] [ identifier[w] ]= identifier[rhymes] [ identifier[r] ]
keyword[break]
keyword[if] identifier[w] keyword[in] identifier[rhymes] :
identifier[strs] += identifier[rhymes] [ identifier[w] ]
keyword[else] :
identifier[i] += literal[int]
identifier[rhymes] [ identifier[w] ]= identifier[chr] ( identifier[i] )
identifier[strs] += identifier[chr] ( identifier[i] )
keyword[return] identifier[strs] | def rhyme_scheme(self):
"""
Calculates the rhyme scheme of a given stanza. It doesn't yet support
phonetical rhyming (homophones) and thus is still error-prone
Example:
>>> stanza = ['Ein rîchiu küneginne, frou Uote ir muoter hiez.', 'ir vater der hiez Dancrât, der in diu erbe liez', 'sît nâch sîme lebene, ein ellens rîcher man,', 'der ouch in sîner jugende grôzer êren vil gewan.']
>>> S = Verse(stanza)
>>> S.rhyme_scheme()
'AABB'
"""
rhymes = dict()
i = 64
strs = ''
for line in self.syllabified:
w = line[-1][-1][-3:]
for r in rhymes.keys():
if r.endswith(w) or w.endswith(r):
rhymes[w] = rhymes[r]
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']]
if w in rhymes:
strs += rhymes[w] # depends on [control=['if'], data=['w', 'rhymes']]
else:
i += 1
rhymes[w] = chr(i)
strs += chr(i) # depends on [control=['for'], data=['line']]
return strs |
def sphrec(r, colat, lon):
    """
    Convert from spherical coordinates to rectangular coordinates.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sphrec_c.html
    :param r: Distance of a point from the origin.
    :type r: float
    :param colat: Angle of the point from the positive Z-axis.
    :type colat: float
    :param lon: Angle of the point from the XZ plane in radians.
    :type lon: float
    :return: Rectangular coordinates of the point.
    :rtype: 3-Element Array of floats
    """
    # Output buffer for the 3 rectangular coordinates.
    rectan = stypes.emptyDoubleVector(3)
    # Convert the Python floats to C doubles directly at the call site.
    libspice.sphrec_c(ctypes.c_double(r),
                      ctypes.c_double(colat),
                      ctypes.c_double(lon),
                      rectan)
    return stypes.cVectorToPython(rectan)
constant[
Convert from spherical coordinates to rectangular coordinates.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sphrec_c.html
:param r: Distance of a point from the origin.
:type r: float
:param colat: Angle of the point from the positive Z-axis.
:type colat: float
:param lon: Angle of the point from the XZ plane in radians.
:type lon: float
:return: Rectangular coordinates of the point.
:rtype: 3-Element Array of floats
]
variable[r] assign[=] call[name[ctypes].c_double, parameter[name[r]]]
variable[colat] assign[=] call[name[ctypes].c_double, parameter[name[colat]]]
variable[lon] assign[=] call[name[ctypes].c_double, parameter[name[lon]]]
variable[rectan] assign[=] call[name[stypes].emptyDoubleVector, parameter[constant[3]]]
call[name[libspice].sphrec_c, parameter[name[r], name[colat], name[lon], name[rectan]]]
return[call[name[stypes].cVectorToPython, parameter[name[rectan]]]] | keyword[def] identifier[sphrec] ( identifier[r] , identifier[colat] , identifier[lon] ):
literal[string]
identifier[r] = identifier[ctypes] . identifier[c_double] ( identifier[r] )
identifier[colat] = identifier[ctypes] . identifier[c_double] ( identifier[colat] )
identifier[lon] = identifier[ctypes] . identifier[c_double] ( identifier[lon] )
identifier[rectan] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] )
identifier[libspice] . identifier[sphrec_c] ( identifier[r] , identifier[colat] , identifier[lon] , identifier[rectan] )
keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[rectan] ) | def sphrec(r, colat, lon):
"""
Convert from spherical coordinates to rectangular coordinates.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sphrec_c.html
:param r: Distance of a point from the origin.
:type r: float
:param colat: Angle of the point from the positive Z-axis.
:type colat: float
:param lon: Angle of the point from the XZ plane in radians.
:type lon: float
:return: Rectangular coordinates of the point.
:rtype: 3-Element Array of floats
"""
r = ctypes.c_double(r)
colat = ctypes.c_double(colat)
lon = ctypes.c_double(lon)
rectan = stypes.emptyDoubleVector(3)
libspice.sphrec_c(r, colat, lon, rectan)
return stypes.cVectorToPython(rectan) |
def change(ui, repo, *pats, **opts):
 """create, edit or delete a change list
 Create, edit or delete a change list.
 A change list is a group of files to be reviewed and submitted together,
 plus a textual description of the change.
 Change lists are referred to by simple alphanumeric names.
 Changes must be reviewed before they can be submitted.
 In the absence of options, the change command opens the
 change list for editing in the default editor.
 Deleting a change with the -d or -D flag does not affect
 the contents of the files listed in that change. To revert
 the files listed in a change, use
 hg revert @123456
 before running hg change -d 123456.
 """
 if codereview_disabled:
  raise hg_util.Abort(codereview_disabled)
 # dirty maps CL objects to True when they must be flushed to disk below.
 dirty = {}
 if len(pats) > 0 and GoodCLName(pats[0]):
  # First argument names an existing CL: load it (web=True also pulls
  # current state from the code review server).
  name = pats[0]
  if len(pats) != 1:
   raise hg_util.Abort("cannot specify CL name and file patterns")
  pats = pats[1:]
  cl, err = LoadCL(ui, repo, name, web=True)
  if err != '':
   raise hg_util.Abort(err)
  if not cl.local and (opts["stdin"] or not opts["stdout"]):
   raise hg_util.Abort("cannot change non-local CL " + name)
 else:
  # No CL name given: start a brand-new change list.
  name = "new"
  cl = CL("new")
  if repo[None].branch() != "default":
   raise hg_util.Abort("cannot create CL outside default branch; switch with 'hg update default'")
  dirty[cl] = True
 # Files matching the patterns that are not already claimed by another CL.
 files = ChangedFiles(ui, repo, pats, taken=Taken(ui, repo))
 if opts["delete"] or opts["deletelocal"]:
  # -d deletes the CL on the server as well; -D only removes the local copy.
  if opts["delete"] and opts["deletelocal"]:
   raise hg_util.Abort("cannot use -d and -D together")
  flag = "-d"
  if opts["deletelocal"]:
   flag = "-D"
  if name == "new":
   raise hg_util.Abort("cannot use "+flag+" with file patterns")
  if opts["stdin"] or opts["stdout"]:
   raise hg_util.Abort("cannot use "+flag+" with -i or -o")
  if not cl.local:
   raise hg_util.Abort("cannot change non-local CL " + name)
  if opts["delete"]:
   if cl.copied_from:
    raise hg_util.Abort("original author must delete CL; hg change -D will remove locally")
   # Mark the CL abandoned and closed on the review server before
   # deleting the local record.
   PostMessage(ui, cl.name, "*** Abandoned ***", send_mail=cl.mailed)
   EditDesc(cl.name, closed=True, private=cl.private)
  cl.Delete(ui, repo)
  return
 if opts["stdin"]:
  # -i: read the CL description from standard input instead of an editor,
  # then copy over only the fields present in the parsed input.
  s = sys.stdin.read()
  clx, line, err = ParseCL(s, name)
  if err != '':
   raise hg_util.Abort("error parsing change list: line %d: %s" % (line, err))
  if clx.desc is not None:
   cl.desc = clx.desc;
   dirty[cl] = True
  if clx.reviewer is not None:
   cl.reviewer = clx.reviewer
   dirty[cl] = True
  if clx.cc is not None:
   cl.cc = clx.cc
   dirty[cl] = True
  if clx.files is not None:
   cl.files = clx.files
   dirty[cl] = True
  if clx.private != cl.private:
   cl.private = clx.private
   dirty[cl] = True
 if not opts["stdin"] and not opts["stdout"]:
  # Interactive path: open the CL text in the user's editor.
  if name == "new":
   cl.files = files
  err = EditCL(ui, repo, cl)
  if err != "":
   raise hg_util.Abort(err)
  dirty[cl] = True
 # Persist every modified CL; a newly created CL is also uploaded.
 for d, _ in dirty.items():
  name = d.name
  d.Flush(ui, repo)
  if name == "new":
   d.Upload(ui, repo, quiet=True)
 if opts["stdout"]:
  ui.write(cl.EditorText())
 elif opts["pending"]:
  ui.write(cl.PendingText())
 elif name == "new":
  if ui.quiet:
   ui.write(cl.name)
  else:
   ui.write("CL created: " + cl.url + "\n")
 return
constant[create, edit or delete a change list
Create, edit or delete a change list.
A change list is a group of files to be reviewed and submitted together,
plus a textual description of the change.
Change lists are referred to by simple alphanumeric names.
Changes must be reviewed before they can be submitted.
In the absence of options, the change command opens the
change list for editing in the default editor.
Deleting a change with the -d or -D flag does not affect
the contents of the files listed in that change. To revert
the files listed in a change, use
hg revert @123456
before running hg change -d 123456.
]
if name[codereview_disabled] begin[:]
<ast.Raise object at 0x7da2044c07c0>
variable[dirty] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da2044c06a0> begin[:]
variable[name] assign[=] call[name[pats]][constant[0]]
if compare[call[name[len], parameter[name[pats]]] not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da2044c2980>
variable[pats] assign[=] call[name[pats]][<ast.Slice object at 0x7da2044c2b30>]
<ast.Tuple object at 0x7da2044c1480> assign[=] call[name[LoadCL], parameter[name[ui], name[repo], name[name]]]
if compare[name[err] not_equal[!=] constant[]] begin[:]
<ast.Raise object at 0x7da2044c1300>
if <ast.BoolOp object at 0x7da2044c1e70> begin[:]
<ast.Raise object at 0x7da2044c1450>
if <ast.BoolOp object at 0x7da2044c3af0> begin[:]
if <ast.BoolOp object at 0x7da2044c0160> begin[:]
<ast.Raise object at 0x7da2044c0820>
variable[flag] assign[=] constant[-d]
if call[name[opts]][constant[deletelocal]] begin[:]
variable[flag] assign[=] constant[-D]
if compare[name[name] equal[==] constant[new]] begin[:]
<ast.Raise object at 0x7da2044c2e60>
if <ast.BoolOp object at 0x7da2044c35b0> begin[:]
<ast.Raise object at 0x7da2044c28f0>
if <ast.UnaryOp object at 0x7da2044c3580> begin[:]
<ast.Raise object at 0x7da2044c2bf0>
if call[name[opts]][constant[delete]] begin[:]
if name[cl].copied_from begin[:]
<ast.Raise object at 0x7da2044c0e20>
call[name[PostMessage], parameter[name[ui], name[cl].name, constant[*** Abandoned ***]]]
call[name[EditDesc], parameter[name[cl].name]]
call[name[cl].Delete, parameter[name[ui], name[repo]]]
return[None]
if call[name[opts]][constant[stdin]] begin[:]
variable[s] assign[=] call[name[sys].stdin.read, parameter[]]
<ast.Tuple object at 0x7da2044c0220> assign[=] call[name[ParseCL], parameter[name[s], name[name]]]
if compare[name[err] not_equal[!=] constant[]] begin[:]
<ast.Raise object at 0x7da2044c2260>
if compare[name[clx].desc is_not constant[None]] begin[:]
name[cl].desc assign[=] name[clx].desc
call[name[dirty]][name[cl]] assign[=] constant[True]
if compare[name[clx].reviewer is_not constant[None]] begin[:]
name[cl].reviewer assign[=] name[clx].reviewer
call[name[dirty]][name[cl]] assign[=] constant[True]
if compare[name[clx].cc is_not constant[None]] begin[:]
name[cl].cc assign[=] name[clx].cc
call[name[dirty]][name[cl]] assign[=] constant[True]
if compare[name[clx].files is_not constant[None]] begin[:]
name[cl].files assign[=] name[clx].files
call[name[dirty]][name[cl]] assign[=] constant[True]
if compare[name[clx].private not_equal[!=] name[cl].private] begin[:]
name[cl].private assign[=] name[clx].private
call[name[dirty]][name[cl]] assign[=] constant[True]
if <ast.BoolOp object at 0x7da204623820> begin[:]
if compare[name[name] equal[==] constant[new]] begin[:]
name[cl].files assign[=] name[files]
variable[err] assign[=] call[name[EditCL], parameter[name[ui], name[repo], name[cl]]]
if compare[name[err] not_equal[!=] constant[]] begin[:]
<ast.Raise object at 0x7da20c6c5090>
call[name[dirty]][name[cl]] assign[=] constant[True]
for taget[tuple[[<ast.Name object at 0x7da20c6c4fa0>, <ast.Name object at 0x7da20c6c5d80>]]] in starred[call[name[dirty].items, parameter[]]] begin[:]
variable[name] assign[=] name[d].name
call[name[d].Flush, parameter[name[ui], name[repo]]]
if compare[name[name] equal[==] constant[new]] begin[:]
call[name[d].Upload, parameter[name[ui], name[repo]]]
if call[name[opts]][constant[stdout]] begin[:]
call[name[ui].write, parameter[call[name[cl].EditorText, parameter[]]]]
return[None] | keyword[def] identifier[change] ( identifier[ui] , identifier[repo] ,* identifier[pats] ,** identifier[opts] ):
literal[string]
keyword[if] identifier[codereview_disabled] :
keyword[raise] identifier[hg_util] . identifier[Abort] ( identifier[codereview_disabled] )
identifier[dirty] ={}
keyword[if] identifier[len] ( identifier[pats] )> literal[int] keyword[and] identifier[GoodCLName] ( identifier[pats] [ literal[int] ]):
identifier[name] = identifier[pats] [ literal[int] ]
keyword[if] identifier[len] ( identifier[pats] )!= literal[int] :
keyword[raise] identifier[hg_util] . identifier[Abort] ( literal[string] )
identifier[pats] = identifier[pats] [ literal[int] :]
identifier[cl] , identifier[err] = identifier[LoadCL] ( identifier[ui] , identifier[repo] , identifier[name] , identifier[web] = keyword[True] )
keyword[if] identifier[err] != literal[string] :
keyword[raise] identifier[hg_util] . identifier[Abort] ( identifier[err] )
keyword[if] keyword[not] identifier[cl] . identifier[local] keyword[and] ( identifier[opts] [ literal[string] ] keyword[or] keyword[not] identifier[opts] [ literal[string] ]):
keyword[raise] identifier[hg_util] . identifier[Abort] ( literal[string] + identifier[name] )
keyword[else] :
identifier[name] = literal[string]
identifier[cl] = identifier[CL] ( literal[string] )
keyword[if] identifier[repo] [ keyword[None] ]. identifier[branch] ()!= literal[string] :
keyword[raise] identifier[hg_util] . identifier[Abort] ( literal[string] )
identifier[dirty] [ identifier[cl] ]= keyword[True]
identifier[files] = identifier[ChangedFiles] ( identifier[ui] , identifier[repo] , identifier[pats] , identifier[taken] = identifier[Taken] ( identifier[ui] , identifier[repo] ))
keyword[if] identifier[opts] [ literal[string] ] keyword[or] identifier[opts] [ literal[string] ]:
keyword[if] identifier[opts] [ literal[string] ] keyword[and] identifier[opts] [ literal[string] ]:
keyword[raise] identifier[hg_util] . identifier[Abort] ( literal[string] )
identifier[flag] = literal[string]
keyword[if] identifier[opts] [ literal[string] ]:
identifier[flag] = literal[string]
keyword[if] identifier[name] == literal[string] :
keyword[raise] identifier[hg_util] . identifier[Abort] ( literal[string] + identifier[flag] + literal[string] )
keyword[if] identifier[opts] [ literal[string] ] keyword[or] identifier[opts] [ literal[string] ]:
keyword[raise] identifier[hg_util] . identifier[Abort] ( literal[string] + identifier[flag] + literal[string] )
keyword[if] keyword[not] identifier[cl] . identifier[local] :
keyword[raise] identifier[hg_util] . identifier[Abort] ( literal[string] + identifier[name] )
keyword[if] identifier[opts] [ literal[string] ]:
keyword[if] identifier[cl] . identifier[copied_from] :
keyword[raise] identifier[hg_util] . identifier[Abort] ( literal[string] )
identifier[PostMessage] ( identifier[ui] , identifier[cl] . identifier[name] , literal[string] , identifier[send_mail] = identifier[cl] . identifier[mailed] )
identifier[EditDesc] ( identifier[cl] . identifier[name] , identifier[closed] = keyword[True] , identifier[private] = identifier[cl] . identifier[private] )
identifier[cl] . identifier[Delete] ( identifier[ui] , identifier[repo] )
keyword[return]
keyword[if] identifier[opts] [ literal[string] ]:
identifier[s] = identifier[sys] . identifier[stdin] . identifier[read] ()
identifier[clx] , identifier[line] , identifier[err] = identifier[ParseCL] ( identifier[s] , identifier[name] )
keyword[if] identifier[err] != literal[string] :
keyword[raise] identifier[hg_util] . identifier[Abort] ( literal[string] %( identifier[line] , identifier[err] ))
keyword[if] identifier[clx] . identifier[desc] keyword[is] keyword[not] keyword[None] :
identifier[cl] . identifier[desc] = identifier[clx] . identifier[desc] ;
identifier[dirty] [ identifier[cl] ]= keyword[True]
keyword[if] identifier[clx] . identifier[reviewer] keyword[is] keyword[not] keyword[None] :
identifier[cl] . identifier[reviewer] = identifier[clx] . identifier[reviewer]
identifier[dirty] [ identifier[cl] ]= keyword[True]
keyword[if] identifier[clx] . identifier[cc] keyword[is] keyword[not] keyword[None] :
identifier[cl] . identifier[cc] = identifier[clx] . identifier[cc]
identifier[dirty] [ identifier[cl] ]= keyword[True]
keyword[if] identifier[clx] . identifier[files] keyword[is] keyword[not] keyword[None] :
identifier[cl] . identifier[files] = identifier[clx] . identifier[files]
identifier[dirty] [ identifier[cl] ]= keyword[True]
keyword[if] identifier[clx] . identifier[private] != identifier[cl] . identifier[private] :
identifier[cl] . identifier[private] = identifier[clx] . identifier[private]
identifier[dirty] [ identifier[cl] ]= keyword[True]
keyword[if] keyword[not] identifier[opts] [ literal[string] ] keyword[and] keyword[not] identifier[opts] [ literal[string] ]:
keyword[if] identifier[name] == literal[string] :
identifier[cl] . identifier[files] = identifier[files]
identifier[err] = identifier[EditCL] ( identifier[ui] , identifier[repo] , identifier[cl] )
keyword[if] identifier[err] != literal[string] :
keyword[raise] identifier[hg_util] . identifier[Abort] ( identifier[err] )
identifier[dirty] [ identifier[cl] ]= keyword[True]
keyword[for] identifier[d] , identifier[_] keyword[in] identifier[dirty] . identifier[items] ():
identifier[name] = identifier[d] . identifier[name]
identifier[d] . identifier[Flush] ( identifier[ui] , identifier[repo] )
keyword[if] identifier[name] == literal[string] :
identifier[d] . identifier[Upload] ( identifier[ui] , identifier[repo] , identifier[quiet] = keyword[True] )
keyword[if] identifier[opts] [ literal[string] ]:
identifier[ui] . identifier[write] ( identifier[cl] . identifier[EditorText] ())
keyword[elif] identifier[opts] [ literal[string] ]:
identifier[ui] . identifier[write] ( identifier[cl] . identifier[PendingText] ())
keyword[elif] identifier[name] == literal[string] :
keyword[if] identifier[ui] . identifier[quiet] :
identifier[ui] . identifier[write] ( identifier[cl] . identifier[name] )
keyword[else] :
identifier[ui] . identifier[write] ( literal[string] + identifier[cl] . identifier[url] + literal[string] )
keyword[return] | def change(ui, repo, *pats, **opts):
"""create, edit or delete a change list
Create, edit or delete a change list.
A change list is a group of files to be reviewed and submitted together,
plus a textual description of the change.
Change lists are referred to by simple alphanumeric names.
Changes must be reviewed before they can be submitted.
In the absence of options, the change command opens the
change list for editing in the default editor.
Deleting a change with the -d or -D flag does not affect
the contents of the files listed in that change. To revert
the files listed in a change, use
hg revert @123456
before running hg change -d 123456.
"""
if codereview_disabled:
raise hg_util.Abort(codereview_disabled) # depends on [control=['if'], data=[]]
dirty = {}
if len(pats) > 0 and GoodCLName(pats[0]):
name = pats[0]
if len(pats) != 1:
raise hg_util.Abort('cannot specify CL name and file patterns') # depends on [control=['if'], data=[]]
pats = pats[1:]
(cl, err) = LoadCL(ui, repo, name, web=True)
if err != '':
raise hg_util.Abort(err) # depends on [control=['if'], data=['err']]
if not cl.local and (opts['stdin'] or not opts['stdout']):
raise hg_util.Abort('cannot change non-local CL ' + name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
name = 'new'
cl = CL('new')
if repo[None].branch() != 'default':
raise hg_util.Abort("cannot create CL outside default branch; switch with 'hg update default'") # depends on [control=['if'], data=[]]
dirty[cl] = True
files = ChangedFiles(ui, repo, pats, taken=Taken(ui, repo))
if opts['delete'] or opts['deletelocal']:
if opts['delete'] and opts['deletelocal']:
raise hg_util.Abort('cannot use -d and -D together') # depends on [control=['if'], data=[]]
flag = '-d'
if opts['deletelocal']:
flag = '-D' # depends on [control=['if'], data=[]]
if name == 'new':
raise hg_util.Abort('cannot use ' + flag + ' with file patterns') # depends on [control=['if'], data=[]]
if opts['stdin'] or opts['stdout']:
raise hg_util.Abort('cannot use ' + flag + ' with -i or -o') # depends on [control=['if'], data=[]]
if not cl.local:
raise hg_util.Abort('cannot change non-local CL ' + name) # depends on [control=['if'], data=[]]
if opts['delete']:
if cl.copied_from:
raise hg_util.Abort('original author must delete CL; hg change -D will remove locally') # depends on [control=['if'], data=[]]
PostMessage(ui, cl.name, '*** Abandoned ***', send_mail=cl.mailed)
EditDesc(cl.name, closed=True, private=cl.private) # depends on [control=['if'], data=[]]
cl.Delete(ui, repo)
return # depends on [control=['if'], data=[]]
if opts['stdin']:
s = sys.stdin.read()
(clx, line, err) = ParseCL(s, name)
if err != '':
raise hg_util.Abort('error parsing change list: line %d: %s' % (line, err)) # depends on [control=['if'], data=['err']]
if clx.desc is not None:
cl.desc = clx.desc
dirty[cl] = True # depends on [control=['if'], data=[]]
if clx.reviewer is not None:
cl.reviewer = clx.reviewer
dirty[cl] = True # depends on [control=['if'], data=[]]
if clx.cc is not None:
cl.cc = clx.cc
dirty[cl] = True # depends on [control=['if'], data=[]]
if clx.files is not None:
cl.files = clx.files
dirty[cl] = True # depends on [control=['if'], data=[]]
if clx.private != cl.private:
cl.private = clx.private
dirty[cl] = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not opts['stdin'] and (not opts['stdout']):
if name == 'new':
cl.files = files # depends on [control=['if'], data=[]]
err = EditCL(ui, repo, cl)
if err != '':
raise hg_util.Abort(err) # depends on [control=['if'], data=['err']]
dirty[cl] = True # depends on [control=['if'], data=[]]
for (d, _) in dirty.items():
name = d.name
d.Flush(ui, repo)
if name == 'new':
d.Upload(ui, repo, quiet=True) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if opts['stdout']:
ui.write(cl.EditorText()) # depends on [control=['if'], data=[]]
elif opts['pending']:
ui.write(cl.PendingText()) # depends on [control=['if'], data=[]]
elif name == 'new':
if ui.quiet:
ui.write(cl.name) # depends on [control=['if'], data=[]]
else:
ui.write('CL created: ' + cl.url + '\n') # depends on [control=['if'], data=[]]
return |
def notify_listeners(self, data: Optional[_ListenableDataType]=_NO_DATA_MARKER):
    """
    Invoke every registered event listener, forwarding the given data when
    it was supplied.

    :param data: value handed to each listener; when the caller passed
        nothing, listeners are invoked with no arguments at all
    """
    # Decide once whether a payload was supplied, rather than per listener.
    has_payload = data is not Listenable._NO_DATA_MARKER
    for callback in self._listeners:
        if has_payload:
            callback(data)
        else:
            callback()
constant[
Notify event listeners, passing them the given data (if any).
:param data: the data to pass to the event listeners
]
for taget[name[listener]] in starred[name[self]._listeners] begin[:]
if compare[name[data] is_not name[Listenable]._NO_DATA_MARKER] begin[:]
call[name[listener], parameter[name[data]]] | keyword[def] identifier[notify_listeners] ( identifier[self] , identifier[data] : identifier[Optional] [ identifier[_ListenableDataType] ]= identifier[_NO_DATA_MARKER] ):
literal[string]
keyword[for] identifier[listener] keyword[in] identifier[self] . identifier[_listeners] :
keyword[if] identifier[data] keyword[is] keyword[not] identifier[Listenable] . identifier[_NO_DATA_MARKER] :
identifier[listener] ( identifier[data] )
keyword[else] :
identifier[listener] () | def notify_listeners(self, data: Optional[_ListenableDataType]=_NO_DATA_MARKER):
"""
Notify event listeners, passing them the given data (if any).
:param data: the data to pass to the event listeners
"""
for listener in self._listeners:
if data is not Listenable._NO_DATA_MARKER:
listener(data) # depends on [control=['if'], data=['data']]
else:
listener() # depends on [control=['for'], data=['listener']] |
def serialize(cls, **properties):
    """
    Instantiate *cls* (a Protobuf message class), assign each keyword
    argument as a field on the new message, and return the serialized
    wire-format bytes.

    :param cls: Protobuf message class to instantiate
    :param properties: field name/value pairs to set on the message
    :return: the message serialized via ``SerializeToString()``
    """
    obj = cls()
    # items() (not the Python-2-only iteritems()) keeps this working on
    # both Python 2 and Python 3.
    for k, v in properties.items():
        log.debug("%s.%s = %r", cls.__name__, k, v)
        setattr(obj, k, v)
    return obj.SerializeToString()
constant[
With a Protobuf class and properties as keyword arguments, sets all the
properties on a new instance of the class and serializes the resulting
value.
]
variable[obj] assign[=] call[name[cls], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b0e3bf10>, <ast.Name object at 0x7da1b0e38880>]]] in starred[call[name[properties].iteritems, parameter[]]] begin[:]
call[name[log].debug, parameter[constant[%s.%s = %r], name[cls].__name__, name[k], name[v]]]
call[name[setattr], parameter[name[obj], name[k], name[v]]]
return[call[name[obj].SerializeToString, parameter[]]] | keyword[def] identifier[serialize] ( identifier[cls] ,** identifier[properties] ):
literal[string]
identifier[obj] = identifier[cls] ()
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[properties] . identifier[iteritems] ():
identifier[log] . identifier[debug] ( literal[string] , identifier[cls] . identifier[__name__] , identifier[k] , identifier[v] )
identifier[setattr] ( identifier[obj] , identifier[k] , identifier[v] )
keyword[return] identifier[obj] . identifier[SerializeToString] () | def serialize(cls, **properties):
"""
With a Protobuf class and properties as keyword arguments, sets all the
properties on a new instance of the class and serializes the resulting
value.
"""
obj = cls()
for (k, v) in properties.iteritems():
log.debug('%s.%s = %r', cls.__name__, k, v)
setattr(obj, k, v) # depends on [control=['for'], data=[]]
return obj.SerializeToString() |
def validate(self):
    """
    Run every configured validator against the Amazon S3 object.

    Validation failures are collected into :attr:`errors`. When the file
    passes all checks and its key still carries the configured
    ``AWS_UNVALIDATED_PREFIX``, that prefix is removed from the key.

    :return: ``True`` when no validator reported an error.
    """
    for check in self.validators:
        try:
            check(self.obj)
        except ValidationError as exc:
            self.errors.append(exc.error)
    # Only a fully valid file is promoted out of the unvalidated prefix.
    if not self.errors and self._has_unvalidated_prefix():
        self._move_to_validated()
    return len(self.errors) == 0
constant[
Validates the given Amazon S3 file with :attr:`validators`. If errors
occur they are appended to :attr:`errors`. If the file is valid and a
`AWS_UNVALIDATED_PREFIX` config is present, its value will be removed
from the file key.
:return: a boolean indicating if the file vas valid.
]
for taget[name[validator]] in starred[name[self].validators] begin[:]
<ast.Try object at 0x7da18bc701c0>
if <ast.BoolOp object at 0x7da18bc722f0> begin[:]
call[name[self]._move_to_validated, parameter[]]
return[<ast.UnaryOp object at 0x7da18bc706a0>] | keyword[def] identifier[validate] ( identifier[self] ):
literal[string]
keyword[for] identifier[validator] keyword[in] identifier[self] . identifier[validators] :
keyword[try] :
identifier[validator] ( identifier[self] . identifier[obj] )
keyword[except] identifier[ValidationError] keyword[as] identifier[e] :
identifier[self] . identifier[errors] . identifier[append] ( identifier[e] . identifier[error] )
keyword[if] keyword[not] identifier[self] . identifier[errors] keyword[and] identifier[self] . identifier[_has_unvalidated_prefix] ():
identifier[self] . identifier[_move_to_validated] ()
keyword[return] keyword[not] identifier[self] . identifier[errors] | def validate(self):
"""
Validates the given Amazon S3 file with :attr:`validators`. If errors
occur they are appended to :attr:`errors`. If the file is valid and a
`AWS_UNVALIDATED_PREFIX` config is present, its value will be removed
from the file key.
:return: a boolean indicating if the file vas valid.
"""
for validator in self.validators:
try:
validator(self.obj) # depends on [control=['try'], data=[]]
except ValidationError as e:
self.errors.append(e.error) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['validator']]
if not self.errors and self._has_unvalidated_prefix():
self._move_to_validated() # depends on [control=['if'], data=[]]
return not self.errors |
def _insert_timestamp(self, slug, max_length=255):
"""Appends a timestamp integer to the given slug, yet ensuring the
result is less than the specified max_length.
"""
timestamp = str(int(time.time()))
ts_len = len(timestamp) + 1
while len(slug) + ts_len > max_length:
slug = '-'.join(slug.split('-')[:-1])
slug = '-'.join([slug, timestamp])
return slug | def function[_insert_timestamp, parameter[self, slug, max_length]]:
constant[Appends a timestamp integer to the given slug, yet ensuring the
result is less than the specified max_length.
]
variable[timestamp] assign[=] call[name[str], parameter[call[name[int], parameter[call[name[time].time, parameter[]]]]]]
variable[ts_len] assign[=] binary_operation[call[name[len], parameter[name[timestamp]]] + constant[1]]
while compare[binary_operation[call[name[len], parameter[name[slug]]] + name[ts_len]] greater[>] name[max_length]] begin[:]
variable[slug] assign[=] call[constant[-].join, parameter[call[call[name[slug].split, parameter[constant[-]]]][<ast.Slice object at 0x7da1b1b6b310>]]]
variable[slug] assign[=] call[constant[-].join, parameter[list[[<ast.Name object at 0x7da1b1b699f0>, <ast.Name object at 0x7da1b1b69870>]]]]
return[name[slug]] | keyword[def] identifier[_insert_timestamp] ( identifier[self] , identifier[slug] , identifier[max_length] = literal[int] ):
literal[string]
identifier[timestamp] = identifier[str] ( identifier[int] ( identifier[time] . identifier[time] ()))
identifier[ts_len] = identifier[len] ( identifier[timestamp] )+ literal[int]
keyword[while] identifier[len] ( identifier[slug] )+ identifier[ts_len] > identifier[max_length] :
identifier[slug] = literal[string] . identifier[join] ( identifier[slug] . identifier[split] ( literal[string] )[:- literal[int] ])
identifier[slug] = literal[string] . identifier[join] ([ identifier[slug] , identifier[timestamp] ])
keyword[return] identifier[slug] | def _insert_timestamp(self, slug, max_length=255):
"""Appends a timestamp integer to the given slug, yet ensuring the
result is less than the specified max_length.
"""
timestamp = str(int(time.time()))
ts_len = len(timestamp) + 1
while len(slug) + ts_len > max_length:
slug = '-'.join(slug.split('-')[:-1]) # depends on [control=['while'], data=[]]
slug = '-'.join([slug, timestamp])
return slug |
def health_check(self):
    """
    Get health status
    https://confluence.atlassian.com/jirakb/how-to-retrieve-health-check-results-using-rest-api-867195158.html
    :return:
    """
    # Preferred endpoint: Troubleshooting & Support Tools plugin.
    result = self.get('rest/troubleshooting/1.0/check/')
    if result:
        return result
    # Fall back to the legacy Support Tools endpoint.
    return self.get('rest/supportHealthCheck/1.0/check/')
constant[
Get health status
https://confluence.atlassian.com/jirakb/how-to-retrieve-health-check-results-using-rest-api-867195158.html
:return:
]
variable[response] assign[=] call[name[self].get, parameter[constant[rest/troubleshooting/1.0/check/]]]
if <ast.UnaryOp object at 0x7da18f720eb0> begin[:]
variable[response] assign[=] call[name[self].get, parameter[constant[rest/supportHealthCheck/1.0/check/]]]
return[name[response]] | keyword[def] identifier[health_check] ( identifier[self] ):
literal[string]
identifier[response] = identifier[self] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[response] :
identifier[response] = identifier[self] . identifier[get] ( literal[string] )
keyword[return] identifier[response] | def health_check(self):
"""
Get health status
https://confluence.atlassian.com/jirakb/how-to-retrieve-health-check-results-using-rest-api-867195158.html
:return:
"""
# check as Troubleshooting & Support Tools Plugin
response = self.get('rest/troubleshooting/1.0/check/')
if not response:
# check as support tools
response = self.get('rest/supportHealthCheck/1.0/check/') # depends on [control=['if'], data=[]]
return response |
def generateImplicitParameters(cls, obj):
    """
    Ensure required implicit properties exist on *obj*.

    Adds a VERSION content line when the component does not already carry
    one. (PRODID and VTIMEZONE components are likewise implicit: VTIMEZONEs
    must exist whenever TZID parameters or tzinfo-bearing datetimes do.)
    """
    if hasattr(obj, 'version'):
        return
    obj.add(ContentLine('VERSION', [], cls.versionString))
constant[
Create PRODID, VERSION, and VTIMEZONEs if needed.
VTIMEZONEs will need to exist whenever TZID parameters exist or when
datetimes with tzinfo exist.
]
if <ast.UnaryOp object at 0x7da18fe93b50> begin[:]
call[name[obj].add, parameter[call[name[ContentLine], parameter[constant[VERSION], list[[]], name[cls].versionString]]]] | keyword[def] identifier[generateImplicitParameters] ( identifier[cls] , identifier[obj] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[obj] , literal[string] ):
identifier[obj] . identifier[add] ( identifier[ContentLine] ( literal[string] ,[], identifier[cls] . identifier[versionString] )) | def generateImplicitParameters(cls, obj):
"""
Create PRODID, VERSION, and VTIMEZONEs if needed.
VTIMEZONEs will need to exist whenever TZID parameters exist or when
datetimes with tzinfo exist.
"""
if not hasattr(obj, 'version'):
obj.add(ContentLine('VERSION', [], cls.versionString)) # depends on [control=['if'], data=[]] |
def passwordLogin(self, username):
    """
    Generate a new challenge for the given username.
    """
    # 16 random bytes form the challenge; remember it alongside the user.
    nonce = secureRandom(16)
    self.challenge = nonce
    self.username = username
    return {'challenge': nonce}
constant[
Generate a new challenge for the given username.
]
name[self].challenge assign[=] call[name[secureRandom], parameter[constant[16]]]
name[self].username assign[=] name[username]
return[dictionary[[<ast.Constant object at 0x7da20c7959c0>], [<ast.Attribute object at 0x7da20c794dc0>]]] | keyword[def] identifier[passwordLogin] ( identifier[self] , identifier[username] ):
literal[string]
identifier[self] . identifier[challenge] = identifier[secureRandom] ( literal[int] )
identifier[self] . identifier[username] = identifier[username]
keyword[return] { literal[string] : identifier[self] . identifier[challenge] } | def passwordLogin(self, username):
"""
Generate a new challenge for the given username.
"""
self.challenge = secureRandom(16)
self.username = username
return {'challenge': self.challenge} |
def connect_fps(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """
    Open a connection to Amazon Flexible Payments Service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID
    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.fps.connection.FPSConnection`
    :return: A connection to FPS
    """
    # Imported lazily so importing this module does not pull in FPS.
    from boto.fps.connection import FPSConnection
    connection = FPSConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
    return connection
constant[
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.fps.connection.FPSConnection`
:return: A connection to FPS
]
from relative_module[boto.fps.connection] import module[FPSConnection]
return[call[name[FPSConnection], parameter[name[aws_access_key_id], name[aws_secret_access_key]]]] | keyword[def] identifier[connect_fps] ( identifier[aws_access_key_id] = keyword[None] , identifier[aws_secret_access_key] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[from] identifier[boto] . identifier[fps] . identifier[connection] keyword[import] identifier[FPSConnection]
keyword[return] identifier[FPSConnection] ( identifier[aws_access_key_id] , identifier[aws_secret_access_key] ,** identifier[kwargs] ) | def connect_fps(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.fps.connection.FPSConnection`
:return: A connection to FPS
"""
from boto.fps.connection import FPSConnection
return FPSConnection(aws_access_key_id, aws_secret_access_key, **kwargs) |
def fprocess(infilep, outfilep):
    """
    Scan an input file for LA equations enclosed in double square brackets,
    e.g. ``[[ M3_mymatrix = M3_anothermatrix^-1 ]]``, and replace each one
    with a comment containing the equation followed by nested C function
    calls that implement it, terminated by a semicolon.  The equation inside
    ``[[ ]]`` must NOT end with a semicolon (that raises a ParseException),
    though a semicolon after the closing brackets is fine.  All other text
    is copied through unchanged.

    The arguments are file objects (NOT file names) opened for reading and
    writing, respectively.
    """
    bracket_re = re.compile(r'\[\[\s*(.*?)\s*\]\]', re.DOTALL)
    text = infilep.read()

    def replace(match):
        # Translate the captured equation to C and wrap it in a comment.
        equation = match.group(1)
        c_code = parse(equation)
        return "/* %s */\n%s;\nLAParserBufferReset();\n" % (equation, c_code)

    outfilep.write(bracket_re.sub(replace, text))
constant[
Scans an input file for LA equations between double square brackets,
e.g. [[ M3_mymatrix = M3_anothermatrix^-1 ]], and replaces the expression
with a comment containing the equation followed by nested function calls
that implement the equation as C code. A trailing semi-colon is appended.
The equation within [[ ]] should NOT end with a semicolon as that will raise
a ParseException. However, it is ok to have a semicolon after the right brackets.
Other text in the file is unaltered.
The arguments are file objects (NOT file names) opened for reading and
writing, respectively.
]
variable[pattern] assign[=] constant[\[\[\s*(.*?)\s*\]\]]
variable[eqn] assign[=] call[name[re].compile, parameter[name[pattern], name[re].DOTALL]]
variable[s] assign[=] call[name[infilep].read, parameter[]]
def function[parser, parameter[mo]]:
variable[ccode] assign[=] call[name[parse], parameter[call[name[mo].group, parameter[constant[1]]]]]
return[binary_operation[constant[/* %s */
%s;
LAParserBufferReset();
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f09e7a0>, <ast.Name object at 0x7da18f09e2f0>]]]]
variable[content] assign[=] call[name[eqn].sub, parameter[name[parser], name[s]]]
call[name[outfilep].write, parameter[name[content]]] | keyword[def] identifier[fprocess] ( identifier[infilep] , identifier[outfilep] ):
literal[string]
identifier[pattern] = literal[string]
identifier[eqn] = identifier[re] . identifier[compile] ( identifier[pattern] , identifier[re] . identifier[DOTALL] )
identifier[s] = identifier[infilep] . identifier[read] ()
keyword[def] identifier[parser] ( identifier[mo] ):
identifier[ccode] = identifier[parse] ( identifier[mo] . identifier[group] ( literal[int] ))
keyword[return] literal[string] %( identifier[mo] . identifier[group] ( literal[int] ), identifier[ccode] )
identifier[content] = identifier[eqn] . identifier[sub] ( identifier[parser] , identifier[s] )
identifier[outfilep] . identifier[write] ( identifier[content] ) | def fprocess(infilep, outfilep):
"""
Scans an input file for LA equations between double square brackets,
e.g. [[ M3_mymatrix = M3_anothermatrix^-1 ]], and replaces the expression
with a comment containing the equation followed by nested function calls
that implement the equation as C code. A trailing semi-colon is appended.
The equation within [[ ]] should NOT end with a semicolon as that will raise
a ParseException. However, it is ok to have a semicolon after the right brackets.
Other text in the file is unaltered.
The arguments are file objects (NOT file names) opened for reading and
writing, respectively.
"""
pattern = '\\[\\[\\s*(.*?)\\s*\\]\\]'
eqn = re.compile(pattern, re.DOTALL)
s = infilep.read()
def parser(mo):
ccode = parse(mo.group(1))
return '/* %s */\n%s;\nLAParserBufferReset();\n' % (mo.group(1), ccode)
content = eqn.sub(parser, s)
outfilep.write(content) |
def _get_element_by_names(source, names):
"""
Given a dict and path '/' or '.' separated. Digs into de dict to retrieve
the specified element.
Args:
source (dict): set of nested objects in which the data will be searched
path (list): list of attribute names
"""
if source is None:
return source
else:
if names:
head, *rest = names
if isinstance(source, dict) and head in source:
return _get_element_by_names(source[head], rest)
elif isinstance(source, list) and head.isdigit():
return _get_element_by_names(source[int(head)], rest)
elif not names[0]:
pass
else:
source = None
return source | def function[_get_element_by_names, parameter[source, names]]:
constant[
Given a dict and path '/' or '.' separated. Digs into de dict to retrieve
the specified element.
Args:
source (dict): set of nested objects in which the data will be searched
path (list): list of attribute names
]
if compare[name[source] is constant[None]] begin[:]
return[name[source]] | keyword[def] identifier[_get_element_by_names] ( identifier[source] , identifier[names] ):
literal[string]
keyword[if] identifier[source] keyword[is] keyword[None] :
keyword[return] identifier[source]
keyword[else] :
keyword[if] identifier[names] :
identifier[head] ,* identifier[rest] = identifier[names]
keyword[if] identifier[isinstance] ( identifier[source] , identifier[dict] ) keyword[and] identifier[head] keyword[in] identifier[source] :
keyword[return] identifier[_get_element_by_names] ( identifier[source] [ identifier[head] ], identifier[rest] )
keyword[elif] identifier[isinstance] ( identifier[source] , identifier[list] ) keyword[and] identifier[head] . identifier[isdigit] ():
keyword[return] identifier[_get_element_by_names] ( identifier[source] [ identifier[int] ( identifier[head] )], identifier[rest] )
keyword[elif] keyword[not] identifier[names] [ literal[int] ]:
keyword[pass]
keyword[else] :
identifier[source] = keyword[None]
keyword[return] identifier[source] | def _get_element_by_names(source, names):
"""
Given a dict and path '/' or '.' separated. Digs into de dict to retrieve
the specified element.
Args:
source (dict): set of nested objects in which the data will be searched
path (list): list of attribute names
"""
if source is None:
return source # depends on [control=['if'], data=['source']]
else:
if names:
(head, *rest) = names
if isinstance(source, dict) and head in source:
return _get_element_by_names(source[head], rest) # depends on [control=['if'], data=[]]
elif isinstance(source, list) and head.isdigit():
return _get_element_by_names(source[int(head)], rest) # depends on [control=['if'], data=[]]
elif not names[0]:
pass # depends on [control=['if'], data=[]]
else:
source = None # depends on [control=['if'], data=[]]
return source |
async def post(self, public_key):
    """Writes contents review.

    Expects a JSON request body with a "message" field (either a dict or a
    JSON-encoded string) containing "cid", "review", "rating" and "coinid".
    Responds with the stored review data, or HTTP 400 on malformed input.
    """
    if settings.SIGNATURE_VERIFICATION:
        super().verify()
    try:
        body = json.loads(self.request.body)
    except (ValueError, TypeError):
        # Narrowed from a bare except: only JSON/body decoding failures
        # should produce this error response.
        self.set_status(400)
        self.write({"error": 400, "reason": "Unexpected data format. JSON required"})
        raise tornado.web.Finish
    if isinstance(body["message"], str):
        message = json.loads(body["message"])
    elif isinstance(body["message"], dict):
        message = body["message"]
    else:
        # Previously fell through with `message` unbound (NameError).
        self.set_status(400)
        self.write({"error": 400, "reason": "Unexpected data format. JSON required"})
        raise tornado.web.Finish
    cid = message.get("cid")
    review = message.get("review")
    rating = message.get("rating")
    coinid = message.get("coinid")
    if not all([cid, rating, review]):
        self.set_status(400)
        self.write({"error": 400, "reason": "Missed required fields"})
        # Previously missing: without Finish, processing continued with
        # None values and then wrote a second (success) response.
        raise tornado.web.Finish
    if coinid in settings.bridges.keys():
        self.account.blockchain.setendpoint(settings.bridges[coinid])
    else:
        self.set_status(400)
        self.write({"error": 400, "reason": "Invalid coinid"})
        raise tornado.web.Finish
    buyer_address = self.account.validator[coinid](public_key)
    review = await self.account.blockchain.addreview(
        cid=int(cid), buyer_address=buyer_address,
        stars=int(rating), review=review)
    await self.account.setreview(cid=cid, txid=review["result"]["txid"], coinid=coinid)
    self.write({"cid": cid, "review": review, "rating": rating})
literal[string]
keyword[if] identifier[settings] . identifier[SIGNATURE_VERIFICATION] :
identifier[super] (). identifier[verify] ()
keyword[try] :
identifier[body] = identifier[json] . identifier[loads] ( identifier[self] . identifier[request] . identifier[body] )
keyword[except] :
identifier[self] . identifier[set_status] ( literal[int] )
identifier[self] . identifier[write] ({ literal[string] : literal[int] , literal[string] : literal[string] })
keyword[raise] identifier[tornado] . identifier[web] . identifier[Finish]
keyword[if] identifier[isinstance] ( identifier[body] [ literal[string] ], identifier[str] ):
identifier[message] = identifier[json] . identifier[loads] ( identifier[body] [ literal[string] ])
keyword[elif] identifier[isinstance] ( identifier[body] [ literal[string] ], identifier[dict] ):
identifier[message] = identifier[body] [ literal[string] ]
identifier[cid] = identifier[message] . identifier[get] ( literal[string] )
identifier[review] = identifier[message] . identifier[get] ( literal[string] )
identifier[rating] = identifier[message] . identifier[get] ( literal[string] )
identifier[coinid] = identifier[message] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[all] ([ identifier[cid] , identifier[rating] , identifier[review] ]):
identifier[self] . identifier[set_status] ( literal[int] )
identifier[self] . identifier[write] ({ literal[string] : literal[int] , literal[string] : literal[string] })
keyword[if] identifier[coinid] keyword[in] identifier[settings] . identifier[bridges] . identifier[keys] ():
identifier[self] . identifier[account] . identifier[blockchain] . identifier[setendpoint] ( identifier[settings] . identifier[bridges] [ identifier[coinid] ])
keyword[else] :
identifier[self] . identifier[set_status] ( literal[int] )
identifier[self] . identifier[write] ({ literal[string] : literal[int] , literal[string] : literal[string] })
keyword[raise] identifier[tornado] . identifier[web] . identifier[Finish]
identifier[buyer_address] = identifier[self] . identifier[account] . identifier[validator] [ identifier[coinid] ]( identifier[public_key] )
identifier[review] = keyword[await] identifier[self] . identifier[account] . identifier[blockchain] . identifier[addreview] ( identifier[cid] = identifier[int] ( identifier[cid] ), identifier[buyer_address] = identifier[buyer_address] ,
identifier[stars] = identifier[int] ( identifier[rating] ), identifier[review] = identifier[review] )
keyword[await] identifier[self] . identifier[account] . identifier[setreview] ( identifier[cid] = identifier[cid] , identifier[txid] = identifier[review] [ literal[string] ][ literal[string] ], identifier[coinid] = identifier[coinid] )
identifier[self] . identifier[write] ({ literal[string] : identifier[cid] , literal[string] : identifier[review] , literal[string] : identifier[rating] }) | async def post(self, public_key):
"""Writes contents review
"""
if settings.SIGNATURE_VERIFICATION:
super().verify() # depends on [control=['if'], data=[]]
try:
body = json.loads(self.request.body) # depends on [control=['try'], data=[]]
except:
self.set_status(400)
self.write({'error': 400, 'reason': 'Unexpected data format. JSON required'})
raise tornado.web.Finish # depends on [control=['except'], data=[]]
if isinstance(body['message'], str):
message = json.loads(body['message']) # depends on [control=['if'], data=[]]
elif isinstance(body['message'], dict):
message = body['message'] # depends on [control=['if'], data=[]]
cid = message.get('cid')
review = message.get('review')
rating = message.get('rating')
coinid = message.get('coinid')
if not all([cid, rating, review]):
self.set_status(400)
self.write({'error': 400, 'reason': 'Missed required fields'}) # depends on [control=['if'], data=[]]
if coinid in settings.bridges.keys():
self.account.blockchain.setendpoint(settings.bridges[coinid]) # depends on [control=['if'], data=['coinid']]
else:
self.set_status(400)
self.write({'error': 400, 'reason': 'Invalid coinid'})
raise tornado.web.Finish
buyer_address = self.account.validator[coinid](public_key)
review = await self.account.blockchain.addreview(cid=int(cid), buyer_address=buyer_address, stars=int(rating), review=review)
await self.account.setreview(cid=cid, txid=review['result']['txid'], coinid=coinid)
self.write({'cid': cid, 'review': review, 'rating': rating}) |
def assign_from_list(obj: T,
                     fieldlist: Sequence[str],
                     valuelist: Sequence[any]) -> None:
    """Copy each value from ``valuelist`` onto ``obj`` under the
    corresponding attribute name from ``fieldlist``.

    Raises:
        AssertionError: if the two sequences differ in length.
    """
    if len(fieldlist) != len(valuelist):
        raise AssertionError("assign_from_list: fieldlist and valuelist of "
                             "different length")
    for name, value in zip(fieldlist, valuelist):
        setattr(obj, name, value)
constant[Within "obj", assigns the values from the value list to the fields in
the fieldlist.]
if compare[call[name[len], parameter[name[fieldlist]]] not_equal[!=] call[name[len], parameter[name[valuelist]]]] begin[:]
<ast.Raise object at 0x7da1b1728d30>
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[valuelist]]]]]] begin[:]
call[name[setattr], parameter[name[obj], call[name[fieldlist]][name[i]], call[name[valuelist]][name[i]]]] | keyword[def] identifier[assign_from_list] ( identifier[obj] : identifier[T] ,
identifier[fieldlist] : identifier[Sequence] [ identifier[str] ],
identifier[valuelist] : identifier[Sequence] [ identifier[any] ])-> keyword[None] :
literal[string]
keyword[if] identifier[len] ( identifier[fieldlist] )!= identifier[len] ( identifier[valuelist] ):
keyword[raise] identifier[AssertionError] ( literal[string]
literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[valuelist] )):
identifier[setattr] ( identifier[obj] , identifier[fieldlist] [ identifier[i] ], identifier[valuelist] [ identifier[i] ]) | def assign_from_list(obj: T, fieldlist: Sequence[str], valuelist: Sequence[any]) -> None:
"""Within "obj", assigns the values from the value list to the fields in
the fieldlist."""
if len(fieldlist) != len(valuelist):
raise AssertionError('assign_from_list: fieldlist and valuelist of different length') # depends on [control=['if'], data=[]]
for i in range(len(valuelist)):
setattr(obj, fieldlist[i], valuelist[i]) # depends on [control=['for'], data=['i']] |
def stringify(data):
    """Turn all dictionary values into strings, recursively.

    Dicts are updated in place and returned; lists yield new lists;
    anything else is converted to text via ``smart_text``.
    """
    if isinstance(data, list):
        return [stringify(item) for item in data]
    if not isinstance(data, dict):
        return smart_text(data)
    # Mutate the dict in place (matching the callers' expectations) and
    # hand the same object back.
    for key in data:
        data[key] = stringify(data[key])
    return data
constant[Turns all dictionary values into strings]
if call[name[isinstance], parameter[name[data], name[dict]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da20c6a8d60>, <ast.Name object at 0x7da20c6aae90>]]] in starred[call[name[data].items, parameter[]]] begin[:]
call[name[data]][name[key]] assign[=] call[name[stringify], parameter[name[value]]]
return[name[data]] | keyword[def] identifier[stringify] ( identifier[data] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[data] , identifier[dict] ):
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[data] . identifier[items] ():
identifier[data] [ identifier[key] ]= identifier[stringify] ( identifier[value] )
keyword[elif] identifier[isinstance] ( identifier[data] , identifier[list] ):
keyword[return] [ identifier[stringify] ( identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[data] ]
keyword[else] :
keyword[return] identifier[smart_text] ( identifier[data] )
keyword[return] identifier[data] | def stringify(data):
"""Turns all dictionary values into strings"""
if isinstance(data, dict):
for (key, value) in data.items():
data[key] = stringify(value) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(data, list):
return [stringify(item) for item in data] # depends on [control=['if'], data=[]]
else:
return smart_text(data)
return data |
def group_for_policy(self, policy=None):
    """
    Return the name of the collective.workspace usergroup that backs the
    given participation policy.

    :param policy: the policy value to look up; defaults to the context's
        current participant policy
    :type policy: str
    """
    effective = self.context.participant_policy if policy is None else policy
    return "%s:%s" % (effective.title(), self.context.UID())
constant[
Lookup the collective.workspace usergroup corresponding to the
given policy
:param policy: The value of the policy to lookup, defaults to the
current policy
:type policy: str
]
if compare[name[policy] is constant[None]] begin[:]
variable[policy] assign[=] name[self].context.participant_policy
return[binary_operation[constant[%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b1401cf0>, <ast.Call object at 0x7da1b1402050>]]]] | keyword[def] identifier[group_for_policy] ( identifier[self] , identifier[policy] = keyword[None] ):
literal[string]
keyword[if] identifier[policy] keyword[is] keyword[None] :
identifier[policy] = identifier[self] . identifier[context] . identifier[participant_policy]
keyword[return] literal[string] %( identifier[policy] . identifier[title] (), identifier[self] . identifier[context] . identifier[UID] ()) | def group_for_policy(self, policy=None):
"""
Lookup the collective.workspace usergroup corresponding to the
given policy
:param policy: The value of the policy to lookup, defaults to the
current policy
:type policy: str
"""
if policy is None:
policy = self.context.participant_policy # depends on [control=['if'], data=['policy']]
return '%s:%s' % (policy.title(), self.context.UID()) |
def work_peer_add(self, address, port):
    """
    Register the given **IP address** and **port** as a work peer for the
    node until it restarts.

    .. enable_control required
    .. version 8.0 required

    :param address: IP address of work peer to add
    :type address: str
    :param port: Port work peer to add
    :type port: int
    :raises: :py:exc:`nano.rpc.RPCException`

    >>> rpc.work_peer_add(address="::ffff:172.17.0.1", port="7076")
    True
    """
    payload = {
        "address": self._process_value(address, 'ipaddr'),
        "port": self._process_value(port, 'int'),
    }
    # The node signals success by including a 'success' key in the reply.
    return 'success' in self.call('work_peer_add', payload)
constant[
Add specific **IP address** and **port** as work peer for node until
restart
.. enable_control required
.. version 8.0 required
:param address: IP address of work peer to add
:type address: str
:param port: Port work peer to add
:type port: int
:raises: :py:exc:`nano.rpc.RPCException`
>>> rpc.work_peer_add(address="::ffff:172.17.0.1", port="7076")
True
]
variable[address] assign[=] call[name[self]._process_value, parameter[name[address], constant[ipaddr]]]
variable[port] assign[=] call[name[self]._process_value, parameter[name[port], constant[int]]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b253b700>, <ast.Constant object at 0x7da1b253afb0>], [<ast.Name object at 0x7da1b2539fc0>, <ast.Name object at 0x7da1b2539450>]]
variable[resp] assign[=] call[name[self].call, parameter[constant[work_peer_add], name[payload]]]
return[compare[constant[success] in name[resp]]] | keyword[def] identifier[work_peer_add] ( identifier[self] , identifier[address] , identifier[port] ):
literal[string]
identifier[address] = identifier[self] . identifier[_process_value] ( identifier[address] , literal[string] )
identifier[port] = identifier[self] . identifier[_process_value] ( identifier[port] , literal[string] )
identifier[payload] ={ literal[string] : identifier[address] , literal[string] : identifier[port] }
identifier[resp] = identifier[self] . identifier[call] ( literal[string] , identifier[payload] )
keyword[return] literal[string] keyword[in] identifier[resp] | def work_peer_add(self, address, port):
"""
Add specific **IP address** and **port** as work peer for node until
restart
.. enable_control required
.. version 8.0 required
:param address: IP address of work peer to add
:type address: str
:param port: Port work peer to add
:type port: int
:raises: :py:exc:`nano.rpc.RPCException`
>>> rpc.work_peer_add(address="::ffff:172.17.0.1", port="7076")
True
"""
address = self._process_value(address, 'ipaddr')
port = self._process_value(port, 'int')
payload = {'address': address, 'port': port}
resp = self.call('work_peer_add', payload)
return 'success' in resp |
def getBottomLeft(self):
    """
    Return a tuple with the (x, y) coordinates of the lower-left point of
    the circle's bounding box.

    Requires the radius and the centre coordinates to be numbers.

    :returns: tuple of two floats ``(cx - r, cy - r)``
    """
    # Fetch the radius once instead of calling the accessor twice.
    r = float(self.get_r())
    return (float(self.get_cx()) - r, float(self.get_cy()) - r)
constant[
Retrieves a tuple with the x,y coordinates of the lower left point of the circle.
Requires the radius and the coordinates to be numbers
]
return[tuple[[<ast.BinOp object at 0x7da18f722650>, <ast.BinOp object at 0x7da18f722830>]]] | keyword[def] identifier[getBottomLeft] ( identifier[self] ):
literal[string]
keyword[return] ( identifier[float] ( identifier[self] . identifier[get_cx] ())- identifier[float] ( identifier[self] . identifier[get_r] ()), identifier[float] ( identifier[self] . identifier[get_cy] ())- identifier[float] ( identifier[self] . identifier[get_r] ())) | def getBottomLeft(self):
"""
Retrieves a tuple with the x,y coordinates of the lower left point of the circle.
Requires the radius and the coordinates to be numbers
"""
return (float(self.get_cx()) - float(self.get_r()), float(self.get_cy()) - float(self.get_r())) |
def send(self, s):
    """
    Sends all the given data to the socket, looping until every unit has
    been written by the underlying ``_send``.
    Aliases: write, put, sendall, send_all

    :returns: the total amount of data submitted
    """
    self._print_header('======== Sending ({0}) ========'.format(len(s)))
    self._log_send(s)
    total = len(s)
    remaining = s
    while remaining:
        # _send may write only a prefix; keep going with what is left.
        written = self._send(remaining)
        remaining = remaining[written:]
    return total
constant[
Sends all the given data to the socket.
Aliases: write, put, sendall, send_all
]
call[name[self]._print_header, parameter[call[constant[======== Sending ({0}) ========].format, parameter[call[name[len], parameter[name[s]]]]]]]
call[name[self]._log_send, parameter[name[s]]]
variable[out] assign[=] call[name[len], parameter[name[s]]]
while name[s] begin[:]
variable[s] assign[=] call[name[s]][<ast.Slice object at 0x7da1b2346c50>]
return[name[out]] | keyword[def] identifier[send] ( identifier[self] , identifier[s] ):
literal[string]
identifier[self] . identifier[_print_header] ( literal[string] . identifier[format] ( identifier[len] ( identifier[s] )))
identifier[self] . identifier[_log_send] ( identifier[s] )
identifier[out] = identifier[len] ( identifier[s] )
keyword[while] identifier[s] :
identifier[s] = identifier[s] [ identifier[self] . identifier[_send] ( identifier[s] ):]
keyword[return] identifier[out] | def send(self, s):
"""
Sends all the given data to the socket.
Aliases: write, put, sendall, send_all
"""
self._print_header('======== Sending ({0}) ========'.format(len(s)))
self._log_send(s)
out = len(s)
while s:
s = s[self._send(s):] # depends on [control=['while'], data=[]]
return out |
def traverse(self, index=0):
""" This is used to produce a list of lists where each each item
in that list is a diffrent combination of items from the lists
within with every combination of such values.
Args:
index (int) : the index at witch to start the list.
Note this is used only in the function as a processing
Returns:
list : is every combination.
"""
if index < len(self.nodes):
for entity in self.nodes[index]:
for next_result in self.traverse(index=index+1):
if isinstance(entity, list):
yield entity + next_result
else:
yield [entity] + next_result
else:
yield [] | def function[traverse, parameter[self, index]]:
constant[ This is used to produce a list of lists where each each item
in that list is a diffrent combination of items from the lists
within with every combination of such values.
Args:
index (int) : the index at witch to start the list.
Note this is used only in the function as a processing
Returns:
list : is every combination.
]
if compare[name[index] less[<] call[name[len], parameter[name[self].nodes]]] begin[:]
for taget[name[entity]] in starred[call[name[self].nodes][name[index]]] begin[:]
for taget[name[next_result]] in starred[call[name[self].traverse, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[entity], name[list]]] begin[:]
<ast.Yield object at 0x7da1b0745600> | keyword[def] identifier[traverse] ( identifier[self] , identifier[index] = literal[int] ):
literal[string]
keyword[if] identifier[index] < identifier[len] ( identifier[self] . identifier[nodes] ):
keyword[for] identifier[entity] keyword[in] identifier[self] . identifier[nodes] [ identifier[index] ]:
keyword[for] identifier[next_result] keyword[in] identifier[self] . identifier[traverse] ( identifier[index] = identifier[index] + literal[int] ):
keyword[if] identifier[isinstance] ( identifier[entity] , identifier[list] ):
keyword[yield] identifier[entity] + identifier[next_result]
keyword[else] :
keyword[yield] [ identifier[entity] ]+ identifier[next_result]
keyword[else] :
keyword[yield] [] | def traverse(self, index=0):
""" This is used to produce a list of lists where each each item
in that list is a diffrent combination of items from the lists
within with every combination of such values.
Args:
index (int) : the index at witch to start the list.
Note this is used only in the function as a processing
Returns:
list : is every combination.
"""
if index < len(self.nodes):
for entity in self.nodes[index]:
for next_result in self.traverse(index=index + 1):
if isinstance(entity, list):
yield (entity + next_result) # depends on [control=['if'], data=[]]
else:
yield ([entity] + next_result) # depends on [control=['for'], data=['next_result']] # depends on [control=['for'], data=['entity']] # depends on [control=['if'], data=['index']]
else:
yield [] |
def formatter(color, s):
    """Wrap ``s`` in the given color escape codes, unless the module-level
    ``no_coloring`` flag disables coloring entirely."""
    if not no_coloring:
        return "{begin}{s}{reset}".format(begin=color, s=s, reset=Colors.RESET)
    return s
constant[ Formats a string with color ]
if name[no_coloring] begin[:]
return[name[s]]
return[call[constant[{begin}{s}{reset}].format, parameter[]]] | keyword[def] identifier[formatter] ( identifier[color] , identifier[s] ):
literal[string]
keyword[if] identifier[no_coloring] :
keyword[return] identifier[s]
keyword[return] literal[string] . identifier[format] ( identifier[begin] = identifier[color] , identifier[s] = identifier[s] , identifier[reset] = identifier[Colors] . identifier[RESET] ) | def formatter(color, s):
""" Formats a string with color """
if no_coloring:
return s # depends on [control=['if'], data=[]]
return '{begin}{s}{reset}'.format(begin=color, s=s, reset=Colors.RESET) |
def get_node_text(self, goid, goobj):
    """Return the multi-line label printed inside a GO term box."""
    # Header line, e.g. "GO:0036464 L04 D06"
    lines = [self.get_hdr(goid, goobj)]
    # GO name line, e.g. "cytoplamic ribonucleoprotein" (unless suppressed)
    if 'no_name' not in self.present:
        lines.append(self._get_go_name(goobj))
    # Study info line, e.g. "24 genes"
    if 'objgoea' in self.kws:
        study_txt = self.kws['objgoea'].get_study_txt(goid)
        if study_txt is not None:
            lines.append(study_txt)
    # Optional user-specified text for this GO id
    if 'go2txt' in self.kws and goid in self.kws['go2txt']:
        lines.append(self.kws['go2txt'][goid])
    return "\n".join(lines)
constant[Return a string to be printed in a GO term box.]
variable[txt] assign[=] list[[]]
call[name[txt].append, parameter[call[name[self].get_hdr, parameter[name[goid], name[goobj]]]]]
if compare[constant[no_name] <ast.NotIn object at 0x7da2590d7190> name[self].present] begin[:]
call[name[txt].append, parameter[call[name[self]._get_go_name, parameter[name[goobj]]]]]
if compare[constant[objgoea] in name[self].kws] begin[:]
variable[study_txt] assign[=] call[call[name[self].kws][constant[objgoea]].get_study_txt, parameter[name[goid]]]
if compare[name[study_txt] is_not constant[None]] begin[:]
call[name[txt].append, parameter[name[study_txt]]]
if <ast.BoolOp object at 0x7da18f811c90> begin[:]
call[name[txt].append, parameter[call[call[name[self].kws][constant[go2txt]]][name[goid]]]]
return[call[constant[
].join, parameter[name[txt]]]] | keyword[def] identifier[get_node_text] ( identifier[self] , identifier[goid] , identifier[goobj] ):
literal[string]
identifier[txt] =[]
identifier[txt] . identifier[append] ( identifier[self] . identifier[get_hdr] ( identifier[goid] , identifier[goobj] ))
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[present] :
identifier[txt] . identifier[append] ( identifier[self] . identifier[_get_go_name] ( identifier[goobj] ))
keyword[if] literal[string] keyword[in] identifier[self] . identifier[kws] :
identifier[study_txt] = identifier[self] . identifier[kws] [ literal[string] ]. identifier[get_study_txt] ( identifier[goid] )
keyword[if] identifier[study_txt] keyword[is] keyword[not] keyword[None] :
identifier[txt] . identifier[append] ( identifier[study_txt] )
keyword[if] literal[string] keyword[in] identifier[self] . identifier[kws] keyword[and] identifier[goid] keyword[in] identifier[self] . identifier[kws] [ literal[string] ]:
identifier[txt] . identifier[append] ( identifier[self] . identifier[kws] [ literal[string] ][ identifier[goid] ])
keyword[return] literal[string] . identifier[join] ( identifier[txt] ) | def get_node_text(self, goid, goobj):
"""Return a string to be printed in a GO term box."""
txt = []
# Header line: "GO:0036464 L04 D06"
txt.append(self.get_hdr(goid, goobj))
# GO name line: "cytoplamic ribonucleoprotein"
if 'no_name' not in self.present:
txt.append(self._get_go_name(goobj)) # depends on [control=['if'], data=[]]
# study info line: "24 genes"
if 'objgoea' in self.kws:
study_txt = self.kws['objgoea'].get_study_txt(goid)
if study_txt is not None:
txt.append(study_txt) # depends on [control=['if'], data=['study_txt']] # depends on [control=['if'], data=[]]
# Add user-specified text, if needed
if 'go2txt' in self.kws and goid in self.kws['go2txt']:
txt.append(self.kws['go2txt'][goid]) # depends on [control=['if'], data=[]]
return '\n'.join(txt) |
def decipher(self, string):
    """Decipher string using Playfair cipher according to initialised key.

    Punctuation and whitespace are removed from the input.  The ciphertext
    should contain an even number of characters; if it does not, an 'X' is
    appended before deciphering.

    Example::

        plaintext = Playfair(key='zgptfoihmuwdrcnykeqaxvsbl').decipher(ciphertext)

    :param string: The string to decipher.
    :returns: The deciphered string.
    """
    string = self.remove_punctuation(string)
    if len(string) % 2:
        string += 'X'
    # Decode the text two characters at a time and stitch the pairs together.
    pairs = (self.decipher_pair(string[i], string[i + 1])
             for i in range(0, len(string), 2))
    return ''.join(pairs)
constant[Decipher string using Playfair cipher according to initialised key. Punctuation and whitespace
are removed from the input. The ciphertext should be an even number of characters. If the input ciphertext is not an even number of characters, an 'X' will be appended.
Example::
plaintext = Playfair(key='zgptfoihmuwdrcnykeqaxvsbl').decipher(ciphertext)
:param string: The string to decipher.
:returns: The deciphered string.
]
variable[string] assign[=] call[name[self].remove_punctuation, parameter[name[string]]]
if compare[binary_operation[call[name[len], parameter[name[string]]] <ast.Mod object at 0x7da2590d6920> constant[2]] equal[==] constant[1]] begin[:]
<ast.AugAssign object at 0x7da1b065a0e0>
variable[ret] assign[=] constant[]
for taget[name[c]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[string]]], constant[2]]]] begin[:]
<ast.AugAssign object at 0x7da1b0659ab0>
return[name[ret]] | keyword[def] identifier[decipher] ( identifier[self] , identifier[string] ):
literal[string]
identifier[string] = identifier[self] . identifier[remove_punctuation] ( identifier[string] )
keyword[if] identifier[len] ( identifier[string] )% literal[int] == literal[int] :
identifier[string] += literal[string]
identifier[ret] = literal[string]
keyword[for] identifier[c] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[string] ), literal[int] ):
identifier[ret] += identifier[self] . identifier[decipher_pair] ( identifier[string] [ identifier[c] ], identifier[string] [ identifier[c] + literal[int] ])
keyword[return] identifier[ret] | def decipher(self, string):
"""Decipher string using Playfair cipher according to initialised key. Punctuation and whitespace
are removed from the input. The ciphertext should be an even number of characters. If the input ciphertext is not an even number of characters, an 'X' will be appended.
Example::
plaintext = Playfair(key='zgptfoihmuwdrcnykeqaxvsbl').decipher(ciphertext)
:param string: The string to decipher.
:returns: The deciphered string.
"""
string = self.remove_punctuation(string)
if len(string) % 2 == 1:
string += 'X' # depends on [control=['if'], data=[]]
ret = ''
for c in range(0, len(string), 2):
ret += self.decipher_pair(string[c], string[c + 1]) # depends on [control=['for'], data=['c']]
return ret |
def volume(self) -> float:
"""
Volume of the unit cell.
"""
m = self._matrix
return float(abs(dot(np.cross(m[0], m[1]), m[2]))) | def function[volume, parameter[self]]:
constant[
Volume of the unit cell.
]
variable[m] assign[=] name[self]._matrix
return[call[name[float], parameter[call[name[abs], parameter[call[name[dot], parameter[call[name[np].cross, parameter[call[name[m]][constant[0]], call[name[m]][constant[1]]]], call[name[m]][constant[2]]]]]]]]] | keyword[def] identifier[volume] ( identifier[self] )-> identifier[float] :
literal[string]
identifier[m] = identifier[self] . identifier[_matrix]
keyword[return] identifier[float] ( identifier[abs] ( identifier[dot] ( identifier[np] . identifier[cross] ( identifier[m] [ literal[int] ], identifier[m] [ literal[int] ]), identifier[m] [ literal[int] ]))) | def volume(self) -> float:
"""
Volume of the unit cell.
"""
m = self._matrix
return float(abs(dot(np.cross(m[0], m[1]), m[2]))) |
def getcomments(object):
"""Get lines of comments immediately preceding an object's source code."""
try: lines, lnum = findsource(object)
except IOError: return None
if ismodule(object):
# Look for a comment block at the top of the file.
start = 0
if lines and lines[0][:2] == '#!': start = 1
while start < len(lines) and string.strip(lines[start]) in ['', '#']:
start = start + 1
if start < len(lines) and lines[start][:1] == '#':
comments = []
end = start
while end < len(lines) and lines[end][:1] == '#':
comments.append(string.expandtabs(lines[end]))
end = end + 1
return string.join(comments, '')
# Look for a preceding block of comments at the same indentation.
elif lnum > 0:
indent = indentsize(lines[lnum])
end = lnum - 1
if end >= 0 and string.lstrip(lines[end])[:1] == '#' and \
indentsize(lines[end]) == indent:
comments = [string.lstrip(string.expandtabs(lines[end]))]
if end > 0:
end = end - 1
comment = string.lstrip(string.expandtabs(lines[end]))
while comment[:1] == '#' and indentsize(lines[end]) == indent:
comments[:0] = [comment]
end = end - 1
if end < 0: break
comment = string.lstrip(string.expandtabs(lines[end]))
while comments and string.strip(comments[0]) == '#':
comments[:1] = []
while comments and string.strip(comments[-1]) == '#':
comments[-1:] = []
return string.join(comments, '') | def function[getcomments, parameter[object]]:
constant[Get lines of comments immediately preceding an object's source code.]
<ast.Try object at 0x7da1b08d93c0>
if call[name[ismodule], parameter[name[object]]] begin[:]
variable[start] assign[=] constant[0]
if <ast.BoolOp object at 0x7da1b08da2f0> begin[:]
variable[start] assign[=] constant[1]
while <ast.BoolOp object at 0x7da1b08da530> begin[:]
variable[start] assign[=] binary_operation[name[start] + constant[1]]
if <ast.BoolOp object at 0x7da1b08dba00> begin[:]
variable[comments] assign[=] list[[]]
variable[end] assign[=] name[start]
while <ast.BoolOp object at 0x7da1b08dbb50> begin[:]
call[name[comments].append, parameter[call[name[string].expandtabs, parameter[call[name[lines]][name[end]]]]]]
variable[end] assign[=] binary_operation[name[end] + constant[1]]
return[call[name[string].join, parameter[name[comments], constant[]]]] | keyword[def] identifier[getcomments] ( identifier[object] ):
literal[string]
keyword[try] : identifier[lines] , identifier[lnum] = identifier[findsource] ( identifier[object] )
keyword[except] identifier[IOError] : keyword[return] keyword[None]
keyword[if] identifier[ismodule] ( identifier[object] ):
identifier[start] = literal[int]
keyword[if] identifier[lines] keyword[and] identifier[lines] [ literal[int] ][: literal[int] ]== literal[string] : identifier[start] = literal[int]
keyword[while] identifier[start] < identifier[len] ( identifier[lines] ) keyword[and] identifier[string] . identifier[strip] ( identifier[lines] [ identifier[start] ]) keyword[in] [ literal[string] , literal[string] ]:
identifier[start] = identifier[start] + literal[int]
keyword[if] identifier[start] < identifier[len] ( identifier[lines] ) keyword[and] identifier[lines] [ identifier[start] ][: literal[int] ]== literal[string] :
identifier[comments] =[]
identifier[end] = identifier[start]
keyword[while] identifier[end] < identifier[len] ( identifier[lines] ) keyword[and] identifier[lines] [ identifier[end] ][: literal[int] ]== literal[string] :
identifier[comments] . identifier[append] ( identifier[string] . identifier[expandtabs] ( identifier[lines] [ identifier[end] ]))
identifier[end] = identifier[end] + literal[int]
keyword[return] identifier[string] . identifier[join] ( identifier[comments] , literal[string] )
keyword[elif] identifier[lnum] > literal[int] :
identifier[indent] = identifier[indentsize] ( identifier[lines] [ identifier[lnum] ])
identifier[end] = identifier[lnum] - literal[int]
keyword[if] identifier[end] >= literal[int] keyword[and] identifier[string] . identifier[lstrip] ( identifier[lines] [ identifier[end] ])[: literal[int] ]== literal[string] keyword[and] identifier[indentsize] ( identifier[lines] [ identifier[end] ])== identifier[indent] :
identifier[comments] =[ identifier[string] . identifier[lstrip] ( identifier[string] . identifier[expandtabs] ( identifier[lines] [ identifier[end] ]))]
keyword[if] identifier[end] > literal[int] :
identifier[end] = identifier[end] - literal[int]
identifier[comment] = identifier[string] . identifier[lstrip] ( identifier[string] . identifier[expandtabs] ( identifier[lines] [ identifier[end] ]))
keyword[while] identifier[comment] [: literal[int] ]== literal[string] keyword[and] identifier[indentsize] ( identifier[lines] [ identifier[end] ])== identifier[indent] :
identifier[comments] [: literal[int] ]=[ identifier[comment] ]
identifier[end] = identifier[end] - literal[int]
keyword[if] identifier[end] < literal[int] : keyword[break]
identifier[comment] = identifier[string] . identifier[lstrip] ( identifier[string] . identifier[expandtabs] ( identifier[lines] [ identifier[end] ]))
keyword[while] identifier[comments] keyword[and] identifier[string] . identifier[strip] ( identifier[comments] [ literal[int] ])== literal[string] :
identifier[comments] [: literal[int] ]=[]
keyword[while] identifier[comments] keyword[and] identifier[string] . identifier[strip] ( identifier[comments] [- literal[int] ])== literal[string] :
identifier[comments] [- literal[int] :]=[]
keyword[return] identifier[string] . identifier[join] ( identifier[comments] , literal[string] ) | def getcomments(object):
"""Get lines of comments immediately preceding an object's source code."""
try:
(lines, lnum) = findsource(object) # depends on [control=['try'], data=[]]
except IOError:
return None # depends on [control=['except'], data=[]]
if ismodule(object):
# Look for a comment block at the top of the file.
start = 0
if lines and lines[0][:2] == '#!':
start = 1 # depends on [control=['if'], data=[]]
while start < len(lines) and string.strip(lines[start]) in ['', '#']:
start = start + 1 # depends on [control=['while'], data=[]]
if start < len(lines) and lines[start][:1] == '#':
comments = []
end = start
while end < len(lines) and lines[end][:1] == '#':
comments.append(string.expandtabs(lines[end]))
end = end + 1 # depends on [control=['while'], data=[]]
return string.join(comments, '') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Look for a preceding block of comments at the same indentation.
elif lnum > 0:
indent = indentsize(lines[lnum])
end = lnum - 1
if end >= 0 and string.lstrip(lines[end])[:1] == '#' and (indentsize(lines[end]) == indent):
comments = [string.lstrip(string.expandtabs(lines[end]))]
if end > 0:
end = end - 1
comment = string.lstrip(string.expandtabs(lines[end]))
while comment[:1] == '#' and indentsize(lines[end]) == indent:
comments[:0] = [comment]
end = end - 1
if end < 0:
break # depends on [control=['if'], data=[]]
comment = string.lstrip(string.expandtabs(lines[end])) # depends on [control=['while'], data=[]] # depends on [control=['if'], data=['end']]
while comments and string.strip(comments[0]) == '#':
comments[:1] = [] # depends on [control=['while'], data=[]]
while comments and string.strip(comments[-1]) == '#':
comments[-1:] = [] # depends on [control=['while'], data=[]]
return string.join(comments, '') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['lnum']] |
def egg_link_path(dist):
# type: (Distribution) -> Optional[str]
"""
Return the path for the .egg-link file if it exists, otherwise, None.
There's 3 scenarios:
1) not in a virtualenv
try to find in site.USER_SITE, then site_packages
2) in a no-global virtualenv
try to find in site_packages
3) in a yes-global virtualenv
try to find in site_packages, then site.USER_SITE
(don't look in global location)
For #1 and #3, there could be odd cases, where there's an egg-link in 2
locations.
This method will just return the first one found.
"""
sites = []
if running_under_virtualenv():
if virtualenv_no_global():
sites.append(site_packages)
else:
sites.append(site_packages)
if user_site:
sites.append(user_site)
else:
if user_site:
sites.append(user_site)
sites.append(site_packages)
for site in sites:
egglink = os.path.join(site, dist.project_name) + '.egg-link'
if os.path.isfile(egglink):
return egglink
return None | def function[egg_link_path, parameter[dist]]:
constant[
Return the path for the .egg-link file if it exists, otherwise, None.
There's 3 scenarios:
1) not in a virtualenv
try to find in site.USER_SITE, then site_packages
2) in a no-global virtualenv
try to find in site_packages
3) in a yes-global virtualenv
try to find in site_packages, then site.USER_SITE
(don't look in global location)
For #1 and #3, there could be odd cases, where there's an egg-link in 2
locations.
This method will just return the first one found.
]
variable[sites] assign[=] list[[]]
if call[name[running_under_virtualenv], parameter[]] begin[:]
if call[name[virtualenv_no_global], parameter[]] begin[:]
call[name[sites].append, parameter[name[site_packages]]]
for taget[name[site]] in starred[name[sites]] begin[:]
variable[egglink] assign[=] binary_operation[call[name[os].path.join, parameter[name[site], name[dist].project_name]] + constant[.egg-link]]
if call[name[os].path.isfile, parameter[name[egglink]]] begin[:]
return[name[egglink]]
return[constant[None]] | keyword[def] identifier[egg_link_path] ( identifier[dist] ):
literal[string]
identifier[sites] =[]
keyword[if] identifier[running_under_virtualenv] ():
keyword[if] identifier[virtualenv_no_global] ():
identifier[sites] . identifier[append] ( identifier[site_packages] )
keyword[else] :
identifier[sites] . identifier[append] ( identifier[site_packages] )
keyword[if] identifier[user_site] :
identifier[sites] . identifier[append] ( identifier[user_site] )
keyword[else] :
keyword[if] identifier[user_site] :
identifier[sites] . identifier[append] ( identifier[user_site] )
identifier[sites] . identifier[append] ( identifier[site_packages] )
keyword[for] identifier[site] keyword[in] identifier[sites] :
identifier[egglink] = identifier[os] . identifier[path] . identifier[join] ( identifier[site] , identifier[dist] . identifier[project_name] )+ literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[egglink] ):
keyword[return] identifier[egglink]
keyword[return] keyword[None] | def egg_link_path(dist):
# type: (Distribution) -> Optional[str]
"\n Return the path for the .egg-link file if it exists, otherwise, None.\n\n There's 3 scenarios:\n 1) not in a virtualenv\n try to find in site.USER_SITE, then site_packages\n 2) in a no-global virtualenv\n try to find in site_packages\n 3) in a yes-global virtualenv\n try to find in site_packages, then site.USER_SITE\n (don't look in global location)\n\n For #1 and #3, there could be odd cases, where there's an egg-link in 2\n locations.\n\n This method will just return the first one found.\n "
sites = []
if running_under_virtualenv():
if virtualenv_no_global():
sites.append(site_packages) # depends on [control=['if'], data=[]]
else:
sites.append(site_packages)
if user_site:
sites.append(user_site) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
if user_site:
sites.append(user_site) # depends on [control=['if'], data=[]]
sites.append(site_packages)
for site in sites:
egglink = os.path.join(site, dist.project_name) + '.egg-link'
if os.path.isfile(egglink):
return egglink # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['site']]
return None |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.