code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def run_command(self, cmd,
sudo=False,
capture=True,
quiet=None,
return_result=False):
'''run_command is a wrapper for the global run_command, checking first
for sudo and exiting on error if needed. The message is returned as
a list of lines for the calling function to parse, and stdout uses
the parent process so it appears for the user.
Parameters
==========
cmd: the command to run
sudo: does the command require sudo?
quiet: if quiet set by function, overrides client setting.
return_result: return the result, if not successful (default False).
On success, returns result.
'''
# First preference to function, then to client setting
if quiet == None:
quiet = self.quiet
result = run_cmd(cmd, sudo=sudo, capture=capture, quiet=quiet)
# If one line is returned, squash dimension
if len(result['message']) == 1:
result['message'] = result['message'][0]
# If the user wants to return the result, just return it
if return_result is True:
return result
# On success, return result
if result['return_code'] == 0:
return result['message']
return result | def function[run_command, parameter[self, cmd, sudo, capture, quiet, return_result]]:
constant[run_command is a wrapper for the global run_command, checking first
for sudo and exiting on error if needed. The message is returned as
a list of lines for the calling function to parse, and stdout uses
the parent process so it appears for the user.
Parameters
==========
cmd: the command to run
sudo: does the command require sudo?
quiet: if quiet set by function, overrides client setting.
return_result: return the result, if not successful (default False).
On success, returns result.
]
if compare[name[quiet] equal[==] constant[None]] begin[:]
variable[quiet] assign[=] name[self].quiet
variable[result] assign[=] call[name[run_cmd], parameter[name[cmd]]]
if compare[call[name[len], parameter[call[name[result]][constant[message]]]] equal[==] constant[1]] begin[:]
call[name[result]][constant[message]] assign[=] call[call[name[result]][constant[message]]][constant[0]]
if compare[name[return_result] is constant[True]] begin[:]
return[name[result]]
if compare[call[name[result]][constant[return_code]] equal[==] constant[0]] begin[:]
return[call[name[result]][constant[message]]]
return[name[result]] | keyword[def] identifier[run_command] ( identifier[self] , identifier[cmd] ,
identifier[sudo] = keyword[False] ,
identifier[capture] = keyword[True] ,
identifier[quiet] = keyword[None] ,
identifier[return_result] = keyword[False] ):
literal[string]
keyword[if] identifier[quiet] == keyword[None] :
identifier[quiet] = identifier[self] . identifier[quiet]
identifier[result] = identifier[run_cmd] ( identifier[cmd] , identifier[sudo] = identifier[sudo] , identifier[capture] = identifier[capture] , identifier[quiet] = identifier[quiet] )
keyword[if] identifier[len] ( identifier[result] [ literal[string] ])== literal[int] :
identifier[result] [ literal[string] ]= identifier[result] [ literal[string] ][ literal[int] ]
keyword[if] identifier[return_result] keyword[is] keyword[True] :
keyword[return] identifier[result]
keyword[if] identifier[result] [ literal[string] ]== literal[int] :
keyword[return] identifier[result] [ literal[string] ]
keyword[return] identifier[result] | def run_command(self, cmd, sudo=False, capture=True, quiet=None, return_result=False):
"""run_command is a wrapper for the global run_command, checking first
for sudo and exiting on error if needed. The message is returned as
a list of lines for the calling function to parse, and stdout uses
the parent process so it appears for the user.
Parameters
==========
cmd: the command to run
sudo: does the command require sudo?
quiet: if quiet set by function, overrides client setting.
return_result: return the result, if not successful (default False).
On success, returns result.
"""
# First preference to function, then to client setting
if quiet == None:
quiet = self.quiet # depends on [control=['if'], data=['quiet']]
result = run_cmd(cmd, sudo=sudo, capture=capture, quiet=quiet)
# If one line is returned, squash dimension
if len(result['message']) == 1:
result['message'] = result['message'][0] # depends on [control=['if'], data=[]]
# If the user wants to return the result, just return it
if return_result is True:
return result # depends on [control=['if'], data=[]]
# On success, return result
if result['return_code'] == 0:
return result['message'] # depends on [control=['if'], data=[]]
return result |
def _get_adjusted_merge_area(self, attrs, insertion_point, no_to_insert,
axis):
"""Returns updated merge area
Parameters
----------
attrs: Dict
\tCell attribute dictionary that shall be adjusted
insertion_point: Integer
\tPont on axis, before which insertion takes place
no_to_insert: Integer >= 0
\tNumber of rows/cols/tabs that shall be inserted
axis: Integer in range(2)
\tSpecifies number of dimension, i.e. 0 == row, 1 == col
"""
assert axis in range(2)
if "merge_area" not in attrs or attrs["merge_area"] is None:
return
top, left, bottom, right = attrs["merge_area"]
selection = Selection([(top, left)], [(bottom, right)], [], [], [])
selection.insert(insertion_point, no_to_insert, axis)
__top, __left = selection.block_tl[0]
__bottom, __right = selection.block_br[0]
# Adjust merge area if it is beyond the grid shape
rows, cols, tabs = self.shape
if __top < 0 and __bottom < 0 or __top >= rows and __bottom >= rows or\
__left < 0 and __right < 0 or __left >= cols and __right >= cols:
return
if __top < 0:
__top = 0
if __top >= rows:
__top = rows - 1
if __bottom < 0:
__bottom = 0
if __bottom >= rows:
__bottom = rows - 1
if __left < 0:
__left = 0
if __left >= cols:
__left = cols - 1
if __right < 0:
__right = 0
if __right >= cols:
__right = cols - 1
return __top, __left, __bottom, __right | def function[_get_adjusted_merge_area, parameter[self, attrs, insertion_point, no_to_insert, axis]]:
constant[Returns updated merge area
Parameters
----------
attrs: Dict
Cell attribute dictionary that shall be adjusted
insertion_point: Integer
Pont on axis, before which insertion takes place
no_to_insert: Integer >= 0
Number of rows/cols/tabs that shall be inserted
axis: Integer in range(2)
Specifies number of dimension, i.e. 0 == row, 1 == col
]
assert[compare[name[axis] in call[name[range], parameter[constant[2]]]]]
if <ast.BoolOp object at 0x7da1b155cbe0> begin[:]
return[None]
<ast.Tuple object at 0x7da1b155c940> assign[=] call[name[attrs]][constant[merge_area]]
variable[selection] assign[=] call[name[Selection], parameter[list[[<ast.Tuple object at 0x7da1b155d000>]], list[[<ast.Tuple object at 0x7da1b155d2d0>]], list[[]], list[[]], list[[]]]]
call[name[selection].insert, parameter[name[insertion_point], name[no_to_insert], name[axis]]]
<ast.Tuple object at 0x7da1b155d5a0> assign[=] call[name[selection].block_tl][constant[0]]
<ast.Tuple object at 0x7da1b155d720> assign[=] call[name[selection].block_br][constant[0]]
<ast.Tuple object at 0x7da1b155c7c0> assign[=] name[self].shape
if <ast.BoolOp object at 0x7da1b155c670> begin[:]
return[None]
if compare[name[__top] less[<] constant[0]] begin[:]
variable[__top] assign[=] constant[0]
if compare[name[__top] greater_or_equal[>=] name[rows]] begin[:]
variable[__top] assign[=] binary_operation[name[rows] - constant[1]]
if compare[name[__bottom] less[<] constant[0]] begin[:]
variable[__bottom] assign[=] constant[0]
if compare[name[__bottom] greater_or_equal[>=] name[rows]] begin[:]
variable[__bottom] assign[=] binary_operation[name[rows] - constant[1]]
if compare[name[__left] less[<] constant[0]] begin[:]
variable[__left] assign[=] constant[0]
if compare[name[__left] greater_or_equal[>=] name[cols]] begin[:]
variable[__left] assign[=] binary_operation[name[cols] - constant[1]]
if compare[name[__right] less[<] constant[0]] begin[:]
variable[__right] assign[=] constant[0]
if compare[name[__right] greater_or_equal[>=] name[cols]] begin[:]
variable[__right] assign[=] binary_operation[name[cols] - constant[1]]
return[tuple[[<ast.Name object at 0x7da1b1529cf0>, <ast.Name object at 0x7da1b1529a20>, <ast.Name object at 0x7da1b1529b70>, <ast.Name object at 0x7da1b1528eb0>]]] | keyword[def] identifier[_get_adjusted_merge_area] ( identifier[self] , identifier[attrs] , identifier[insertion_point] , identifier[no_to_insert] ,
identifier[axis] ):
literal[string]
keyword[assert] identifier[axis] keyword[in] identifier[range] ( literal[int] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[attrs] keyword[or] identifier[attrs] [ literal[string] ] keyword[is] keyword[None] :
keyword[return]
identifier[top] , identifier[left] , identifier[bottom] , identifier[right] = identifier[attrs] [ literal[string] ]
identifier[selection] = identifier[Selection] ([( identifier[top] , identifier[left] )],[( identifier[bottom] , identifier[right] )],[],[],[])
identifier[selection] . identifier[insert] ( identifier[insertion_point] , identifier[no_to_insert] , identifier[axis] )
identifier[__top] , identifier[__left] = identifier[selection] . identifier[block_tl] [ literal[int] ]
identifier[__bottom] , identifier[__right] = identifier[selection] . identifier[block_br] [ literal[int] ]
identifier[rows] , identifier[cols] , identifier[tabs] = identifier[self] . identifier[shape]
keyword[if] identifier[__top] < literal[int] keyword[and] identifier[__bottom] < literal[int] keyword[or] identifier[__top] >= identifier[rows] keyword[and] identifier[__bottom] >= identifier[rows] keyword[or] identifier[__left] < literal[int] keyword[and] identifier[__right] < literal[int] keyword[or] identifier[__left] >= identifier[cols] keyword[and] identifier[__right] >= identifier[cols] :
keyword[return]
keyword[if] identifier[__top] < literal[int] :
identifier[__top] = literal[int]
keyword[if] identifier[__top] >= identifier[rows] :
identifier[__top] = identifier[rows] - literal[int]
keyword[if] identifier[__bottom] < literal[int] :
identifier[__bottom] = literal[int]
keyword[if] identifier[__bottom] >= identifier[rows] :
identifier[__bottom] = identifier[rows] - literal[int]
keyword[if] identifier[__left] < literal[int] :
identifier[__left] = literal[int]
keyword[if] identifier[__left] >= identifier[cols] :
identifier[__left] = identifier[cols] - literal[int]
keyword[if] identifier[__right] < literal[int] :
identifier[__right] = literal[int]
keyword[if] identifier[__right] >= identifier[cols] :
identifier[__right] = identifier[cols] - literal[int]
keyword[return] identifier[__top] , identifier[__left] , identifier[__bottom] , identifier[__right] | def _get_adjusted_merge_area(self, attrs, insertion_point, no_to_insert, axis):
"""Returns updated merge area
Parameters
----------
attrs: Dict
Cell attribute dictionary that shall be adjusted
insertion_point: Integer
Pont on axis, before which insertion takes place
no_to_insert: Integer >= 0
Number of rows/cols/tabs that shall be inserted
axis: Integer in range(2)
Specifies number of dimension, i.e. 0 == row, 1 == col
"""
assert axis in range(2)
if 'merge_area' not in attrs or attrs['merge_area'] is None:
return # depends on [control=['if'], data=[]]
(top, left, bottom, right) = attrs['merge_area']
selection = Selection([(top, left)], [(bottom, right)], [], [], [])
selection.insert(insertion_point, no_to_insert, axis)
(__top, __left) = selection.block_tl[0]
(__bottom, __right) = selection.block_br[0]
# Adjust merge area if it is beyond the grid shape
(rows, cols, tabs) = self.shape
if __top < 0 and __bottom < 0 or (__top >= rows and __bottom >= rows) or (__left < 0 and __right < 0) or (__left >= cols and __right >= cols):
return # depends on [control=['if'], data=[]]
if __top < 0:
__top = 0 # depends on [control=['if'], data=['__top']]
if __top >= rows:
__top = rows - 1 # depends on [control=['if'], data=['__top', 'rows']]
if __bottom < 0:
__bottom = 0 # depends on [control=['if'], data=['__bottom']]
if __bottom >= rows:
__bottom = rows - 1 # depends on [control=['if'], data=['__bottom', 'rows']]
if __left < 0:
__left = 0 # depends on [control=['if'], data=['__left']]
if __left >= cols:
__left = cols - 1 # depends on [control=['if'], data=['__left', 'cols']]
if __right < 0:
__right = 0 # depends on [control=['if'], data=['__right']]
if __right >= cols:
__right = cols - 1 # depends on [control=['if'], data=['__right', 'cols']]
return (__top, __left, __bottom, __right) |
def apply(self, cls, originalMemberNameList, memberName, classNamingConvention, getter, setter):
"""
:type cls: type
:type originalMemberNameList: list(str)
:type memberName: str
:type classNamingConvention: INamingConvention|None
"""
accessorDict = self._accessorDict(memberName, classNamingConvention, getter, setter)
for accessorName, accessor in accessorDict.items():
if accessorName not in originalMemberNameList and accessor is not None:
setattr(cls, accessorName, accessor) | def function[apply, parameter[self, cls, originalMemberNameList, memberName, classNamingConvention, getter, setter]]:
constant[
:type cls: type
:type originalMemberNameList: list(str)
:type memberName: str
:type classNamingConvention: INamingConvention|None
]
variable[accessorDict] assign[=] call[name[self]._accessorDict, parameter[name[memberName], name[classNamingConvention], name[getter], name[setter]]]
for taget[tuple[[<ast.Name object at 0x7da1b0a4ae00>, <ast.Name object at 0x7da1b0a48280>]]] in starred[call[name[accessorDict].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da20c795c00> begin[:]
call[name[setattr], parameter[name[cls], name[accessorName], name[accessor]]] | keyword[def] identifier[apply] ( identifier[self] , identifier[cls] , identifier[originalMemberNameList] , identifier[memberName] , identifier[classNamingConvention] , identifier[getter] , identifier[setter] ):
literal[string]
identifier[accessorDict] = identifier[self] . identifier[_accessorDict] ( identifier[memberName] , identifier[classNamingConvention] , identifier[getter] , identifier[setter] )
keyword[for] identifier[accessorName] , identifier[accessor] keyword[in] identifier[accessorDict] . identifier[items] ():
keyword[if] identifier[accessorName] keyword[not] keyword[in] identifier[originalMemberNameList] keyword[and] identifier[accessor] keyword[is] keyword[not] keyword[None] :
identifier[setattr] ( identifier[cls] , identifier[accessorName] , identifier[accessor] ) | def apply(self, cls, originalMemberNameList, memberName, classNamingConvention, getter, setter):
"""
:type cls: type
:type originalMemberNameList: list(str)
:type memberName: str
:type classNamingConvention: INamingConvention|None
"""
accessorDict = self._accessorDict(memberName, classNamingConvention, getter, setter)
for (accessorName, accessor) in accessorDict.items():
if accessorName not in originalMemberNameList and accessor is not None:
setattr(cls, accessorName, accessor) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def is_uniform(self):
"""Return if file contains a uniform series of pages."""
# the hashes of IFDs 0, 7, and -1 are the same
pages = self.pages
page = pages[0]
if page.is_scanimage or page.is_nih:
return True
try:
useframes = pages.useframes
pages.useframes = False
h = page.hash
for i in (1, 7, -1):
if pages[i].aspage().hash != h:
return False
except IndexError:
return False
finally:
pages.useframes = useframes
return True | def function[is_uniform, parameter[self]]:
constant[Return if file contains a uniform series of pages.]
variable[pages] assign[=] name[self].pages
variable[page] assign[=] call[name[pages]][constant[0]]
if <ast.BoolOp object at 0x7da1b18abcd0> begin[:]
return[constant[True]]
<ast.Try object at 0x7da1b18a9f00>
return[constant[True]] | keyword[def] identifier[is_uniform] ( identifier[self] ):
literal[string]
identifier[pages] = identifier[self] . identifier[pages]
identifier[page] = identifier[pages] [ literal[int] ]
keyword[if] identifier[page] . identifier[is_scanimage] keyword[or] identifier[page] . identifier[is_nih] :
keyword[return] keyword[True]
keyword[try] :
identifier[useframes] = identifier[pages] . identifier[useframes]
identifier[pages] . identifier[useframes] = keyword[False]
identifier[h] = identifier[page] . identifier[hash]
keyword[for] identifier[i] keyword[in] ( literal[int] , literal[int] ,- literal[int] ):
keyword[if] identifier[pages] [ identifier[i] ]. identifier[aspage] (). identifier[hash] != identifier[h] :
keyword[return] keyword[False]
keyword[except] identifier[IndexError] :
keyword[return] keyword[False]
keyword[finally] :
identifier[pages] . identifier[useframes] = identifier[useframes]
keyword[return] keyword[True] | def is_uniform(self):
"""Return if file contains a uniform series of pages."""
# the hashes of IFDs 0, 7, and -1 are the same
pages = self.pages
page = pages[0]
if page.is_scanimage or page.is_nih:
return True # depends on [control=['if'], data=[]]
try:
useframes = pages.useframes
pages.useframes = False
h = page.hash
for i in (1, 7, -1):
if pages[i].aspage().hash != h:
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['try'], data=[]]
except IndexError:
return False # depends on [control=['except'], data=[]]
finally:
pages.useframes = useframes
return True |
def get_code(self, **kwargs): # noqa: E501
"""get_code # noqa: E501
get the code version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_code(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: VersionInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_code_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_code_with_http_info(**kwargs) # noqa: E501
return data | def function[get_code, parameter[self]]:
constant[get_code # noqa: E501
get the code version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_code(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: VersionInfo
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].get_code_with_http_info, parameter[]]] | keyword[def] identifier[get_code] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[get_code_with_http_info] (** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[get_code_with_http_info] (** identifier[kwargs] )
keyword[return] identifier[data] | def get_code(self, **kwargs): # noqa: E501
'get_code # noqa: E501\n\n get the code version # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_code(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :return: VersionInfo\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_code_with_http_info(**kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.get_code_with_http_info(**kwargs) # noqa: E501
return data |
def waitAndGet(self, event_name, timeout=DEFAULT_TIMEOUT):
"""Blocks until an event of the specified name has been received and
return the event, or timeout.
Args:
event_name: string, name of the event to get.
timeout: float, the number of seconds to wait before giving up.
Returns:
SnippetEvent, the oldest entry of the specified event.
Raises:
Error: If the specified timeout is longer than the max timeout
supported.
TimeoutError: The expected event does not occur within time limit.
"""
if timeout:
if timeout > MAX_TIMEOUT:
raise Error(
self._ad,
'Specified timeout %s is longer than max timeout %s.' %
(timeout, MAX_TIMEOUT))
# Convert to milliseconds for java side.
timeout_ms = int(timeout * 1000)
try:
raw_event = self._event_client.eventWaitAndGet(
self._id, event_name, timeout_ms)
except Exception as e:
if 'EventSnippetException: timeout.' in str(e):
raise TimeoutError(
self._ad,
'Timed out after waiting %ss for event "%s" triggered by'
' %s (%s).' % (timeout, event_name, self._method_name,
self._id))
raise
return snippet_event.from_dict(raw_event) | def function[waitAndGet, parameter[self, event_name, timeout]]:
constant[Blocks until an event of the specified name has been received and
return the event, or timeout.
Args:
event_name: string, name of the event to get.
timeout: float, the number of seconds to wait before giving up.
Returns:
SnippetEvent, the oldest entry of the specified event.
Raises:
Error: If the specified timeout is longer than the max timeout
supported.
TimeoutError: The expected event does not occur within time limit.
]
if name[timeout] begin[:]
if compare[name[timeout] greater[>] name[MAX_TIMEOUT]] begin[:]
<ast.Raise object at 0x7da1b07473d0>
variable[timeout_ms] assign[=] call[name[int], parameter[binary_operation[name[timeout] * constant[1000]]]]
<ast.Try object at 0x7da1b0746110>
return[call[name[snippet_event].from_dict, parameter[name[raw_event]]]] | keyword[def] identifier[waitAndGet] ( identifier[self] , identifier[event_name] , identifier[timeout] = identifier[DEFAULT_TIMEOUT] ):
literal[string]
keyword[if] identifier[timeout] :
keyword[if] identifier[timeout] > identifier[MAX_TIMEOUT] :
keyword[raise] identifier[Error] (
identifier[self] . identifier[_ad] ,
literal[string] %
( identifier[timeout] , identifier[MAX_TIMEOUT] ))
identifier[timeout_ms] = identifier[int] ( identifier[timeout] * literal[int] )
keyword[try] :
identifier[raw_event] = identifier[self] . identifier[_event_client] . identifier[eventWaitAndGet] (
identifier[self] . identifier[_id] , identifier[event_name] , identifier[timeout_ms] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] literal[string] keyword[in] identifier[str] ( identifier[e] ):
keyword[raise] identifier[TimeoutError] (
identifier[self] . identifier[_ad] ,
literal[string]
literal[string] %( identifier[timeout] , identifier[event_name] , identifier[self] . identifier[_method_name] ,
identifier[self] . identifier[_id] ))
keyword[raise]
keyword[return] identifier[snippet_event] . identifier[from_dict] ( identifier[raw_event] ) | def waitAndGet(self, event_name, timeout=DEFAULT_TIMEOUT):
"""Blocks until an event of the specified name has been received and
return the event, or timeout.
Args:
event_name: string, name of the event to get.
timeout: float, the number of seconds to wait before giving up.
Returns:
SnippetEvent, the oldest entry of the specified event.
Raises:
Error: If the specified timeout is longer than the max timeout
supported.
TimeoutError: The expected event does not occur within time limit.
"""
if timeout:
if timeout > MAX_TIMEOUT:
raise Error(self._ad, 'Specified timeout %s is longer than max timeout %s.' % (timeout, MAX_TIMEOUT)) # depends on [control=['if'], data=['timeout', 'MAX_TIMEOUT']] # depends on [control=['if'], data=[]]
# Convert to milliseconds for java side.
timeout_ms = int(timeout * 1000)
try:
raw_event = self._event_client.eventWaitAndGet(self._id, event_name, timeout_ms) # depends on [control=['try'], data=[]]
except Exception as e:
if 'EventSnippetException: timeout.' in str(e):
raise TimeoutError(self._ad, 'Timed out after waiting %ss for event "%s" triggered by %s (%s).' % (timeout, event_name, self._method_name, self._id)) # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=['e']]
return snippet_event.from_dict(raw_event) |
def get_config(self,
module_id,
option,
default=None,
boolean=False,
secret=False,
forceask=False,
forcenone=False,
hint=None):
"""Gets a specific config from the config files, allowing for a default.
Handles booleans vs strings appropriately.
@param module_id: module id this relates to, eg com.mycorp.mymodule.mymodule
@param option: config item to set
@param default: default value if not set in files
@param boolean: whether this is a boolean value or not (default False)
@param secret: whether the config item is a secret
@param forceask: if set to True, allows you to override any value already set (default False)
@param forcenone: if set to True, allows you to set the value to None (default False)
@param hint: if we are interactive, then show this prompt to help the user input a useful value
@type module_id: string
@type option: string
@type default: string
@type boolean: boolean
@type secret: boolean
@type forceask: boolean
@type forcenone: boolean
@type hint: string
"""
shutit_global.shutit_global_object.yield_to_draw()
cfg = self.cfg
if module_id not in cfg.keys():
cfg[module_id] = {}
if not self.config_parser.has_section(module_id):
self.config_parser.add_section(module_id)
if not forceask and self.config_parser.has_option(module_id, option):
if boolean:
cfg[module_id][option] = self.config_parser.getboolean(module_id, option)
else:
cfg[module_id][option] = self.config_parser.get(module_id, option)
else:
if not forcenone:
if shutit_global.shutit_global_object.interactive > 0:
if self.build['accept_defaults'] is None:
answer = None
# util_raw_input may change the interactive level, so guard for this.
while answer not in ('yes','no','') and shutit_global.shutit_global_object.interactive > 1:
answer = shutit_util.util_raw_input(prompt=shutit_util.colorise('32', 'Do you want to accept the config option defaults? ' + '(boolean - input "yes" or "no") (default: yes): \n'),default='yes',ispass=secret)
# util_raw_input may change the interactive level, so guard for this.
self.build['accept_defaults'] = answer in ('yes','') or shutit_global.shutit_global_object.interactive < 2
if self.build['accept_defaults'] and default != None:
cfg[module_id][option] = default
else:
# util_raw_input may change the interactive level, so guard for this.
prompt = '\n\nPlease input a value for ' + module_id + '.' + option
if default != None:
prompt = prompt + ' (default: ' + str(default) + ')'
if hint != None:
prompt = prompt + '\n\n' + hint
answer = None
if boolean:
while answer not in ('yes','no'):
answer = shutit_util.util_raw_input(prompt=shutit_util.colorise('32',prompt + ' (boolean - input "yes" or "no"): \n'),ispass=secret)
if answer == 'yes':
answer = True
elif answer == 'no':
answer = False
else:
if re.search('assw',option) is None:
answer = shutit_util.util_raw_input(prompt=shutit_util.colorise('32',prompt) + ': \n',ispass=secret)
else:
answer = shutit_util.util_raw_input(ispass=True,prompt=shutit_util.colorise('32',prompt) + ': \n')
if answer == '' and default != None:
answer = default
cfg[module_id][option] = answer
else:
if default != None:
cfg[module_id][option] = default
else:
self.fail('Config item: ' + option + ':\nin module:\n[' + module_id + ']\nmust be set!\n\nOften this is a deliberate requirement to place in your ~/.shutit/config file, or you can pass in with:\n\n-s ' + module_id + ' ' + option + ' yourvalue\n\nto the build command', throw_exception=False) # pragma: no cover
else:
cfg[module_id][option] = default
return True | def function[get_config, parameter[self, module_id, option, default, boolean, secret, forceask, forcenone, hint]]:
constant[Gets a specific config from the config files, allowing for a default.
Handles booleans vs strings appropriately.
@param module_id: module id this relates to, eg com.mycorp.mymodule.mymodule
@param option: config item to set
@param default: default value if not set in files
@param boolean: whether this is a boolean value or not (default False)
@param secret: whether the config item is a secret
@param forceask: if set to True, allows you to override any value already set (default False)
@param forcenone: if set to True, allows you to set the value to None (default False)
@param hint: if we are interactive, then show this prompt to help the user input a useful value
@type module_id: string
@type option: string
@type default: string
@type boolean: boolean
@type secret: boolean
@type forceask: boolean
@type forcenone: boolean
@type hint: string
]
call[name[shutit_global].shutit_global_object.yield_to_draw, parameter[]]
variable[cfg] assign[=] name[self].cfg
if compare[name[module_id] <ast.NotIn object at 0x7da2590d7190> call[name[cfg].keys, parameter[]]] begin[:]
call[name[cfg]][name[module_id]] assign[=] dictionary[[], []]
if <ast.UnaryOp object at 0x7da20cabebf0> begin[:]
call[name[self].config_parser.add_section, parameter[name[module_id]]]
if <ast.BoolOp object at 0x7da20cabeec0> begin[:]
if name[boolean] begin[:]
call[call[name[cfg]][name[module_id]]][name[option]] assign[=] call[name[self].config_parser.getboolean, parameter[name[module_id], name[option]]]
return[constant[True]] | keyword[def] identifier[get_config] ( identifier[self] ,
identifier[module_id] ,
identifier[option] ,
identifier[default] = keyword[None] ,
identifier[boolean] = keyword[False] ,
identifier[secret] = keyword[False] ,
identifier[forceask] = keyword[False] ,
identifier[forcenone] = keyword[False] ,
identifier[hint] = keyword[None] ):
literal[string]
identifier[shutit_global] . identifier[shutit_global_object] . identifier[yield_to_draw] ()
identifier[cfg] = identifier[self] . identifier[cfg]
keyword[if] identifier[module_id] keyword[not] keyword[in] identifier[cfg] . identifier[keys] ():
identifier[cfg] [ identifier[module_id] ]={}
keyword[if] keyword[not] identifier[self] . identifier[config_parser] . identifier[has_section] ( identifier[module_id] ):
identifier[self] . identifier[config_parser] . identifier[add_section] ( identifier[module_id] )
keyword[if] keyword[not] identifier[forceask] keyword[and] identifier[self] . identifier[config_parser] . identifier[has_option] ( identifier[module_id] , identifier[option] ):
keyword[if] identifier[boolean] :
identifier[cfg] [ identifier[module_id] ][ identifier[option] ]= identifier[self] . identifier[config_parser] . identifier[getboolean] ( identifier[module_id] , identifier[option] )
keyword[else] :
identifier[cfg] [ identifier[module_id] ][ identifier[option] ]= identifier[self] . identifier[config_parser] . identifier[get] ( identifier[module_id] , identifier[option] )
keyword[else] :
keyword[if] keyword[not] identifier[forcenone] :
keyword[if] identifier[shutit_global] . identifier[shutit_global_object] . identifier[interactive] > literal[int] :
keyword[if] identifier[self] . identifier[build] [ literal[string] ] keyword[is] keyword[None] :
identifier[answer] = keyword[None]
keyword[while] identifier[answer] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] ) keyword[and] identifier[shutit_global] . identifier[shutit_global_object] . identifier[interactive] > literal[int] :
identifier[answer] = identifier[shutit_util] . identifier[util_raw_input] ( identifier[prompt] = identifier[shutit_util] . identifier[colorise] ( literal[string] , literal[string] + literal[string] ), identifier[default] = literal[string] , identifier[ispass] = identifier[secret] )
identifier[self] . identifier[build] [ literal[string] ]= identifier[answer] keyword[in] ( literal[string] , literal[string] ) keyword[or] identifier[shutit_global] . identifier[shutit_global_object] . identifier[interactive] < literal[int]
keyword[if] identifier[self] . identifier[build] [ literal[string] ] keyword[and] identifier[default] != keyword[None] :
identifier[cfg] [ identifier[module_id] ][ identifier[option] ]= identifier[default]
keyword[else] :
identifier[prompt] = literal[string] + identifier[module_id] + literal[string] + identifier[option]
keyword[if] identifier[default] != keyword[None] :
identifier[prompt] = identifier[prompt] + literal[string] + identifier[str] ( identifier[default] )+ literal[string]
keyword[if] identifier[hint] != keyword[None] :
identifier[prompt] = identifier[prompt] + literal[string] + identifier[hint]
identifier[answer] = keyword[None]
keyword[if] identifier[boolean] :
keyword[while] identifier[answer] keyword[not] keyword[in] ( literal[string] , literal[string] ):
identifier[answer] = identifier[shutit_util] . identifier[util_raw_input] ( identifier[prompt] = identifier[shutit_util] . identifier[colorise] ( literal[string] , identifier[prompt] + literal[string] ), identifier[ispass] = identifier[secret] )
keyword[if] identifier[answer] == literal[string] :
identifier[answer] = keyword[True]
keyword[elif] identifier[answer] == literal[string] :
identifier[answer] = keyword[False]
keyword[else] :
keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[option] ) keyword[is] keyword[None] :
identifier[answer] = identifier[shutit_util] . identifier[util_raw_input] ( identifier[prompt] = identifier[shutit_util] . identifier[colorise] ( literal[string] , identifier[prompt] )+ literal[string] , identifier[ispass] = identifier[secret] )
keyword[else] :
identifier[answer] = identifier[shutit_util] . identifier[util_raw_input] ( identifier[ispass] = keyword[True] , identifier[prompt] = identifier[shutit_util] . identifier[colorise] ( literal[string] , identifier[prompt] )+ literal[string] )
keyword[if] identifier[answer] == literal[string] keyword[and] identifier[default] != keyword[None] :
identifier[answer] = identifier[default]
identifier[cfg] [ identifier[module_id] ][ identifier[option] ]= identifier[answer]
keyword[else] :
keyword[if] identifier[default] != keyword[None] :
identifier[cfg] [ identifier[module_id] ][ identifier[option] ]= identifier[default]
keyword[else] :
identifier[self] . identifier[fail] ( literal[string] + identifier[option] + literal[string] + identifier[module_id] + literal[string] + identifier[module_id] + literal[string] + identifier[option] + literal[string] , identifier[throw_exception] = keyword[False] )
keyword[else] :
identifier[cfg] [ identifier[module_id] ][ identifier[option] ]= identifier[default]
keyword[return] keyword[True] | def get_config(self, module_id, option, default=None, boolean=False, secret=False, forceask=False, forcenone=False, hint=None):
"""Gets a specific config from the config files, allowing for a default.
Handles booleans vs strings appropriately.
@param module_id: module id this relates to, eg com.mycorp.mymodule.mymodule
@param option: config item to set
@param default: default value if not set in files
@param boolean: whether this is a boolean value or not (default False)
@param secret: whether the config item is a secret
@param forceask: if set to True, allows you to override any value already set (default False)
@param forcenone: if set to True, allows you to set the value to None (default False)
@param hint: if we are interactive, then show this prompt to help the user input a useful value
@type module_id: string
@type option: string
@type default: string
@type boolean: boolean
@type secret: boolean
@type forceask: boolean
@type forcenone: boolean
@type hint: string
"""
shutit_global.shutit_global_object.yield_to_draw()
cfg = self.cfg
if module_id not in cfg.keys():
cfg[module_id] = {} # depends on [control=['if'], data=['module_id']]
if not self.config_parser.has_section(module_id):
self.config_parser.add_section(module_id) # depends on [control=['if'], data=[]]
if not forceask and self.config_parser.has_option(module_id, option):
if boolean:
cfg[module_id][option] = self.config_parser.getboolean(module_id, option) # depends on [control=['if'], data=[]]
else:
cfg[module_id][option] = self.config_parser.get(module_id, option) # depends on [control=['if'], data=[]]
elif not forcenone:
if shutit_global.shutit_global_object.interactive > 0:
if self.build['accept_defaults'] is None:
answer = None # util_raw_input may change the interactive level, so guard for this.
while answer not in ('yes', 'no', '') and shutit_global.shutit_global_object.interactive > 1:
answer = shutit_util.util_raw_input(prompt=shutit_util.colorise('32', 'Do you want to accept the config option defaults? ' + '(boolean - input "yes" or "no") (default: yes): \n'), default='yes', ispass=secret) # depends on [control=['while'], data=[]] # util_raw_input may change the interactive level, so guard for this.
self.build['accept_defaults'] = answer in ('yes', '') or shutit_global.shutit_global_object.interactive < 2 # depends on [control=['if'], data=[]]
if self.build['accept_defaults'] and default != None:
cfg[module_id][option] = default # depends on [control=['if'], data=[]]
else: # util_raw_input may change the interactive level, so guard for this.
prompt = '\n\nPlease input a value for ' + module_id + '.' + option
if default != None:
prompt = prompt + ' (default: ' + str(default) + ')' # depends on [control=['if'], data=['default']]
if hint != None:
prompt = prompt + '\n\n' + hint # depends on [control=['if'], data=['hint']]
answer = None
if boolean:
while answer not in ('yes', 'no'):
answer = shutit_util.util_raw_input(prompt=shutit_util.colorise('32', prompt + ' (boolean - input "yes" or "no"): \n'), ispass=secret) # depends on [control=['while'], data=['answer']]
if answer == 'yes':
answer = True # depends on [control=['if'], data=['answer']]
elif answer == 'no':
answer = False # depends on [control=['if'], data=['answer']] # depends on [control=['if'], data=[]]
elif re.search('assw', option) is None:
answer = shutit_util.util_raw_input(prompt=shutit_util.colorise('32', prompt) + ': \n', ispass=secret) # depends on [control=['if'], data=[]]
else:
answer = shutit_util.util_raw_input(ispass=True, prompt=shutit_util.colorise('32', prompt) + ': \n')
if answer == '' and default != None:
answer = default # depends on [control=['if'], data=[]]
cfg[module_id][option] = answer # depends on [control=['if'], data=[]]
elif default != None:
cfg[module_id][option] = default # depends on [control=['if'], data=['default']]
else:
self.fail('Config item: ' + option + ':\nin module:\n[' + module_id + ']\nmust be set!\n\nOften this is a deliberate requirement to place in your ~/.shutit/config file, or you can pass in with:\n\n-s ' + module_id + ' ' + option + ' yourvalue\n\nto the build command', throw_exception=False) # pragma: no cover # depends on [control=['if'], data=[]]
else:
cfg[module_id][option] = default
return True |
def _fit(self, y, exogenous=None, **fit_args):
"""Internal fit"""
# This wrapper is used for fitting either an ARIMA or a SARIMAX
def _fit_wrapper():
# these might change depending on which one
method = self.method
# If it's in kwargs, we'll use it
trend = self.trend
# if not seasonal:
if not self._is_seasonal():
if method is None:
method = "css-mle"
if trend is None:
if self.with_intercept:
trend = 'c'
else:
trend = 'nc'
# create the statsmodels ARIMA
arima = _ARIMA(endog=y, order=self.order, missing='none',
exog=exogenous, dates=None, freq=None)
# there's currently a bug in the ARIMA model where on pickling
# it tries to acquire an attribute called
# 'self.{dates|freq|missing}', but they do not exist as class
# attrs! They're passed up to TimeSeriesModel in base, but
# are never set. So we inject them here so as not to get an
# AttributeError later. (see http://bit.ly/2f7SkKH)
for attr, val in (('dates', None), ('freq', None),
('missing', 'none')):
if not hasattr(arima, attr):
setattr(arima, attr, val)
else:
if method is None:
method = 'lbfgs'
if trend is None:
if self.with_intercept:
trend = 'c'
else:
trend = None
# create the SARIMAX
arima = sm.tsa.statespace.SARIMAX(
endog=y, exog=exogenous, order=self.order,
seasonal_order=self.seasonal_order, trend=trend,
enforce_stationarity=self.transparams)
# actually fit the model, now. If this was called from 'update',
# give priority to start_params from the fit_args
start_params = fit_args.pop("start_params", self.start_params)
# Same for 'maxiter' if called from update. Also allows it to be
# passed as a fit arg, if a user does it explicitly.
_maxiter = self.maxiter
if _maxiter is None:
if self._is_seasonal():
_maxiter = sm_compat.DEFAULT_SEASONAL_MAXITER # 50
else:
_maxiter = sm_compat.DEFAULT_NON_SEASONAL_MAXITER # 500
# If maxiter is provided in the fit_args by a savvy user or the
# update method, we should default to their preference
_maxiter = fit_args.pop("maxiter", _maxiter)
return arima, arima.fit(start_params=start_params,
trend=trend, method=method,
transparams=self.transparams,
solver=self.solver, maxiter=_maxiter,
disp=self.disp, callback=self.callback,
**fit_args)
# sometimes too many warnings...
if self.suppress_warnings:
with warnings.catch_warnings(record=False):
warnings.simplefilter('ignore')
fit, self.arima_res_ = _fit_wrapper()
else:
fit, self.arima_res_ = _fit_wrapper()
# Set df_model attribute for SARIMAXResults object
sm_compat.bind_df_model(fit, self.arima_res_)
# Non-seasonal ARIMA models may not capture sigma2. Statsmodels' code
# is buggy and difficult to follow, so this checks whether it needs to
# be set or not...
# if (not self._is_seasonal()) and np.isnan(self.arima_res_.sigma2):
# self.arima_res_.sigma2 = self.arima_res_.model.loglike(
# self.params(), True)
# if the model is fit with an exogenous array, it must
# be predicted with one as well.
self.fit_with_exog_ = exogenous is not None
# Save nobs since we might change it later if using OOB
self.nobs_ = y.shape[0]
# As of version 0.7.2, start saving the version with the model so
# we can track changes over time.
self.pkg_version_ = pmdarima.__version__
return self | def function[_fit, parameter[self, y, exogenous]]:
constant[Internal fit]
def function[_fit_wrapper, parameter[]]:
variable[method] assign[=] name[self].method
variable[trend] assign[=] name[self].trend
if <ast.UnaryOp object at 0x7da1b1edbd90> begin[:]
if compare[name[method] is constant[None]] begin[:]
variable[method] assign[=] constant[css-mle]
if compare[name[trend] is constant[None]] begin[:]
if name[self].with_intercept begin[:]
variable[trend] assign[=] constant[c]
variable[arima] assign[=] call[name[_ARIMA], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b1edbbe0>, <ast.Name object at 0x7da1b1edbdc0>]]] in starred[tuple[[<ast.Tuple object at 0x7da1b1edbbb0>, <ast.Tuple object at 0x7da1b1edbd30>, <ast.Tuple object at 0x7da1b1edb6a0>]]] begin[:]
if <ast.UnaryOp object at 0x7da1b1edadd0> begin[:]
call[name[setattr], parameter[name[arima], name[attr], name[val]]]
variable[start_params] assign[=] call[name[fit_args].pop, parameter[constant[start_params], name[self].start_params]]
variable[_maxiter] assign[=] name[self].maxiter
if compare[name[_maxiter] is constant[None]] begin[:]
if call[name[self]._is_seasonal, parameter[]] begin[:]
variable[_maxiter] assign[=] name[sm_compat].DEFAULT_SEASONAL_MAXITER
variable[_maxiter] assign[=] call[name[fit_args].pop, parameter[constant[maxiter], name[_maxiter]]]
return[tuple[[<ast.Name object at 0x7da1b1db4790>, <ast.Call object at 0x7da1b1db4880>]]]
if name[self].suppress_warnings begin[:]
with call[name[warnings].catch_warnings, parameter[]] begin[:]
call[name[warnings].simplefilter, parameter[constant[ignore]]]
<ast.Tuple object at 0x7da1b1ec3790> assign[=] call[name[_fit_wrapper], parameter[]]
call[name[sm_compat].bind_df_model, parameter[name[fit], name[self].arima_res_]]
name[self].fit_with_exog_ assign[=] compare[name[exogenous] is_not constant[None]]
name[self].nobs_ assign[=] call[name[y].shape][constant[0]]
name[self].pkg_version_ assign[=] name[pmdarima].__version__
return[name[self]] | keyword[def] identifier[_fit] ( identifier[self] , identifier[y] , identifier[exogenous] = keyword[None] ,** identifier[fit_args] ):
literal[string]
keyword[def] identifier[_fit_wrapper] ():
identifier[method] = identifier[self] . identifier[method]
identifier[trend] = identifier[self] . identifier[trend]
keyword[if] keyword[not] identifier[self] . identifier[_is_seasonal] ():
keyword[if] identifier[method] keyword[is] keyword[None] :
identifier[method] = literal[string]
keyword[if] identifier[trend] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[with_intercept] :
identifier[trend] = literal[string]
keyword[else] :
identifier[trend] = literal[string]
identifier[arima] = identifier[_ARIMA] ( identifier[endog] = identifier[y] , identifier[order] = identifier[self] . identifier[order] , identifier[missing] = literal[string] ,
identifier[exog] = identifier[exogenous] , identifier[dates] = keyword[None] , identifier[freq] = keyword[None] )
keyword[for] identifier[attr] , identifier[val] keyword[in] (( literal[string] , keyword[None] ),( literal[string] , keyword[None] ),
( literal[string] , literal[string] )):
keyword[if] keyword[not] identifier[hasattr] ( identifier[arima] , identifier[attr] ):
identifier[setattr] ( identifier[arima] , identifier[attr] , identifier[val] )
keyword[else] :
keyword[if] identifier[method] keyword[is] keyword[None] :
identifier[method] = literal[string]
keyword[if] identifier[trend] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[with_intercept] :
identifier[trend] = literal[string]
keyword[else] :
identifier[trend] = keyword[None]
identifier[arima] = identifier[sm] . identifier[tsa] . identifier[statespace] . identifier[SARIMAX] (
identifier[endog] = identifier[y] , identifier[exog] = identifier[exogenous] , identifier[order] = identifier[self] . identifier[order] ,
identifier[seasonal_order] = identifier[self] . identifier[seasonal_order] , identifier[trend] = identifier[trend] ,
identifier[enforce_stationarity] = identifier[self] . identifier[transparams] )
identifier[start_params] = identifier[fit_args] . identifier[pop] ( literal[string] , identifier[self] . identifier[start_params] )
identifier[_maxiter] = identifier[self] . identifier[maxiter]
keyword[if] identifier[_maxiter] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[_is_seasonal] ():
identifier[_maxiter] = identifier[sm_compat] . identifier[DEFAULT_SEASONAL_MAXITER]
keyword[else] :
identifier[_maxiter] = identifier[sm_compat] . identifier[DEFAULT_NON_SEASONAL_MAXITER]
identifier[_maxiter] = identifier[fit_args] . identifier[pop] ( literal[string] , identifier[_maxiter] )
keyword[return] identifier[arima] , identifier[arima] . identifier[fit] ( identifier[start_params] = identifier[start_params] ,
identifier[trend] = identifier[trend] , identifier[method] = identifier[method] ,
identifier[transparams] = identifier[self] . identifier[transparams] ,
identifier[solver] = identifier[self] . identifier[solver] , identifier[maxiter] = identifier[_maxiter] ,
identifier[disp] = identifier[self] . identifier[disp] , identifier[callback] = identifier[self] . identifier[callback] ,
** identifier[fit_args] )
keyword[if] identifier[self] . identifier[suppress_warnings] :
keyword[with] identifier[warnings] . identifier[catch_warnings] ( identifier[record] = keyword[False] ):
identifier[warnings] . identifier[simplefilter] ( literal[string] )
identifier[fit] , identifier[self] . identifier[arima_res_] = identifier[_fit_wrapper] ()
keyword[else] :
identifier[fit] , identifier[self] . identifier[arima_res_] = identifier[_fit_wrapper] ()
identifier[sm_compat] . identifier[bind_df_model] ( identifier[fit] , identifier[self] . identifier[arima_res_] )
identifier[self] . identifier[fit_with_exog_] = identifier[exogenous] keyword[is] keyword[not] keyword[None]
identifier[self] . identifier[nobs_] = identifier[y] . identifier[shape] [ literal[int] ]
identifier[self] . identifier[pkg_version_] = identifier[pmdarima] . identifier[__version__]
keyword[return] identifier[self] | def _fit(self, y, exogenous=None, **fit_args):
"""Internal fit"""
# This wrapper is used for fitting either an ARIMA or a SARIMAX
def _fit_wrapper():
# these might change depending on which one
method = self.method
# If it's in kwargs, we'll use it
trend = self.trend
# if not seasonal:
if not self._is_seasonal():
if method is None:
method = 'css-mle' # depends on [control=['if'], data=['method']]
if trend is None:
if self.with_intercept:
trend = 'c' # depends on [control=['if'], data=[]]
else:
trend = 'nc' # depends on [control=['if'], data=['trend']]
# create the statsmodels ARIMA
arima = _ARIMA(endog=y, order=self.order, missing='none', exog=exogenous, dates=None, freq=None)
# there's currently a bug in the ARIMA model where on pickling
# it tries to acquire an attribute called
# 'self.{dates|freq|missing}', but they do not exist as class
# attrs! They're passed up to TimeSeriesModel in base, but
# are never set. So we inject them here so as not to get an
# AttributeError later. (see http://bit.ly/2f7SkKH)
for (attr, val) in (('dates', None), ('freq', None), ('missing', 'none')):
if not hasattr(arima, attr):
setattr(arima, attr, val) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
if method is None:
method = 'lbfgs' # depends on [control=['if'], data=['method']]
if trend is None:
if self.with_intercept:
trend = 'c' # depends on [control=['if'], data=[]]
else:
trend = None # depends on [control=['if'], data=['trend']]
# create the SARIMAX
arima = sm.tsa.statespace.SARIMAX(endog=y, exog=exogenous, order=self.order, seasonal_order=self.seasonal_order, trend=trend, enforce_stationarity=self.transparams)
# actually fit the model, now. If this was called from 'update',
# give priority to start_params from the fit_args
start_params = fit_args.pop('start_params', self.start_params)
# Same for 'maxiter' if called from update. Also allows it to be
# passed as a fit arg, if a user does it explicitly.
_maxiter = self.maxiter
if _maxiter is None:
if self._is_seasonal():
_maxiter = sm_compat.DEFAULT_SEASONAL_MAXITER # 50 # depends on [control=['if'], data=[]]
else:
_maxiter = sm_compat.DEFAULT_NON_SEASONAL_MAXITER # 500 # depends on [control=['if'], data=['_maxiter']]
# If maxiter is provided in the fit_args by a savvy user or the
# update method, we should default to their preference
_maxiter = fit_args.pop('maxiter', _maxiter)
return (arima, arima.fit(start_params=start_params, trend=trend, method=method, transparams=self.transparams, solver=self.solver, maxiter=_maxiter, disp=self.disp, callback=self.callback, **fit_args))
# sometimes too many warnings...
if self.suppress_warnings:
with warnings.catch_warnings(record=False):
warnings.simplefilter('ignore')
(fit, self.arima_res_) = _fit_wrapper() # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
else:
(fit, self.arima_res_) = _fit_wrapper()
# Set df_model attribute for SARIMAXResults object
sm_compat.bind_df_model(fit, self.arima_res_)
# Non-seasonal ARIMA models may not capture sigma2. Statsmodels' code
# is buggy and difficult to follow, so this checks whether it needs to
# be set or not...
# if (not self._is_seasonal()) and np.isnan(self.arima_res_.sigma2):
# self.arima_res_.sigma2 = self.arima_res_.model.loglike(
# self.params(), True)
# if the model is fit with an exogenous array, it must
# be predicted with one as well.
self.fit_with_exog_ = exogenous is not None
# Save nobs since we might change it later if using OOB
self.nobs_ = y.shape[0]
# As of version 0.7.2, start saving the version with the model so
# we can track changes over time.
self.pkg_version_ = pmdarima.__version__
return self |
def list_files(tag=None, sat_id=None, data_path=None, format_str=None):
"""Produce a list of ICON EUV files.
Notes
-----
Currently fixed to level-2
"""
desc = None
level = tag
if level == 'level_1':
code = 'L1'
desc = None
elif level == 'level_2':
code = 'L2'
desc = None
else:
raise ValueError('Unsupported level supplied: ' + level)
if format_str is None:
format_str = 'ICON_'+code+'_EUV_Daytime'
if desc is not None:
format_str += '_' + desc +'_'
format_str += '_{year:4d}-{month:02d}-{day:02d}_v{version:02d}r{revision:03d}.NC'
return pysat.Files.from_os(data_path=data_path,
format_str=format_str) | def function[list_files, parameter[tag, sat_id, data_path, format_str]]:
constant[Produce a list of ICON EUV files.
Notes
-----
Currently fixed to level-2
]
variable[desc] assign[=] constant[None]
variable[level] assign[=] name[tag]
if compare[name[level] equal[==] constant[level_1]] begin[:]
variable[code] assign[=] constant[L1]
variable[desc] assign[=] constant[None]
if compare[name[format_str] is constant[None]] begin[:]
variable[format_str] assign[=] binary_operation[binary_operation[constant[ICON_] + name[code]] + constant[_EUV_Daytime]]
if compare[name[desc] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da18f811ba0>
<ast.AugAssign object at 0x7da18f811570>
return[call[name[pysat].Files.from_os, parameter[]]] | keyword[def] identifier[list_files] ( identifier[tag] = keyword[None] , identifier[sat_id] = keyword[None] , identifier[data_path] = keyword[None] , identifier[format_str] = keyword[None] ):
literal[string]
identifier[desc] = keyword[None]
identifier[level] = identifier[tag]
keyword[if] identifier[level] == literal[string] :
identifier[code] = literal[string]
identifier[desc] = keyword[None]
keyword[elif] identifier[level] == literal[string] :
identifier[code] = literal[string]
identifier[desc] = keyword[None]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] + identifier[level] )
keyword[if] identifier[format_str] keyword[is] keyword[None] :
identifier[format_str] = literal[string] + identifier[code] + literal[string]
keyword[if] identifier[desc] keyword[is] keyword[not] keyword[None] :
identifier[format_str] += literal[string] + identifier[desc] + literal[string]
identifier[format_str] += literal[string]
keyword[return] identifier[pysat] . identifier[Files] . identifier[from_os] ( identifier[data_path] = identifier[data_path] ,
identifier[format_str] = identifier[format_str] ) | def list_files(tag=None, sat_id=None, data_path=None, format_str=None):
"""Produce a list of ICON EUV files.
Notes
-----
Currently fixed to level-2
"""
desc = None
level = tag
if level == 'level_1':
code = 'L1'
desc = None # depends on [control=['if'], data=[]]
elif level == 'level_2':
code = 'L2'
desc = None # depends on [control=['if'], data=[]]
else:
raise ValueError('Unsupported level supplied: ' + level)
if format_str is None:
format_str = 'ICON_' + code + '_EUV_Daytime'
if desc is not None:
format_str += '_' + desc + '_' # depends on [control=['if'], data=['desc']]
format_str += '_{year:4d}-{month:02d}-{day:02d}_v{version:02d}r{revision:03d}.NC' # depends on [control=['if'], data=['format_str']]
return pysat.Files.from_os(data_path=data_path, format_str=format_str) |
def power_law(target, X, A1='', A2='', A3=''):
r"""
Calculates the rate, as well as slope and intercept of the following
function at the given value of *X*:
.. math::
r = A_{1} x^{A_{2}} + A_{3}
Parameters
----------
A1 -> A3 : string
The dictionary keys on the target object containing the coefficients
values to be used in the source term model
X : string
The dictionary key on the target objecxt containing the the quantity
of interest
Returns
-------
A dictionary containing the following three items:
**'rate'** - The value of the source term function at the given X.
**'S1'** - The slope of the source term function at the given X.
**'S2'** - The intercept of the source term function at the given X.
The slope and intercept provide a linearized source term equation about the
current value of X as follow:
.. math::
rate = S_{1} X + S_{2}
"""
A = _parse_args(target=target, key=A1, default=1.0)
B = _parse_args(target=target, key=A2, default=1.0)
C = _parse_args(target=target, key=A3, default=0.0)
X = target[X]
r = A * X ** B + C
S1 = A * B * X ** (B - 1)
S2 = A * X ** B * (1 - B) + C
values = {'S1': S1, 'S2': S2, 'rate': r}
return values | def function[power_law, parameter[target, X, A1, A2, A3]]:
constant[
Calculates the rate, as well as slope and intercept of the following
function at the given value of *X*:
.. math::
r = A_{1} x^{A_{2}} + A_{3}
Parameters
----------
A1 -> A3 : string
The dictionary keys on the target object containing the coefficients
values to be used in the source term model
X : string
The dictionary key on the target objecxt containing the the quantity
of interest
Returns
-------
A dictionary containing the following three items:
**'rate'** - The value of the source term function at the given X.
**'S1'** - The slope of the source term function at the given X.
**'S2'** - The intercept of the source term function at the given X.
The slope and intercept provide a linearized source term equation about the
current value of X as follow:
.. math::
rate = S_{1} X + S_{2}
]
variable[A] assign[=] call[name[_parse_args], parameter[]]
variable[B] assign[=] call[name[_parse_args], parameter[]]
variable[C] assign[=] call[name[_parse_args], parameter[]]
variable[X] assign[=] call[name[target]][name[X]]
variable[r] assign[=] binary_operation[binary_operation[name[A] * binary_operation[name[X] ** name[B]]] + name[C]]
variable[S1] assign[=] binary_operation[binary_operation[name[A] * name[B]] * binary_operation[name[X] ** binary_operation[name[B] - constant[1]]]]
variable[S2] assign[=] binary_operation[binary_operation[binary_operation[name[A] * binary_operation[name[X] ** name[B]]] * binary_operation[constant[1] - name[B]]] + name[C]]
variable[values] assign[=] dictionary[[<ast.Constant object at 0x7da2054a7b50>, <ast.Constant object at 0x7da2054a5930>, <ast.Constant object at 0x7da2054a7f70>], [<ast.Name object at 0x7da2054a47c0>, <ast.Name object at 0x7da2054a7490>, <ast.Name object at 0x7da2054a74c0>]]
return[name[values]] | keyword[def] identifier[power_law] ( identifier[target] , identifier[X] , identifier[A1] = literal[string] , identifier[A2] = literal[string] , identifier[A3] = literal[string] ):
literal[string]
identifier[A] = identifier[_parse_args] ( identifier[target] = identifier[target] , identifier[key] = identifier[A1] , identifier[default] = literal[int] )
identifier[B] = identifier[_parse_args] ( identifier[target] = identifier[target] , identifier[key] = identifier[A2] , identifier[default] = literal[int] )
identifier[C] = identifier[_parse_args] ( identifier[target] = identifier[target] , identifier[key] = identifier[A3] , identifier[default] = literal[int] )
identifier[X] = identifier[target] [ identifier[X] ]
identifier[r] = identifier[A] * identifier[X] ** identifier[B] + identifier[C]
identifier[S1] = identifier[A] * identifier[B] * identifier[X] **( identifier[B] - literal[int] )
identifier[S2] = identifier[A] * identifier[X] ** identifier[B] *( literal[int] - identifier[B] )+ identifier[C]
identifier[values] ={ literal[string] : identifier[S1] , literal[string] : identifier[S2] , literal[string] : identifier[r] }
keyword[return] identifier[values] | def power_law(target, X, A1='', A2='', A3=''):
"""
Calculates the rate, as well as slope and intercept of the following
function at the given value of *X*:
.. math::
r = A_{1} x^{A_{2}} + A_{3}
Parameters
----------
A1 -> A3 : string
The dictionary keys on the target object containing the coefficients
values to be used in the source term model
X : string
The dictionary key on the target objecxt containing the the quantity
of interest
Returns
-------
A dictionary containing the following three items:
**'rate'** - The value of the source term function at the given X.
**'S1'** - The slope of the source term function at the given X.
**'S2'** - The intercept of the source term function at the given X.
The slope and intercept provide a linearized source term equation about the
current value of X as follow:
.. math::
rate = S_{1} X + S_{2}
"""
A = _parse_args(target=target, key=A1, default=1.0)
B = _parse_args(target=target, key=A2, default=1.0)
C = _parse_args(target=target, key=A3, default=0.0)
X = target[X]
r = A * X ** B + C
S1 = A * B * X ** (B - 1)
S2 = A * X ** B * (1 - B) + C
values = {'S1': S1, 'S2': S2, 'rate': r}
return values |
def file_set_details(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /file-xxxx/setDetails API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FsetDetails
"""
return DXHTTPRequest('/%s/setDetails' % object_id, input_params, always_retry=always_retry, **kwargs) | def function[file_set_details, parameter[object_id, input_params, always_retry]]:
constant[
Invokes the /file-xxxx/setDetails API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FsetDetails
]
return[call[name[DXHTTPRequest], parameter[binary_operation[constant[/%s/setDetails] <ast.Mod object at 0x7da2590d6920> name[object_id]], name[input_params]]]] | keyword[def] identifier[file_set_details] ( identifier[object_id] , identifier[input_params] ={}, identifier[always_retry] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[DXHTTPRequest] ( literal[string] % identifier[object_id] , identifier[input_params] , identifier[always_retry] = identifier[always_retry] ,** identifier[kwargs] ) | def file_set_details(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /file-xxxx/setDetails API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FsetDetails
"""
return DXHTTPRequest('/%s/setDetails' % object_id, input_params, always_retry=always_retry, **kwargs) |
def depth_file_for_rgb_file(rgb_filename, rgb_file_list, depth_file_list):
"""Returns the *closest* depth file from an RGB filename"""
(root, filename) = os.path.split(rgb_filename)
rgb_timestamps = np.array(Kinect.timestamps_from_file_list(rgb_file_list))
depth_timestamps = np.array(Kinect.timestamps_from_file_list(depth_file_list))
needle_ts = rgb_timestamps[rgb_file_list.index(rgb_filename)]
haystack_idx = np.argmin(np.abs(depth_timestamps - needle_ts))
depth_filename = depth_file_list[haystack_idx]
return depth_filename | def function[depth_file_for_rgb_file, parameter[rgb_filename, rgb_file_list, depth_file_list]]:
constant[Returns the *closest* depth file from an RGB filename]
<ast.Tuple object at 0x7da2041d8940> assign[=] call[name[os].path.split, parameter[name[rgb_filename]]]
variable[rgb_timestamps] assign[=] call[name[np].array, parameter[call[name[Kinect].timestamps_from_file_list, parameter[name[rgb_file_list]]]]]
variable[depth_timestamps] assign[=] call[name[np].array, parameter[call[name[Kinect].timestamps_from_file_list, parameter[name[depth_file_list]]]]]
variable[needle_ts] assign[=] call[name[rgb_timestamps]][call[name[rgb_file_list].index, parameter[name[rgb_filename]]]]
variable[haystack_idx] assign[=] call[name[np].argmin, parameter[call[name[np].abs, parameter[binary_operation[name[depth_timestamps] - name[needle_ts]]]]]]
variable[depth_filename] assign[=] call[name[depth_file_list]][name[haystack_idx]]
return[name[depth_filename]] | keyword[def] identifier[depth_file_for_rgb_file] ( identifier[rgb_filename] , identifier[rgb_file_list] , identifier[depth_file_list] ):
literal[string]
( identifier[root] , identifier[filename] )= identifier[os] . identifier[path] . identifier[split] ( identifier[rgb_filename] )
identifier[rgb_timestamps] = identifier[np] . identifier[array] ( identifier[Kinect] . identifier[timestamps_from_file_list] ( identifier[rgb_file_list] ))
identifier[depth_timestamps] = identifier[np] . identifier[array] ( identifier[Kinect] . identifier[timestamps_from_file_list] ( identifier[depth_file_list] ))
identifier[needle_ts] = identifier[rgb_timestamps] [ identifier[rgb_file_list] . identifier[index] ( identifier[rgb_filename] )]
identifier[haystack_idx] = identifier[np] . identifier[argmin] ( identifier[np] . identifier[abs] ( identifier[depth_timestamps] - identifier[needle_ts] ))
identifier[depth_filename] = identifier[depth_file_list] [ identifier[haystack_idx] ]
keyword[return] identifier[depth_filename] | def depth_file_for_rgb_file(rgb_filename, rgb_file_list, depth_file_list):
"""Returns the *closest* depth file from an RGB filename"""
(root, filename) = os.path.split(rgb_filename)
rgb_timestamps = np.array(Kinect.timestamps_from_file_list(rgb_file_list))
depth_timestamps = np.array(Kinect.timestamps_from_file_list(depth_file_list))
needle_ts = rgb_timestamps[rgb_file_list.index(rgb_filename)]
haystack_idx = np.argmin(np.abs(depth_timestamps - needle_ts))
depth_filename = depth_file_list[haystack_idx]
return depth_filename |
def get_sql(self):
"""
Gets the FROM sql portion for this table
Ex: table_name AS alias
:returns: Returns the table identifier to be used in the FROM sql portion of the query
:rtype: str
"""
alias = self.get_alias()
if alias:
return '{0} AS {1}'.format(self.get_from_name(), alias)
return self.get_identifier() | def function[get_sql, parameter[self]]:
constant[
Gets the FROM sql portion for this table
Ex: table_name AS alias
:returns: Returns the table identifier to be used in the FROM sql portion of the query
:rtype: str
]
variable[alias] assign[=] call[name[self].get_alias, parameter[]]
if name[alias] begin[:]
return[call[constant[{0} AS {1}].format, parameter[call[name[self].get_from_name, parameter[]], name[alias]]]]
return[call[name[self].get_identifier, parameter[]]] | keyword[def] identifier[get_sql] ( identifier[self] ):
literal[string]
identifier[alias] = identifier[self] . identifier[get_alias] ()
keyword[if] identifier[alias] :
keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[get_from_name] (), identifier[alias] )
keyword[return] identifier[self] . identifier[get_identifier] () | def get_sql(self):
"""
Gets the FROM sql portion for this table
Ex: table_name AS alias
:returns: Returns the table identifier to be used in the FROM sql portion of the query
:rtype: str
"""
alias = self.get_alias()
if alias:
return '{0} AS {1}'.format(self.get_from_name(), alias) # depends on [control=['if'], data=[]]
return self.get_identifier() |
def make_routing_table(obj, keys, prefix='on_'):
"""
:return:
a dictionary roughly equivalent to ``{'key1': obj.on_key1, 'key2': obj.on_key2, ...}``,
but ``obj`` does not have to define all methods. It may define the needed ones only.
:param obj: the object
:param keys: a list of keys
:param prefix: a string to be prepended to keys to make method names
"""
def maptuple(k):
if isinstance(k, tuple):
if len(k) == 2:
return k
elif len(k) == 1:
return k[0], _create_invoker(obj, prefix+k[0])
else:
raise ValueError()
else:
return k, _create_invoker(obj, prefix+k)
return dict([maptuple(k) for k in keys]) | def function[make_routing_table, parameter[obj, keys, prefix]]:
constant[
:return:
a dictionary roughly equivalent to ``{'key1': obj.on_key1, 'key2': obj.on_key2, ...}``,
but ``obj`` does not have to define all methods. It may define the needed ones only.
:param obj: the object
:param keys: a list of keys
:param prefix: a string to be prepended to keys to make method names
]
def function[maptuple, parameter[k]]:
if call[name[isinstance], parameter[name[k], name[tuple]]] begin[:]
if compare[call[name[len], parameter[name[k]]] equal[==] constant[2]] begin[:]
return[name[k]]
return[call[name[dict], parameter[<ast.ListComp object at 0x7da1b1a57ca0>]]] | keyword[def] identifier[make_routing_table] ( identifier[obj] , identifier[keys] , identifier[prefix] = literal[string] ):
literal[string]
keyword[def] identifier[maptuple] ( identifier[k] ):
keyword[if] identifier[isinstance] ( identifier[k] , identifier[tuple] ):
keyword[if] identifier[len] ( identifier[k] )== literal[int] :
keyword[return] identifier[k]
keyword[elif] identifier[len] ( identifier[k] )== literal[int] :
keyword[return] identifier[k] [ literal[int] ], identifier[_create_invoker] ( identifier[obj] , identifier[prefix] + identifier[k] [ literal[int] ])
keyword[else] :
keyword[raise] identifier[ValueError] ()
keyword[else] :
keyword[return] identifier[k] , identifier[_create_invoker] ( identifier[obj] , identifier[prefix] + identifier[k] )
keyword[return] identifier[dict] ([ identifier[maptuple] ( identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[keys] ]) | def make_routing_table(obj, keys, prefix='on_'):
"""
:return:
a dictionary roughly equivalent to ``{'key1': obj.on_key1, 'key2': obj.on_key2, ...}``,
but ``obj`` does not have to define all methods. It may define the needed ones only.
:param obj: the object
:param keys: a list of keys
:param prefix: a string to be prepended to keys to make method names
"""
def maptuple(k):
if isinstance(k, tuple):
if len(k) == 2:
return k # depends on [control=['if'], data=[]]
elif len(k) == 1:
return (k[0], _create_invoker(obj, prefix + k[0])) # depends on [control=['if'], data=[]]
else:
raise ValueError() # depends on [control=['if'], data=[]]
else:
return (k, _create_invoker(obj, prefix + k))
return dict([maptuple(k) for k in keys]) |
async def serve(self) -> None:
"""
Write pidfile to sentinel directory if need be, and wait for sentinels
to shut down or build revocation registry and tails file.
"""
LOGGER.debug('RevRegBuilder.serve >>>')
assert self.external
file_pid = join(self._dir_tails_sentinel, '.pid')
if isfile(file_pid):
with open(file_pid, 'r') as fh_pid:
pid = int(fh_pid.read())
try:
kill(pid, 0)
except ProcessLookupError:
remove(file_pid)
LOGGER.info('RevRegBuilder removed derelict .pid file')
except PermissionError:
LOGGER.info('RevRegBuilder process already running with pid %s: exiting', pid)
LOGGER.debug('RevRegBuilder.serve <<<')
return
else:
LOGGER.info('RevRegBuilder process already running with pid %s: exiting', pid)
LOGGER.debug('RevRegBuilder.serve <<<')
return
pid = getpid()
with open(file_pid, 'w') as pid_fh:
print(str(pid), file=pid_fh)
file_stop = join(self._dir_tails_sentinel, '.stop')
while True:
if isfile(file_stop): # stop now, pick up any pending tasks next invocation
remove(file_stop)
remove(file_pid)
break
p_pending = [join(self._dir_tails_sentinel, d) for d in listdir(self._dir_tails_sentinel)
if isdir(join(self._dir_tails_sentinel, d))]
p_pending = [p for p in p_pending if [s for s in listdir(p) if s.startswith('.')]] # size marker
if p_pending:
pdir = basename(p_pending[0])
rr_id = pdir
rr_size = int([s for s in listdir(p_pending[0]) if s.startswith('.')][0][1:])
open(join(p_pending[0], '.in-progress'), 'w').close()
await self.create_rev_reg(rr_id, rr_size or None)
rmtree(p_pending[0])
await asyncio.sleep(1)
LOGGER.debug('RevRegBuilder.serve <<<') | <ast.AsyncFunctionDef object at 0x7da2054a5300> | keyword[async] keyword[def] identifier[serve] ( identifier[self] )-> keyword[None] :
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[assert] identifier[self] . identifier[external]
identifier[file_pid] = identifier[join] ( identifier[self] . identifier[_dir_tails_sentinel] , literal[string] )
keyword[if] identifier[isfile] ( identifier[file_pid] ):
keyword[with] identifier[open] ( identifier[file_pid] , literal[string] ) keyword[as] identifier[fh_pid] :
identifier[pid] = identifier[int] ( identifier[fh_pid] . identifier[read] ())
keyword[try] :
identifier[kill] ( identifier[pid] , literal[int] )
keyword[except] identifier[ProcessLookupError] :
identifier[remove] ( identifier[file_pid] )
identifier[LOGGER] . identifier[info] ( literal[string] )
keyword[except] identifier[PermissionError] :
identifier[LOGGER] . identifier[info] ( literal[string] , identifier[pid] )
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[return]
keyword[else] :
identifier[LOGGER] . identifier[info] ( literal[string] , identifier[pid] )
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[return]
identifier[pid] = identifier[getpid] ()
keyword[with] identifier[open] ( identifier[file_pid] , literal[string] ) keyword[as] identifier[pid_fh] :
identifier[print] ( identifier[str] ( identifier[pid] ), identifier[file] = identifier[pid_fh] )
identifier[file_stop] = identifier[join] ( identifier[self] . identifier[_dir_tails_sentinel] , literal[string] )
keyword[while] keyword[True] :
keyword[if] identifier[isfile] ( identifier[file_stop] ):
identifier[remove] ( identifier[file_stop] )
identifier[remove] ( identifier[file_pid] )
keyword[break]
identifier[p_pending] =[ identifier[join] ( identifier[self] . identifier[_dir_tails_sentinel] , identifier[d] ) keyword[for] identifier[d] keyword[in] identifier[listdir] ( identifier[self] . identifier[_dir_tails_sentinel] )
keyword[if] identifier[isdir] ( identifier[join] ( identifier[self] . identifier[_dir_tails_sentinel] , identifier[d] ))]
identifier[p_pending] =[ identifier[p] keyword[for] identifier[p] keyword[in] identifier[p_pending] keyword[if] [ identifier[s] keyword[for] identifier[s] keyword[in] identifier[listdir] ( identifier[p] ) keyword[if] identifier[s] . identifier[startswith] ( literal[string] )]]
keyword[if] identifier[p_pending] :
identifier[pdir] = identifier[basename] ( identifier[p_pending] [ literal[int] ])
identifier[rr_id] = identifier[pdir]
identifier[rr_size] = identifier[int] ([ identifier[s] keyword[for] identifier[s] keyword[in] identifier[listdir] ( identifier[p_pending] [ literal[int] ]) keyword[if] identifier[s] . identifier[startswith] ( literal[string] )][ literal[int] ][ literal[int] :])
identifier[open] ( identifier[join] ( identifier[p_pending] [ literal[int] ], literal[string] ), literal[string] ). identifier[close] ()
keyword[await] identifier[self] . identifier[create_rev_reg] ( identifier[rr_id] , identifier[rr_size] keyword[or] keyword[None] )
identifier[rmtree] ( identifier[p_pending] [ literal[int] ])
keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] )
identifier[LOGGER] . identifier[debug] ( literal[string] ) | async def serve(self) -> None:
"""
Write pidfile to sentinel directory if need be, and wait for sentinels
to shut down or build revocation registry and tails file.
"""
LOGGER.debug('RevRegBuilder.serve >>>')
assert self.external
file_pid = join(self._dir_tails_sentinel, '.pid')
if isfile(file_pid):
with open(file_pid, 'r') as fh_pid:
pid = int(fh_pid.read()) # depends on [control=['with'], data=['fh_pid']]
try:
kill(pid, 0) # depends on [control=['try'], data=[]]
except ProcessLookupError:
remove(file_pid)
LOGGER.info('RevRegBuilder removed derelict .pid file') # depends on [control=['except'], data=[]]
except PermissionError:
LOGGER.info('RevRegBuilder process already running with pid %s: exiting', pid)
LOGGER.debug('RevRegBuilder.serve <<<')
return # depends on [control=['except'], data=[]]
else:
LOGGER.info('RevRegBuilder process already running with pid %s: exiting', pid)
LOGGER.debug('RevRegBuilder.serve <<<')
return # depends on [control=['if'], data=[]]
pid = getpid()
with open(file_pid, 'w') as pid_fh:
print(str(pid), file=pid_fh) # depends on [control=['with'], data=['pid_fh']]
file_stop = join(self._dir_tails_sentinel, '.stop')
while True:
if isfile(file_stop): # stop now, pick up any pending tasks next invocation
remove(file_stop)
remove(file_pid)
break # depends on [control=['if'], data=[]]
p_pending = [join(self._dir_tails_sentinel, d) for d in listdir(self._dir_tails_sentinel) if isdir(join(self._dir_tails_sentinel, d))]
p_pending = [p for p in p_pending if [s for s in listdir(p) if s.startswith('.')]] # size marker
if p_pending:
pdir = basename(p_pending[0])
rr_id = pdir
rr_size = int([s for s in listdir(p_pending[0]) if s.startswith('.')][0][1:])
open(join(p_pending[0], '.in-progress'), 'w').close()
await self.create_rev_reg(rr_id, rr_size or None)
rmtree(p_pending[0]) # depends on [control=['if'], data=[]]
await asyncio.sleep(1) # depends on [control=['while'], data=[]]
LOGGER.debug('RevRegBuilder.serve <<<') |
def replace_namespaced_custom_object_status(self, group, version, namespace, plural, name, body, **kwargs):
"""
replace status of the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_custom_object_status(group, version, namespace, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.replace_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, body, **kwargs)
else:
(data) = self.replace_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, body, **kwargs)
return data | def function[replace_namespaced_custom_object_status, parameter[self, group, version, namespace, plural, name, body]]:
constant[
replace status of the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_custom_object_status(group, version, namespace, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].replace_namespaced_custom_object_status_with_http_info, parameter[name[group], name[version], name[namespace], name[plural], name[name], name[body]]]] | keyword[def] identifier[replace_namespaced_custom_object_status] ( identifier[self] , identifier[group] , identifier[version] , identifier[namespace] , identifier[plural] , identifier[name] , identifier[body] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[replace_namespaced_custom_object_status_with_http_info] ( identifier[group] , identifier[version] , identifier[namespace] , identifier[plural] , identifier[name] , identifier[body] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[replace_namespaced_custom_object_status_with_http_info] ( identifier[group] , identifier[version] , identifier[namespace] , identifier[plural] , identifier[name] , identifier[body] ,** identifier[kwargs] )
keyword[return] identifier[data] | def replace_namespaced_custom_object_status(self, group, version, namespace, plural, name, body, **kwargs):
"""
replace status of the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_custom_object_status(group, version, namespace, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.replace_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, body, **kwargs) # depends on [control=['if'], data=[]]
else:
data = self.replace_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, body, **kwargs)
return data |
def apply_job_security(code):
"""Treat input `code` like Python 2 (implicit strings are byte literals).
The implementation is horribly inefficient but the goal is to be compatible
with what Mercurial does at runtime.
"""
buf = io.BytesIO(code.encode('utf8'))
tokens = tokenize.tokenize(buf.readline)
# NOTE: by setting the fullname to `mercurial.pycompat` below, we're
# ensuring that hg-specific pycompat imports aren't inserted to the code.
data = tokenize.untokenize(replacetokens(list(tokens), 'mercurial.pycompat'))
return cast(str, data.decode('utf8')) | def function[apply_job_security, parameter[code]]:
constant[Treat input `code` like Python 2 (implicit strings are byte literals).
The implementation is horribly inefficient but the goal is to be compatible
with what Mercurial does at runtime.
]
variable[buf] assign[=] call[name[io].BytesIO, parameter[call[name[code].encode, parameter[constant[utf8]]]]]
variable[tokens] assign[=] call[name[tokenize].tokenize, parameter[name[buf].readline]]
variable[data] assign[=] call[name[tokenize].untokenize, parameter[call[name[replacetokens], parameter[call[name[list], parameter[name[tokens]]], constant[mercurial.pycompat]]]]]
return[call[name[cast], parameter[name[str], call[name[data].decode, parameter[constant[utf8]]]]]] | keyword[def] identifier[apply_job_security] ( identifier[code] ):
literal[string]
identifier[buf] = identifier[io] . identifier[BytesIO] ( identifier[code] . identifier[encode] ( literal[string] ))
identifier[tokens] = identifier[tokenize] . identifier[tokenize] ( identifier[buf] . identifier[readline] )
identifier[data] = identifier[tokenize] . identifier[untokenize] ( identifier[replacetokens] ( identifier[list] ( identifier[tokens] ), literal[string] ))
keyword[return] identifier[cast] ( identifier[str] , identifier[data] . identifier[decode] ( literal[string] )) | def apply_job_security(code):
"""Treat input `code` like Python 2 (implicit strings are byte literals).
The implementation is horribly inefficient but the goal is to be compatible
with what Mercurial does at runtime.
"""
buf = io.BytesIO(code.encode('utf8'))
tokens = tokenize.tokenize(buf.readline)
# NOTE: by setting the fullname to `mercurial.pycompat` below, we're
# ensuring that hg-specific pycompat imports aren't inserted to the code.
data = tokenize.untokenize(replacetokens(list(tokens), 'mercurial.pycompat'))
return cast(str, data.decode('utf8')) |
def fix_e731(self, result):
"""Fix do not assign a lambda expression check."""
(line_index, _, target) = get_index_offset_contents(result,
self.source)
match = LAMBDA_REGEX.search(target)
if match:
end = match.end()
self.source[line_index] = '{}def {}({}): return {}'.format(
target[:match.start(0)], match.group(1), match.group(2),
target[end:].lstrip()) | def function[fix_e731, parameter[self, result]]:
constant[Fix do not assign a lambda expression check.]
<ast.Tuple object at 0x7da207f02d10> assign[=] call[name[get_index_offset_contents], parameter[name[result], name[self].source]]
variable[match] assign[=] call[name[LAMBDA_REGEX].search, parameter[name[target]]]
if name[match] begin[:]
variable[end] assign[=] call[name[match].end, parameter[]]
call[name[self].source][name[line_index]] assign[=] call[constant[{}def {}({}): return {}].format, parameter[call[name[target]][<ast.Slice object at 0x7da18ede6b00>], call[name[match].group, parameter[constant[1]]], call[name[match].group, parameter[constant[2]]], call[call[name[target]][<ast.Slice object at 0x7da18ede61a0>].lstrip, parameter[]]]] | keyword[def] identifier[fix_e731] ( identifier[self] , identifier[result] ):
literal[string]
( identifier[line_index] , identifier[_] , identifier[target] )= identifier[get_index_offset_contents] ( identifier[result] ,
identifier[self] . identifier[source] )
identifier[match] = identifier[LAMBDA_REGEX] . identifier[search] ( identifier[target] )
keyword[if] identifier[match] :
identifier[end] = identifier[match] . identifier[end] ()
identifier[self] . identifier[source] [ identifier[line_index] ]= literal[string] . identifier[format] (
identifier[target] [: identifier[match] . identifier[start] ( literal[int] )], identifier[match] . identifier[group] ( literal[int] ), identifier[match] . identifier[group] ( literal[int] ),
identifier[target] [ identifier[end] :]. identifier[lstrip] ()) | def fix_e731(self, result):
"""Fix do not assign a lambda expression check."""
(line_index, _, target) = get_index_offset_contents(result, self.source)
match = LAMBDA_REGEX.search(target)
if match:
end = match.end()
self.source[line_index] = '{}def {}({}): return {}'.format(target[:match.start(0)], match.group(1), match.group(2), target[end:].lstrip()) # depends on [control=['if'], data=[]] |
def ci(data, statfunction=None, alpha=0.05, n_samples=10000,
method='bca', output='lowhigh', epsilon=0.001, multi=None,
_iter=True):
"""
Given a set of data ``data``, and a statistics function ``statfunction`` that
applies to that data, computes the bootstrap confidence interval for
``statfunction`` on that data. Data points are assumed to be delineated by
axis 0.
Parameters
----------
data: array_like, shape (N, ...) OR tuple of array_like all with shape (N, ...)
Input data. Data points are assumed to be delineated by axis 0. Beyond this,
the shape doesn't matter, so long as ``statfunction`` can be applied to the
array. If a tuple of array_likes is passed, then samples from each array (along
axis 0) are passed in order as separate parameters to the statfunction. The
type of data (single array or tuple of arrays) can be explicitly specified
by the multi parameter.
statfunction: function (data, weights=(weights, optional)) -> value
This function should accept samples of data from ``data``. It is applied
to these samples individually.
If using the ABC method, the function _must_ accept a named ``weights``
parameter which will be an array_like with weights for each sample, and
must return a _weighted_ result. Otherwise this parameter is not used
or required. Note that numpy's np.average accepts this. (default=np.average)
alpha: float or iterable, optional
The percentiles to use for the confidence interval (default=0.05). If this
is a float, the returned values are (alpha/2, 1-alpha/2) percentile confidence
intervals. If it is an iterable, alpha is assumed to be an iterable of
each desired percentile.
n_samples: float, optional
The number of bootstrap samples to use (default=10000)
method: string, optional
The method to use: one of 'pi', 'bca', or 'abc' (default='bca')
output: string, optional
The format of the output. 'lowhigh' gives low and high confidence interval
values. 'errorbar' gives transposed abs(value-confidence interval value) values
that are suitable for use with matplotlib's errorbar function. (default='lowhigh')
epsilon: float, optional (only for ABC method)
The step size for finite difference calculations in the ABC method. Ignored for
all other methods. (default=0.001)
multi: boolean, optional
If False, assume data is a single array. If True, assume data is a tuple/other
iterable of arrays of the same length that should be sampled together. If None,
decide based on whether the data is an actual tuple. (default=None)
Returns
-------
confidences: tuple of floats
The confidence percentiles specified by alpha
Calculation Methods
-------------------
'pi': Percentile Interval (Efron 13.3)
The percentile interval method simply returns the 100*alphath bootstrap
sample's values for the statistic. This is an extremely simple method of
confidence interval calculation. However, it has several disadvantages
compared to the bias-corrected accelerated method, which is the default.
'bca': Bias-Corrected Accelerated (BCa) Non-Parametric (Efron 14.3) (default)
This method is much more complex to explain. However, it gives considerably
better results, and is generally recommended for normal situations. Note
that in cases where the statistic is smooth, and can be expressed with
weights, the ABC method will give approximated results much, much faster.
Note that in a case where the statfunction results in equal output for every
bootstrap sample, the BCa confidence interval is technically undefined, as
the acceleration value is undefined. To match the percentile interval method
and give reasonable output, the implementation of this method returns a
confidence interval of zero width using the 0th bootstrap sample in this
case, and warns the user.
'abc': Approximate Bootstrap Confidence (Efron 14.4, 22.6)
This method provides approximated bootstrap confidence intervals without
actually taking bootstrap samples. This requires that the statistic be
smooth, and allow for weighting of individual points with a weights=
parameter (note that np.average allows this). This is _much_ faster
than all other methods for situations where it can be used.
Examples
--------
To calculate the confidence intervals for the mean of some numbers:
>> boot.ci( np.randn(100), np.average )
Given some data points in arrays x and y calculate the confidence intervals
for all linear regression coefficients simultaneously:
>> boot.ci( (x,y), scipy.stats.linregress )
References
----------
Efron, An Introduction to the Bootstrap. Chapman & Hall 1993
"""
# Deal with the alpha values
if np.iterable(alpha):
alphas = np.array(alpha)
else:
alphas = np.array([alpha/2, 1-alpha/2])
if multi is None:
if isinstance(data, tuple):
multi = True
else:
multi = False
if statfunction is None:
if _iter:
statfunction = np.average
else:
def statfunc_wrapper(x, *args, **kwargs):
return np.average(x, axis=-1, *args, **kwargs)
statfunction = statfunc_wrapper
# Ensure that the data is actually an array. This isn't nice to pandas,
# but pandas seems much much slower and the indexes become a problem.
if not multi:
data = np.array(data)
tdata = (data,)
else:
tdata = tuple( np.array(x) for x in data )
# Deal with ABC *now*, as it doesn't need samples.
if method == 'abc':
n = tdata[0].shape[0]*1.0
nn = tdata[0].shape[0]
I = np.identity(nn)
ep = epsilon / n*1.0
p0 = np.repeat(1.0/n,nn)
try:
t0 = statfunction(*tdata,weights=p0)
except TypeError as e:
raise TypeError("statfunction does not accept correct arguments for ABC ({0})".format(e.message))
di_full = I - p0
tp = np.fromiter((statfunction(*tdata, weights=p0+ep*di)
for di in di_full), dtype=np.float)
tm = np.fromiter((statfunction(*tdata, weights=p0-ep*di)
for di in di_full), dtype=np.float)
t1 = (tp-tm)/(2*ep)
t2 = (tp-2*t0+tm)/ep**2
sighat = np.sqrt(np.sum(t1**2))/n
a = (np.sum(t1**3))/(6*n**3*sighat**3)
delta = t1/(n**2*sighat)
cq = (statfunction(*tdata,weights=p0+ep*delta)-2*t0+statfunction(*tdata,weights=p0-ep*delta))/(2*sighat*ep**2)
bhat = np.sum(t2)/(2*n**2)
curv = bhat/sighat-cq
z0 = nppf(2*ncdf(a)*ncdf(-curv))
Z = z0+nppf(alphas)
za = Z/(1-a*Z)**2
# stan = t0 + sighat * nppf(alphas)
abc = np.zeros_like(alphas)
for i in range(0,len(alphas)):
abc[i] = statfunction(*tdata,weights=p0+za[i]*delta)
if output == 'lowhigh':
return abc
elif output == 'errorbar':
return abs(abc-statfunction(tdata))[np.newaxis].T
else:
raise ValueError("Output option {0} is not supported.".format(output))
# We don't need to generate actual samples; that would take more memory.
# Instead, we can generate just the indexes, and then apply the statfun
# to those indexes.
if _iter:
bootindexes = bootstrap_indexes(tdata[0], n_samples)
stat = np.array([statfunction(*(x[indexes] for x in tdata))
for indexes in bootindexes])
else:
bootindexes = bootstrap_indexes_array(tdata[0], n_samples)
stat = statfunction(*(x[bootindexes] for x in tdata))
stat.sort(axis=0)
# Percentile Interval Method
if method == 'pi':
avals = alphas
# Bias-Corrected Accelerated Method
elif method == 'bca':
# The value of the statistic function applied just to the actual data.
ostat = statfunction(*tdata)
# The bias correction value.
z0 = nppf( ( 1.0*np.sum(stat < ostat, axis=0) ) / n_samples )
# Statistics of the jackknife distribution
jackindexes = jackknife_indexes(tdata[0])
jstat = [statfunction(*(x[indexes] for x in tdata)) for indexes in jackindexes]
jmean = np.mean(jstat,axis=0)
# Temporarily kill numpy warnings:
oldnperr = np.seterr(invalid='ignore')
# Acceleration value
a = np.sum((jmean - jstat)**3, axis=0) / (
6.0 * np.sum((jmean - jstat)**2, axis=0)**1.5)
if np.any(np.isnan(a)):
nanind = np.nonzero(np.isnan(a))
warnings.warn("BCa acceleration values for indexes {} were undefined. \
Statistic values were likely all equal. Affected CI will \
be inaccurate.".format(nanind), InstabilityWarning, stacklevel=2)
zs = z0 + nppf(alphas).reshape(alphas.shape+(1,)*z0.ndim)
avals = ncdf(z0 + zs/(1-a*zs))
np.seterr(**oldnperr)
else:
raise ValueError("Method {0} is not supported.".format(method))
nvals = np.round((n_samples-1)*avals)
oldnperr = np.seterr(invalid='ignore')
if np.any(np.isnan(nvals)):
warnings.warn("Some values were NaN; results are probably unstable " +
"(all values were probably equal)", InstabilityWarning,
stacklevel=2)
if np.any(nvals == 0) or np.any(nvals == n_samples-1):
warnings.warn("Some values used extremal samples; " +
"results are probably unstable.",
InstabilityWarning, stacklevel=2)
elif np.any(nvals < 10) or np.any(nvals >= n_samples-10):
warnings.warn("Some values used top 10 low/high samples; " +
"results may be unstable.",
InstabilityWarning, stacklevel=2)
np.seterr(**oldnperr)
nvals = np.nan_to_num(nvals).astype('int')
if output == 'lowhigh':
if nvals.ndim == 1:
# All nvals are the same. Simple broadcasting
return stat[nvals]
else:
# Nvals are different for each data point. Not simple broadcasting.
# Each set of nvals along axis 0 corresponds to the data at the same
# point in other axes.
return stat[(nvals, np.indices(nvals.shape)[1:].squeeze())]
elif output == 'errorbar':
if nvals.ndim == 1:
return abs(statfunction(data)-stat[nvals])[np.newaxis].T
else:
return abs(statfunction(data)-stat[(nvals, np.indices(nvals.shape)[1:])])[np.newaxis].T
else:
raise ValueError("Output option {0} is not supported.".format(output)) | def function[ci, parameter[data, statfunction, alpha, n_samples, method, output, epsilon, multi, _iter]]:
constant[
Given a set of data ``data``, and a statistics function ``statfunction`` that
applies to that data, computes the bootstrap confidence interval for
``statfunction`` on that data. Data points are assumed to be delineated by
axis 0.
Parameters
----------
data: array_like, shape (N, ...) OR tuple of array_like all with shape (N, ...)
Input data. Data points are assumed to be delineated by axis 0. Beyond this,
the shape doesn't matter, so long as ``statfunction`` can be applied to the
array. If a tuple of array_likes is passed, then samples from each array (along
axis 0) are passed in order as separate parameters to the statfunction. The
type of data (single array or tuple of arrays) can be explicitly specified
by the multi parameter.
statfunction: function (data, weights=(weights, optional)) -> value
This function should accept samples of data from ``data``. It is applied
to these samples individually.
If using the ABC method, the function _must_ accept a named ``weights``
parameter which will be an array_like with weights for each sample, and
must return a _weighted_ result. Otherwise this parameter is not used
or required. Note that numpy's np.average accepts this. (default=np.average)
alpha: float or iterable, optional
The percentiles to use for the confidence interval (default=0.05). If this
is a float, the returned values are (alpha/2, 1-alpha/2) percentile confidence
intervals. If it is an iterable, alpha is assumed to be an iterable of
each desired percentile.
n_samples: float, optional
The number of bootstrap samples to use (default=10000)
method: string, optional
The method to use: one of 'pi', 'bca', or 'abc' (default='bca')
output: string, optional
The format of the output. 'lowhigh' gives low and high confidence interval
values. 'errorbar' gives transposed abs(value-confidence interval value) values
that are suitable for use with matplotlib's errorbar function. (default='lowhigh')
epsilon: float, optional (only for ABC method)
The step size for finite difference calculations in the ABC method. Ignored for
all other methods. (default=0.001)
multi: boolean, optional
If False, assume data is a single array. If True, assume data is a tuple/other
iterable of arrays of the same length that should be sampled together. If None,
decide based on whether the data is an actual tuple. (default=None)
Returns
-------
confidences: tuple of floats
The confidence percentiles specified by alpha
Calculation Methods
-------------------
'pi': Percentile Interval (Efron 13.3)
The percentile interval method simply returns the 100*alphath bootstrap
sample's values for the statistic. This is an extremely simple method of
confidence interval calculation. However, it has several disadvantages
compared to the bias-corrected accelerated method, which is the default.
'bca': Bias-Corrected Accelerated (BCa) Non-Parametric (Efron 14.3) (default)
This method is much more complex to explain. However, it gives considerably
better results, and is generally recommended for normal situations. Note
that in cases where the statistic is smooth, and can be expressed with
weights, the ABC method will give approximated results much, much faster.
Note that in a case where the statfunction results in equal output for every
bootstrap sample, the BCa confidence interval is technically undefined, as
the acceleration value is undefined. To match the percentile interval method
and give reasonable output, the implementation of this method returns a
confidence interval of zero width using the 0th bootstrap sample in this
case, and warns the user.
'abc': Approximate Bootstrap Confidence (Efron 14.4, 22.6)
This method provides approximated bootstrap confidence intervals without
actually taking bootstrap samples. This requires that the statistic be
smooth, and allow for weighting of individual points with a weights=
parameter (note that np.average allows this). This is _much_ faster
than all other methods for situations where it can be used.
Examples
--------
To calculate the confidence intervals for the mean of some numbers:
>> boot.ci( np.randn(100), np.average )
Given some data points in arrays x and y calculate the confidence intervals
for all linear regression coefficients simultaneously:
>> boot.ci( (x,y), scipy.stats.linregress )
References
----------
Efron, An Introduction to the Bootstrap. Chapman & Hall 1993
]
if call[name[np].iterable, parameter[name[alpha]]] begin[:]
variable[alphas] assign[=] call[name[np].array, parameter[name[alpha]]]
if compare[name[multi] is constant[None]] begin[:]
if call[name[isinstance], parameter[name[data], name[tuple]]] begin[:]
variable[multi] assign[=] constant[True]
if compare[name[statfunction] is constant[None]] begin[:]
if name[_iter] begin[:]
variable[statfunction] assign[=] name[np].average
if <ast.UnaryOp object at 0x7da1b0c31930> begin[:]
variable[data] assign[=] call[name[np].array, parameter[name[data]]]
variable[tdata] assign[=] tuple[[<ast.Name object at 0x7da1b0c31720>]]
if compare[name[method] equal[==] constant[abc]] begin[:]
variable[n] assign[=] binary_operation[call[call[name[tdata]][constant[0]].shape][constant[0]] * constant[1.0]]
variable[nn] assign[=] call[call[name[tdata]][constant[0]].shape][constant[0]]
variable[I] assign[=] call[name[np].identity, parameter[name[nn]]]
variable[ep] assign[=] binary_operation[binary_operation[name[epsilon] / name[n]] * constant[1.0]]
variable[p0] assign[=] call[name[np].repeat, parameter[binary_operation[constant[1.0] / name[n]], name[nn]]]
<ast.Try object at 0x7da1b0c30c40>
variable[di_full] assign[=] binary_operation[name[I] - name[p0]]
variable[tp] assign[=] call[name[np].fromiter, parameter[<ast.GeneratorExp object at 0x7da1b0c306d0>]]
variable[tm] assign[=] call[name[np].fromiter, parameter[<ast.GeneratorExp object at 0x7da1b0c30280>]]
variable[t1] assign[=] binary_operation[binary_operation[name[tp] - name[tm]] / binary_operation[constant[2] * name[ep]]]
variable[t2] assign[=] binary_operation[binary_operation[binary_operation[name[tp] - binary_operation[constant[2] * name[t0]]] + name[tm]] / binary_operation[name[ep] ** constant[2]]]
variable[sighat] assign[=] binary_operation[call[name[np].sqrt, parameter[call[name[np].sum, parameter[binary_operation[name[t1] ** constant[2]]]]]] / name[n]]
variable[a] assign[=] binary_operation[call[name[np].sum, parameter[binary_operation[name[t1] ** constant[3]]]] / binary_operation[binary_operation[constant[6] * binary_operation[name[n] ** constant[3]]] * binary_operation[name[sighat] ** constant[3]]]]
variable[delta] assign[=] binary_operation[name[t1] / binary_operation[binary_operation[name[n] ** constant[2]] * name[sighat]]]
variable[cq] assign[=] binary_operation[binary_operation[binary_operation[call[name[statfunction], parameter[<ast.Starred object at 0x7da2046237f0>]] - binary_operation[constant[2] * name[t0]]] + call[name[statfunction], parameter[<ast.Starred object at 0x7da204620f10>]]] / binary_operation[binary_operation[constant[2] * name[sighat]] * binary_operation[name[ep] ** constant[2]]]]
variable[bhat] assign[=] binary_operation[call[name[np].sum, parameter[name[t2]]] / binary_operation[constant[2] * binary_operation[name[n] ** constant[2]]]]
variable[curv] assign[=] binary_operation[binary_operation[name[bhat] / name[sighat]] - name[cq]]
variable[z0] assign[=] call[name[nppf], parameter[binary_operation[binary_operation[constant[2] * call[name[ncdf], parameter[name[a]]]] * call[name[ncdf], parameter[<ast.UnaryOp object at 0x7da1b0c67e80>]]]]]
variable[Z] assign[=] binary_operation[name[z0] + call[name[nppf], parameter[name[alphas]]]]
variable[za] assign[=] binary_operation[name[Z] / binary_operation[binary_operation[constant[1] - binary_operation[name[a] * name[Z]]] ** constant[2]]]
variable[abc] assign[=] call[name[np].zeros_like, parameter[name[alphas]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[alphas]]]]]] begin[:]
call[name[abc]][name[i]] assign[=] call[name[statfunction], parameter[<ast.Starred object at 0x7da1b0c662c0>]]
if compare[name[output] equal[==] constant[lowhigh]] begin[:]
return[name[abc]]
if name[_iter] begin[:]
variable[bootindexes] assign[=] call[name[bootstrap_indexes], parameter[call[name[tdata]][constant[0]], name[n_samples]]]
variable[stat] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da207f9bca0>]]
call[name[stat].sort, parameter[]]
if compare[name[method] equal[==] constant[pi]] begin[:]
variable[avals] assign[=] name[alphas]
variable[nvals] assign[=] call[name[np].round, parameter[binary_operation[binary_operation[name[n_samples] - constant[1]] * name[avals]]]]
variable[oldnperr] assign[=] call[name[np].seterr, parameter[]]
if call[name[np].any, parameter[call[name[np].isnan, parameter[name[nvals]]]]] begin[:]
call[name[warnings].warn, parameter[binary_operation[constant[Some values were NaN; results are probably unstable ] + constant[(all values were probably equal)]], name[InstabilityWarning]]]
if <ast.BoolOp object at 0x7da1b0cb0160> begin[:]
call[name[warnings].warn, parameter[binary_operation[constant[Some values used extremal samples; ] + constant[results are probably unstable.]], name[InstabilityWarning]]]
call[name[np].seterr, parameter[]]
variable[nvals] assign[=] call[call[name[np].nan_to_num, parameter[name[nvals]]].astype, parameter[constant[int]]]
if compare[name[output] equal[==] constant[lowhigh]] begin[:]
if compare[name[nvals].ndim equal[==] constant[1]] begin[:]
return[call[name[stat]][name[nvals]]] | keyword[def] identifier[ci] ( identifier[data] , identifier[statfunction] = keyword[None] , identifier[alpha] = literal[int] , identifier[n_samples] = literal[int] ,
identifier[method] = literal[string] , identifier[output] = literal[string] , identifier[epsilon] = literal[int] , identifier[multi] = keyword[None] ,
identifier[_iter] = keyword[True] ):
literal[string]
keyword[if] identifier[np] . identifier[iterable] ( identifier[alpha] ):
identifier[alphas] = identifier[np] . identifier[array] ( identifier[alpha] )
keyword[else] :
identifier[alphas] = identifier[np] . identifier[array] ([ identifier[alpha] / literal[int] , literal[int] - identifier[alpha] / literal[int] ])
keyword[if] identifier[multi] keyword[is] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[data] , identifier[tuple] ):
identifier[multi] = keyword[True]
keyword[else] :
identifier[multi] = keyword[False]
keyword[if] identifier[statfunction] keyword[is] keyword[None] :
keyword[if] identifier[_iter] :
identifier[statfunction] = identifier[np] . identifier[average]
keyword[else] :
keyword[def] identifier[statfunc_wrapper] ( identifier[x] ,* identifier[args] ,** identifier[kwargs] ):
keyword[return] identifier[np] . identifier[average] ( identifier[x] , identifier[axis] =- literal[int] ,* identifier[args] ,** identifier[kwargs] )
identifier[statfunction] = identifier[statfunc_wrapper]
keyword[if] keyword[not] identifier[multi] :
identifier[data] = identifier[np] . identifier[array] ( identifier[data] )
identifier[tdata] =( identifier[data] ,)
keyword[else] :
identifier[tdata] = identifier[tuple] ( identifier[np] . identifier[array] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[data] )
keyword[if] identifier[method] == literal[string] :
identifier[n] = identifier[tdata] [ literal[int] ]. identifier[shape] [ literal[int] ]* literal[int]
identifier[nn] = identifier[tdata] [ literal[int] ]. identifier[shape] [ literal[int] ]
identifier[I] = identifier[np] . identifier[identity] ( identifier[nn] )
identifier[ep] = identifier[epsilon] / identifier[n] * literal[int]
identifier[p0] = identifier[np] . identifier[repeat] ( literal[int] / identifier[n] , identifier[nn] )
keyword[try] :
identifier[t0] = identifier[statfunction] (* identifier[tdata] , identifier[weights] = identifier[p0] )
keyword[except] identifier[TypeError] keyword[as] identifier[e] :
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[e] . identifier[message] ))
identifier[di_full] = identifier[I] - identifier[p0]
identifier[tp] = identifier[np] . identifier[fromiter] (( identifier[statfunction] (* identifier[tdata] , identifier[weights] = identifier[p0] + identifier[ep] * identifier[di] )
keyword[for] identifier[di] keyword[in] identifier[di_full] ), identifier[dtype] = identifier[np] . identifier[float] )
identifier[tm] = identifier[np] . identifier[fromiter] (( identifier[statfunction] (* identifier[tdata] , identifier[weights] = identifier[p0] - identifier[ep] * identifier[di] )
keyword[for] identifier[di] keyword[in] identifier[di_full] ), identifier[dtype] = identifier[np] . identifier[float] )
identifier[t1] =( identifier[tp] - identifier[tm] )/( literal[int] * identifier[ep] )
identifier[t2] =( identifier[tp] - literal[int] * identifier[t0] + identifier[tm] )/ identifier[ep] ** literal[int]
identifier[sighat] = identifier[np] . identifier[sqrt] ( identifier[np] . identifier[sum] ( identifier[t1] ** literal[int] ))/ identifier[n]
identifier[a] =( identifier[np] . identifier[sum] ( identifier[t1] ** literal[int] ))/( literal[int] * identifier[n] ** literal[int] * identifier[sighat] ** literal[int] )
identifier[delta] = identifier[t1] /( identifier[n] ** literal[int] * identifier[sighat] )
identifier[cq] =( identifier[statfunction] (* identifier[tdata] , identifier[weights] = identifier[p0] + identifier[ep] * identifier[delta] )- literal[int] * identifier[t0] + identifier[statfunction] (* identifier[tdata] , identifier[weights] = identifier[p0] - identifier[ep] * identifier[delta] ))/( literal[int] * identifier[sighat] * identifier[ep] ** literal[int] )
identifier[bhat] = identifier[np] . identifier[sum] ( identifier[t2] )/( literal[int] * identifier[n] ** literal[int] )
identifier[curv] = identifier[bhat] / identifier[sighat] - identifier[cq]
identifier[z0] = identifier[nppf] ( literal[int] * identifier[ncdf] ( identifier[a] )* identifier[ncdf] (- identifier[curv] ))
identifier[Z] = identifier[z0] + identifier[nppf] ( identifier[alphas] )
identifier[za] = identifier[Z] /( literal[int] - identifier[a] * identifier[Z] )** literal[int]
identifier[abc] = identifier[np] . identifier[zeros_like] ( identifier[alphas] )
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[alphas] )):
identifier[abc] [ identifier[i] ]= identifier[statfunction] (* identifier[tdata] , identifier[weights] = identifier[p0] + identifier[za] [ identifier[i] ]* identifier[delta] )
keyword[if] identifier[output] == literal[string] :
keyword[return] identifier[abc]
keyword[elif] identifier[output] == literal[string] :
keyword[return] identifier[abs] ( identifier[abc] - identifier[statfunction] ( identifier[tdata] ))[ identifier[np] . identifier[newaxis] ]. identifier[T]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[output] ))
keyword[if] identifier[_iter] :
identifier[bootindexes] = identifier[bootstrap_indexes] ( identifier[tdata] [ literal[int] ], identifier[n_samples] )
identifier[stat] = identifier[np] . identifier[array] ([ identifier[statfunction] (*( identifier[x] [ identifier[indexes] ] keyword[for] identifier[x] keyword[in] identifier[tdata] ))
keyword[for] identifier[indexes] keyword[in] identifier[bootindexes] ])
keyword[else] :
identifier[bootindexes] = identifier[bootstrap_indexes_array] ( identifier[tdata] [ literal[int] ], identifier[n_samples] )
identifier[stat] = identifier[statfunction] (*( identifier[x] [ identifier[bootindexes] ] keyword[for] identifier[x] keyword[in] identifier[tdata] ))
identifier[stat] . identifier[sort] ( identifier[axis] = literal[int] )
keyword[if] identifier[method] == literal[string] :
identifier[avals] = identifier[alphas]
keyword[elif] identifier[method] == literal[string] :
identifier[ostat] = identifier[statfunction] (* identifier[tdata] )
identifier[z0] = identifier[nppf] (( literal[int] * identifier[np] . identifier[sum] ( identifier[stat] < identifier[ostat] , identifier[axis] = literal[int] ))/ identifier[n_samples] )
identifier[jackindexes] = identifier[jackknife_indexes] ( identifier[tdata] [ literal[int] ])
identifier[jstat] =[ identifier[statfunction] (*( identifier[x] [ identifier[indexes] ] keyword[for] identifier[x] keyword[in] identifier[tdata] )) keyword[for] identifier[indexes] keyword[in] identifier[jackindexes] ]
identifier[jmean] = identifier[np] . identifier[mean] ( identifier[jstat] , identifier[axis] = literal[int] )
identifier[oldnperr] = identifier[np] . identifier[seterr] ( identifier[invalid] = literal[string] )
identifier[a] = identifier[np] . identifier[sum] (( identifier[jmean] - identifier[jstat] )** literal[int] , identifier[axis] = literal[int] )/(
literal[int] * identifier[np] . identifier[sum] (( identifier[jmean] - identifier[jstat] )** literal[int] , identifier[axis] = literal[int] )** literal[int] )
keyword[if] identifier[np] . identifier[any] ( identifier[np] . identifier[isnan] ( identifier[a] )):
identifier[nanind] = identifier[np] . identifier[nonzero] ( identifier[np] . identifier[isnan] ( identifier[a] ))
identifier[warnings] . identifier[warn] ( literal[string] . identifier[format] ( identifier[nanind] ), identifier[InstabilityWarning] , identifier[stacklevel] = literal[int] )
identifier[zs] = identifier[z0] + identifier[nppf] ( identifier[alphas] ). identifier[reshape] ( identifier[alphas] . identifier[shape] +( literal[int] ,)* identifier[z0] . identifier[ndim] )
identifier[avals] = identifier[ncdf] ( identifier[z0] + identifier[zs] /( literal[int] - identifier[a] * identifier[zs] ))
identifier[np] . identifier[seterr] (** identifier[oldnperr] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[method] ))
identifier[nvals] = identifier[np] . identifier[round] (( identifier[n_samples] - literal[int] )* identifier[avals] )
identifier[oldnperr] = identifier[np] . identifier[seterr] ( identifier[invalid] = literal[string] )
keyword[if] identifier[np] . identifier[any] ( identifier[np] . identifier[isnan] ( identifier[nvals] )):
identifier[warnings] . identifier[warn] ( literal[string] +
literal[string] , identifier[InstabilityWarning] ,
identifier[stacklevel] = literal[int] )
keyword[if] identifier[np] . identifier[any] ( identifier[nvals] == literal[int] ) keyword[or] identifier[np] . identifier[any] ( identifier[nvals] == identifier[n_samples] - literal[int] ):
identifier[warnings] . identifier[warn] ( literal[string] +
literal[string] ,
identifier[InstabilityWarning] , identifier[stacklevel] = literal[int] )
keyword[elif] identifier[np] . identifier[any] ( identifier[nvals] < literal[int] ) keyword[or] identifier[np] . identifier[any] ( identifier[nvals] >= identifier[n_samples] - literal[int] ):
identifier[warnings] . identifier[warn] ( literal[string] +
literal[string] ,
identifier[InstabilityWarning] , identifier[stacklevel] = literal[int] )
identifier[np] . identifier[seterr] (** identifier[oldnperr] )
identifier[nvals] = identifier[np] . identifier[nan_to_num] ( identifier[nvals] ). identifier[astype] ( literal[string] )
keyword[if] identifier[output] == literal[string] :
keyword[if] identifier[nvals] . identifier[ndim] == literal[int] :
keyword[return] identifier[stat] [ identifier[nvals] ]
keyword[else] :
keyword[return] identifier[stat] [( identifier[nvals] , identifier[np] . identifier[indices] ( identifier[nvals] . identifier[shape] )[ literal[int] :]. identifier[squeeze] ())]
keyword[elif] identifier[output] == literal[string] :
keyword[if] identifier[nvals] . identifier[ndim] == literal[int] :
keyword[return] identifier[abs] ( identifier[statfunction] ( identifier[data] )- identifier[stat] [ identifier[nvals] ])[ identifier[np] . identifier[newaxis] ]. identifier[T]
keyword[else] :
keyword[return] identifier[abs] ( identifier[statfunction] ( identifier[data] )- identifier[stat] [( identifier[nvals] , identifier[np] . identifier[indices] ( identifier[nvals] . identifier[shape] )[ literal[int] :])])[ identifier[np] . identifier[newaxis] ]. identifier[T]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[output] )) | def ci(data, statfunction=None, alpha=0.05, n_samples=10000, method='bca', output='lowhigh', epsilon=0.001, multi=None, _iter=True):
"""
Given a set of data ``data``, and a statistics function ``statfunction`` that
applies to that data, computes the bootstrap confidence interval for
``statfunction`` on that data. Data points are assumed to be delineated by
axis 0.
Parameters
----------
data: array_like, shape (N, ...) OR tuple of array_like all with shape (N, ...)
Input data. Data points are assumed to be delineated by axis 0. Beyond this,
the shape doesn't matter, so long as ``statfunction`` can be applied to the
array. If a tuple of array_likes is passed, then samples from each array (along
axis 0) are passed in order as separate parameters to the statfunction. The
type of data (single array or tuple of arrays) can be explicitly specified
by the multi parameter.
statfunction: function (data, weights=(weights, optional)) -> value
This function should accept samples of data from ``data``. It is applied
to these samples individually.
If using the ABC method, the function _must_ accept a named ``weights``
parameter which will be an array_like with weights for each sample, and
must return a _weighted_ result. Otherwise this parameter is not used
or required. Note that numpy's np.average accepts this. (default=np.average)
alpha: float or iterable, optional
The percentiles to use for the confidence interval (default=0.05). If this
is a float, the returned values are (alpha/2, 1-alpha/2) percentile confidence
intervals. If it is an iterable, alpha is assumed to be an iterable of
each desired percentile.
n_samples: float, optional
The number of bootstrap samples to use (default=10000)
method: string, optional
The method to use: one of 'pi', 'bca', or 'abc' (default='bca')
output: string, optional
The format of the output. 'lowhigh' gives low and high confidence interval
values. 'errorbar' gives transposed abs(value-confidence interval value) values
that are suitable for use with matplotlib's errorbar function. (default='lowhigh')
epsilon: float, optional (only for ABC method)
The step size for finite difference calculations in the ABC method. Ignored for
all other methods. (default=0.001)
multi: boolean, optional
If False, assume data is a single array. If True, assume data is a tuple/other
iterable of arrays of the same length that should be sampled together. If None,
decide based on whether the data is an actual tuple. (default=None)
Returns
-------
confidences: tuple of floats
The confidence percentiles specified by alpha
Calculation Methods
-------------------
'pi': Percentile Interval (Efron 13.3)
The percentile interval method simply returns the 100*alphath bootstrap
sample's values for the statistic. This is an extremely simple method of
confidence interval calculation. However, it has several disadvantages
compared to the bias-corrected accelerated method, which is the default.
'bca': Bias-Corrected Accelerated (BCa) Non-Parametric (Efron 14.3) (default)
This method is much more complex to explain. However, it gives considerably
better results, and is generally recommended for normal situations. Note
that in cases where the statistic is smooth, and can be expressed with
weights, the ABC method will give approximated results much, much faster.
Note that in a case where the statfunction results in equal output for every
bootstrap sample, the BCa confidence interval is technically undefined, as
the acceleration value is undefined. To match the percentile interval method
and give reasonable output, the implementation of this method returns a
confidence interval of zero width using the 0th bootstrap sample in this
case, and warns the user.
'abc': Approximate Bootstrap Confidence (Efron 14.4, 22.6)
This method provides approximated bootstrap confidence intervals without
actually taking bootstrap samples. This requires that the statistic be
smooth, and allow for weighting of individual points with a weights=
parameter (note that np.average allows this). This is _much_ faster
than all other methods for situations where it can be used.
Examples
--------
To calculate the confidence intervals for the mean of some numbers:
>> boot.ci( np.randn(100), np.average )
Given some data points in arrays x and y calculate the confidence intervals
for all linear regression coefficients simultaneously:
>> boot.ci( (x,y), scipy.stats.linregress )
References
----------
Efron, An Introduction to the Bootstrap. Chapman & Hall 1993
"""
# Deal with the alpha values
if np.iterable(alpha):
alphas = np.array(alpha) # depends on [control=['if'], data=[]]
else:
alphas = np.array([alpha / 2, 1 - alpha / 2])
if multi is None:
if isinstance(data, tuple):
multi = True # depends on [control=['if'], data=[]]
else:
multi = False # depends on [control=['if'], data=['multi']]
if statfunction is None:
if _iter:
statfunction = np.average # depends on [control=['if'], data=[]]
else:
def statfunc_wrapper(x, *args, **kwargs):
return np.average(x, *args, axis=-1, **kwargs)
statfunction = statfunc_wrapper # depends on [control=['if'], data=['statfunction']]
# Ensure that the data is actually an array. This isn't nice to pandas,
# but pandas seems much much slower and the indexes become a problem.
if not multi:
data = np.array(data)
tdata = (data,) # depends on [control=['if'], data=[]]
else:
tdata = tuple((np.array(x) for x in data))
# Deal with ABC *now*, as it doesn't need samples.
if method == 'abc':
n = tdata[0].shape[0] * 1.0
nn = tdata[0].shape[0]
I = np.identity(nn)
ep = epsilon / n * 1.0
p0 = np.repeat(1.0 / n, nn)
try:
t0 = statfunction(*tdata, weights=p0) # depends on [control=['try'], data=[]]
except TypeError as e:
raise TypeError('statfunction does not accept correct arguments for ABC ({0})'.format(e.message)) # depends on [control=['except'], data=['e']]
di_full = I - p0
tp = np.fromiter((statfunction(*tdata, weights=p0 + ep * di) for di in di_full), dtype=np.float)
tm = np.fromiter((statfunction(*tdata, weights=p0 - ep * di) for di in di_full), dtype=np.float)
t1 = (tp - tm) / (2 * ep)
t2 = (tp - 2 * t0 + tm) / ep ** 2
sighat = np.sqrt(np.sum(t1 ** 2)) / n
a = np.sum(t1 ** 3) / (6 * n ** 3 * sighat ** 3)
delta = t1 / (n ** 2 * sighat)
cq = (statfunction(*tdata, weights=p0 + ep * delta) - 2 * t0 + statfunction(*tdata, weights=p0 - ep * delta)) / (2 * sighat * ep ** 2)
bhat = np.sum(t2) / (2 * n ** 2)
curv = bhat / sighat - cq
z0 = nppf(2 * ncdf(a) * ncdf(-curv))
Z = z0 + nppf(alphas)
za = Z / (1 - a * Z) ** 2
# stan = t0 + sighat * nppf(alphas)
abc = np.zeros_like(alphas)
for i in range(0, len(alphas)):
abc[i] = statfunction(*tdata, weights=p0 + za[i] * delta) # depends on [control=['for'], data=['i']]
if output == 'lowhigh':
return abc # depends on [control=['if'], data=[]]
elif output == 'errorbar':
return abs(abc - statfunction(tdata))[np.newaxis].T # depends on [control=['if'], data=[]]
else:
raise ValueError('Output option {0} is not supported.'.format(output)) # depends on [control=['if'], data=[]]
# We don't need to generate actual samples; that would take more memory.
# Instead, we can generate just the indexes, and then apply the statfun
# to those indexes.
if _iter:
bootindexes = bootstrap_indexes(tdata[0], n_samples)
stat = np.array([statfunction(*(x[indexes] for x in tdata)) for indexes in bootindexes]) # depends on [control=['if'], data=[]]
else:
bootindexes = bootstrap_indexes_array(tdata[0], n_samples)
stat = statfunction(*(x[bootindexes] for x in tdata))
stat.sort(axis=0)
# Percentile Interval Method
if method == 'pi':
avals = alphas # depends on [control=['if'], data=[]]
# Bias-Corrected Accelerated Method
elif method == 'bca':
# The value of the statistic function applied just to the actual data.
ostat = statfunction(*tdata)
# The bias correction value.
z0 = nppf(1.0 * np.sum(stat < ostat, axis=0) / n_samples)
# Statistics of the jackknife distribution
jackindexes = jackknife_indexes(tdata[0])
jstat = [statfunction(*(x[indexes] for x in tdata)) for indexes in jackindexes]
jmean = np.mean(jstat, axis=0)
# Temporarily kill numpy warnings:
oldnperr = np.seterr(invalid='ignore')
# Acceleration value
a = np.sum((jmean - jstat) ** 3, axis=0) / (6.0 * np.sum((jmean - jstat) ** 2, axis=0) ** 1.5)
if np.any(np.isnan(a)):
nanind = np.nonzero(np.isnan(a))
warnings.warn('BCa acceleration values for indexes {} were undefined. Statistic values were likely all equal. Affected CI will be inaccurate.'.format(nanind), InstabilityWarning, stacklevel=2) # depends on [control=['if'], data=[]]
zs = z0 + nppf(alphas).reshape(alphas.shape + (1,) * z0.ndim)
avals = ncdf(z0 + zs / (1 - a * zs))
np.seterr(**oldnperr) # depends on [control=['if'], data=[]]
else:
raise ValueError('Method {0} is not supported.'.format(method))
nvals = np.round((n_samples - 1) * avals)
oldnperr = np.seterr(invalid='ignore')
if np.any(np.isnan(nvals)):
warnings.warn('Some values were NaN; results are probably unstable ' + '(all values were probably equal)', InstabilityWarning, stacklevel=2) # depends on [control=['if'], data=[]]
if np.any(nvals == 0) or np.any(nvals == n_samples - 1):
warnings.warn('Some values used extremal samples; ' + 'results are probably unstable.', InstabilityWarning, stacklevel=2) # depends on [control=['if'], data=[]]
elif np.any(nvals < 10) or np.any(nvals >= n_samples - 10):
warnings.warn('Some values used top 10 low/high samples; ' + 'results may be unstable.', InstabilityWarning, stacklevel=2) # depends on [control=['if'], data=[]]
np.seterr(**oldnperr)
nvals = np.nan_to_num(nvals).astype('int')
if output == 'lowhigh':
if nvals.ndim == 1:
# All nvals are the same. Simple broadcasting
return stat[nvals] # depends on [control=['if'], data=[]]
else:
# Nvals are different for each data point. Not simple broadcasting.
# Each set of nvals along axis 0 corresponds to the data at the same
# point in other axes.
return stat[nvals, np.indices(nvals.shape)[1:].squeeze()] # depends on [control=['if'], data=[]]
elif output == 'errorbar':
if nvals.ndim == 1:
return abs(statfunction(data) - stat[nvals])[np.newaxis].T # depends on [control=['if'], data=[]]
else:
return abs(statfunction(data) - stat[nvals, np.indices(nvals.shape)[1:]])[np.newaxis].T # depends on [control=['if'], data=[]]
else:
raise ValueError('Output option {0} is not supported.'.format(output)) |
def update_metadata(self, href=None, metadata=None, version=None):
"""Update the metadata in a bundle.
'href' the relative href to the metadata. May not be None.
'metadata' may be None, or an object that can be converted to a
JSON string. See API documentation for restrictions. The
conversion will take place before the API call.
'version' the object version. May be None; if not None, must be
an integer, and the version must match the version of the
bundle. If not, a 409 conflict error will cause an APIException
to be thrown.
Returns a data structure equivalent to the JSON returned by the API.
If the response status is not 2xx, throws an APIException.
If the JSON to python data struct conversion fails, throws an
APIDataException."""
# Argument error checking.
assert href is not None
assert metadata is not None
assert version is None or isinstance(version, int)
# Prepare the data we're going to include in our bundle update.
data = None
fields = {}
if version is not None:
fields['version'] = version
fields['data'] = json.dumps(metadata)
data = fields
raw_result = self.put(href, data)
if raw_result.status < 200 or raw_result.status > 202:
raise APIException(raw_result.status, raw_result.json)
# Convert the JSON to a python data struct.
return self._parse_json(raw_result.json) | def function[update_metadata, parameter[self, href, metadata, version]]:
constant[Update the metadata in a bundle.
'href' the relative href to the metadata. May not be None.
'metadata' may be None, or an object that can be converted to a
JSON string. See API documentation for restrictions. The
conversion will take place before the API call.
'version' the object version. May be None; if not None, must be
an integer, and the version must match the version of the
bundle. If not, a 409 conflict error will cause an APIException
to be thrown.
Returns a data structure equivalent to the JSON returned by the API.
If the response status is not 2xx, throws an APIException.
If the JSON to python data struct conversion fails, throws an
APIDataException.]
assert[compare[name[href] is_not constant[None]]]
assert[compare[name[metadata] is_not constant[None]]]
assert[<ast.BoolOp object at 0x7da1b0852290>]
variable[data] assign[=] constant[None]
variable[fields] assign[=] dictionary[[], []]
if compare[name[version] is_not constant[None]] begin[:]
call[name[fields]][constant[version]] assign[=] name[version]
call[name[fields]][constant[data]] assign[=] call[name[json].dumps, parameter[name[metadata]]]
variable[data] assign[=] name[fields]
variable[raw_result] assign[=] call[name[self].put, parameter[name[href], name[data]]]
if <ast.BoolOp object at 0x7da1b0851e40> begin[:]
<ast.Raise object at 0x7da1b0852ad0>
return[call[name[self]._parse_json, parameter[name[raw_result].json]]] | keyword[def] identifier[update_metadata] ( identifier[self] , identifier[href] = keyword[None] , identifier[metadata] = keyword[None] , identifier[version] = keyword[None] ):
literal[string]
keyword[assert] identifier[href] keyword[is] keyword[not] keyword[None]
keyword[assert] identifier[metadata] keyword[is] keyword[not] keyword[None]
keyword[assert] identifier[version] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[version] , identifier[int] )
identifier[data] = keyword[None]
identifier[fields] ={}
keyword[if] identifier[version] keyword[is] keyword[not] keyword[None] :
identifier[fields] [ literal[string] ]= identifier[version]
identifier[fields] [ literal[string] ]= identifier[json] . identifier[dumps] ( identifier[metadata] )
identifier[data] = identifier[fields]
identifier[raw_result] = identifier[self] . identifier[put] ( identifier[href] , identifier[data] )
keyword[if] identifier[raw_result] . identifier[status] < literal[int] keyword[or] identifier[raw_result] . identifier[status] > literal[int] :
keyword[raise] identifier[APIException] ( identifier[raw_result] . identifier[status] , identifier[raw_result] . identifier[json] )
keyword[return] identifier[self] . identifier[_parse_json] ( identifier[raw_result] . identifier[json] ) | def update_metadata(self, href=None, metadata=None, version=None):
"""Update the metadata in a bundle.
'href' the relative href to the metadata. May not be None.
'metadata' may be None, or an object that can be converted to a
JSON string. See API documentation for restrictions. The
conversion will take place before the API call.
'version' the object version. May be None; if not None, must be
an integer, and the version must match the version of the
bundle. If not, a 409 conflict error will cause an APIException
to be thrown.
Returns a data structure equivalent to the JSON returned by the API.
If the response status is not 2xx, throws an APIException.
If the JSON to python data struct conversion fails, throws an
APIDataException."""
# Argument error checking.
assert href is not None
assert metadata is not None
assert version is None or isinstance(version, int)
# Prepare the data we're going to include in our bundle update.
data = None
fields = {}
if version is not None:
fields['version'] = version # depends on [control=['if'], data=['version']]
fields['data'] = json.dumps(metadata)
data = fields
raw_result = self.put(href, data)
if raw_result.status < 200 or raw_result.status > 202:
raise APIException(raw_result.status, raw_result.json) # depends on [control=['if'], data=[]]
# Convert the JSON to a python data struct.
return self._parse_json(raw_result.json) |
def almost_eq(arr1, arr2, thresh=1E-11, ret_error=False):
""" checks if floating point number are equal to a threshold
"""
error = np.abs(arr1 - arr2)
passed = error < thresh
if ret_error:
return passed, error
return passed | def function[almost_eq, parameter[arr1, arr2, thresh, ret_error]]:
constant[ checks if floating point number are equal to a threshold
]
variable[error] assign[=] call[name[np].abs, parameter[binary_operation[name[arr1] - name[arr2]]]]
variable[passed] assign[=] compare[name[error] less[<] name[thresh]]
if name[ret_error] begin[:]
return[tuple[[<ast.Name object at 0x7da1b235bfd0>, <ast.Name object at 0x7da1b2358850>]]]
return[name[passed]] | keyword[def] identifier[almost_eq] ( identifier[arr1] , identifier[arr2] , identifier[thresh] = literal[int] , identifier[ret_error] = keyword[False] ):
literal[string]
identifier[error] = identifier[np] . identifier[abs] ( identifier[arr1] - identifier[arr2] )
identifier[passed] = identifier[error] < identifier[thresh]
keyword[if] identifier[ret_error] :
keyword[return] identifier[passed] , identifier[error]
keyword[return] identifier[passed] | def almost_eq(arr1, arr2, thresh=1e-11, ret_error=False):
""" checks if floating point number are equal to a threshold
"""
error = np.abs(arr1 - arr2)
passed = error < thresh
if ret_error:
return (passed, error) # depends on [control=['if'], data=[]]
return passed |
def _reinit_daq_daemons(sender, instance, **kwargs):
"""
update the daq daemon configuration when changes be applied in the models
"""
if type(instance) is OneWireDevice:
post_save.send_robust(sender=Device, instance=instance.onewire_device)
elif type(instance) is OneWireVariable:
post_save.send_robust(sender=Variable, instance=instance.onewire_variable)
elif type(instance) is ExtendedOneWireVariable:
post_save.send_robust(sender=Variable, instance=Variable.objects.get(pk=instance.pk))
elif type(instance) is ExtendedOneWireDevice:
post_save.send_robust(sender=Device, instance=Device.objects.get(pk=instance.pk)) | def function[_reinit_daq_daemons, parameter[sender, instance]]:
constant[
update the daq daemon configuration when changes be applied in the models
]
if compare[call[name[type], parameter[name[instance]]] is name[OneWireDevice]] begin[:]
call[name[post_save].send_robust, parameter[]] | keyword[def] identifier[_reinit_daq_daemons] ( identifier[sender] , identifier[instance] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[type] ( identifier[instance] ) keyword[is] identifier[OneWireDevice] :
identifier[post_save] . identifier[send_robust] ( identifier[sender] = identifier[Device] , identifier[instance] = identifier[instance] . identifier[onewire_device] )
keyword[elif] identifier[type] ( identifier[instance] ) keyword[is] identifier[OneWireVariable] :
identifier[post_save] . identifier[send_robust] ( identifier[sender] = identifier[Variable] , identifier[instance] = identifier[instance] . identifier[onewire_variable] )
keyword[elif] identifier[type] ( identifier[instance] ) keyword[is] identifier[ExtendedOneWireVariable] :
identifier[post_save] . identifier[send_robust] ( identifier[sender] = identifier[Variable] , identifier[instance] = identifier[Variable] . identifier[objects] . identifier[get] ( identifier[pk] = identifier[instance] . identifier[pk] ))
keyword[elif] identifier[type] ( identifier[instance] ) keyword[is] identifier[ExtendedOneWireDevice] :
identifier[post_save] . identifier[send_robust] ( identifier[sender] = identifier[Device] , identifier[instance] = identifier[Device] . identifier[objects] . identifier[get] ( identifier[pk] = identifier[instance] . identifier[pk] )) | def _reinit_daq_daemons(sender, instance, **kwargs):
"""
update the daq daemon configuration when changes be applied in the models
"""
if type(instance) is OneWireDevice:
post_save.send_robust(sender=Device, instance=instance.onewire_device) # depends on [control=['if'], data=[]]
elif type(instance) is OneWireVariable:
post_save.send_robust(sender=Variable, instance=instance.onewire_variable) # depends on [control=['if'], data=[]]
elif type(instance) is ExtendedOneWireVariable:
post_save.send_robust(sender=Variable, instance=Variable.objects.get(pk=instance.pk)) # depends on [control=['if'], data=[]]
elif type(instance) is ExtendedOneWireDevice:
post_save.send_robust(sender=Device, instance=Device.objects.get(pk=instance.pk)) # depends on [control=['if'], data=[]] |
def get_context_data(self, **kwargs):
"""Allow adding a 'render_description' parameter"""
context = super(ScheduleXmlView, self).get_context_data(**kwargs)
if self.request.GET.get('render_description', None) == '1':
context['render_description'] = True
else:
context['render_description'] = False
return context | def function[get_context_data, parameter[self]]:
constant[Allow adding a 'render_description' parameter]
variable[context] assign[=] call[call[name[super], parameter[name[ScheduleXmlView], name[self]]].get_context_data, parameter[]]
if compare[call[name[self].request.GET.get, parameter[constant[render_description], constant[None]]] equal[==] constant[1]] begin[:]
call[name[context]][constant[render_description]] assign[=] constant[True]
return[name[context]] | keyword[def] identifier[get_context_data] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[context] = identifier[super] ( identifier[ScheduleXmlView] , identifier[self] ). identifier[get_context_data] (** identifier[kwargs] )
keyword[if] identifier[self] . identifier[request] . identifier[GET] . identifier[get] ( literal[string] , keyword[None] )== literal[string] :
identifier[context] [ literal[string] ]= keyword[True]
keyword[else] :
identifier[context] [ literal[string] ]= keyword[False]
keyword[return] identifier[context] | def get_context_data(self, **kwargs):
"""Allow adding a 'render_description' parameter"""
context = super(ScheduleXmlView, self).get_context_data(**kwargs)
if self.request.GET.get('render_description', None) == '1':
context['render_description'] = True # depends on [control=['if'], data=[]]
else:
context['render_description'] = False
return context |
def set_idlesleep(self, idlesleep):
"""
Sets CPU idle sleep time value.
:param idlesleep: idle sleep value (integer)
"""
is_running = yield from self.is_running()
if is_running: # router is running
yield from self._hypervisor.send('vm set_idle_sleep_time "{name}" 0 {idlesleep}'.format(name=self._name,
idlesleep=idlesleep))
log.info('Router "{name}" [{id}]: idlesleep updated from {old_idlesleep} to {new_idlesleep}'.format(name=self._name,
id=self._id,
old_idlesleep=self._idlesleep,
new_idlesleep=idlesleep))
self._idlesleep = idlesleep | def function[set_idlesleep, parameter[self, idlesleep]]:
constant[
Sets CPU idle sleep time value.
:param idlesleep: idle sleep value (integer)
]
variable[is_running] assign[=] <ast.YieldFrom object at 0x7da20e955570>
if name[is_running] begin[:]
<ast.YieldFrom object at 0x7da20e9577c0>
call[name[log].info, parameter[call[constant[Router "{name}" [{id}]: idlesleep updated from {old_idlesleep} to {new_idlesleep}].format, parameter[]]]]
name[self]._idlesleep assign[=] name[idlesleep] | keyword[def] identifier[set_idlesleep] ( identifier[self] , identifier[idlesleep] ):
literal[string]
identifier[is_running] = keyword[yield] keyword[from] identifier[self] . identifier[is_running] ()
keyword[if] identifier[is_running] :
keyword[yield] keyword[from] identifier[self] . identifier[_hypervisor] . identifier[send] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] ,
identifier[idlesleep] = identifier[idlesleep] ))
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] ,
identifier[id] = identifier[self] . identifier[_id] ,
identifier[old_idlesleep] = identifier[self] . identifier[_idlesleep] ,
identifier[new_idlesleep] = identifier[idlesleep] ))
identifier[self] . identifier[_idlesleep] = identifier[idlesleep] | def set_idlesleep(self, idlesleep):
"""
Sets CPU idle sleep time value.
:param idlesleep: idle sleep value (integer)
"""
is_running = (yield from self.is_running())
if is_running: # router is running
yield from self._hypervisor.send('vm set_idle_sleep_time "{name}" 0 {idlesleep}'.format(name=self._name, idlesleep=idlesleep)) # depends on [control=['if'], data=[]]
log.info('Router "{name}" [{id}]: idlesleep updated from {old_idlesleep} to {new_idlesleep}'.format(name=self._name, id=self._id, old_idlesleep=self._idlesleep, new_idlesleep=idlesleep))
self._idlesleep = idlesleep |
def find_contours(x, level=0.8, fully_connected='low', positive_orientation='low'):
"""Find iso-valued contours in a 2D array for a given level value, returns list of (n, 2)-ndarrays
see `skimage.measure.find_contours <http://scikit-image.org/docs/dev/api/skimage.measure.html#skimage.measure.find_contours>`__.
Parameters
------------
x : 2D ndarray of double.
Input data in which to find contours.
level : float
Value along which to find contours in the array.
fully_connected : str
Either `low` or `high`. Indicates whether array elements below the given level value are to be considered fully-connected (and hence elements above the value will only be face connected), or vice-versa. (See notes below for details.)
positive_orientation : str
Either `low` or `high`. Indicates whether the output contours will produce positively-oriented polygons around islands of low- or high-valued elements. If `low` then contours will wind counter-clockwise around elements below the iso-value. Alternately, this means that low-valued elements are always on the left of the contour.
Returns
--------
list of (n,2)-ndarrays
Each contour is an ndarray of shape (n, 2), consisting of n (row, column) coordinates along the contour.
"""
return skimage.measure.find_contours(
x, level, fully_connected=fully_connected, positive_orientation=positive_orientation
) | def function[find_contours, parameter[x, level, fully_connected, positive_orientation]]:
constant[Find iso-valued contours in a 2D array for a given level value, returns list of (n, 2)-ndarrays
see `skimage.measure.find_contours <http://scikit-image.org/docs/dev/api/skimage.measure.html#skimage.measure.find_contours>`__.
Parameters
------------
x : 2D ndarray of double.
Input data in which to find contours.
level : float
Value along which to find contours in the array.
fully_connected : str
Either `low` or `high`. Indicates whether array elements below the given level value are to be considered fully-connected (and hence elements above the value will only be face connected), or vice-versa. (See notes below for details.)
positive_orientation : str
Either `low` or `high`. Indicates whether the output contours will produce positively-oriented polygons around islands of low- or high-valued elements. If `low` then contours will wind counter-clockwise around elements below the iso-value. Alternately, this means that low-valued elements are always on the left of the contour.
Returns
--------
list of (n,2)-ndarrays
Each contour is an ndarray of shape (n, 2), consisting of n (row, column) coordinates along the contour.
]
return[call[name[skimage].measure.find_contours, parameter[name[x], name[level]]]] | keyword[def] identifier[find_contours] ( identifier[x] , identifier[level] = literal[int] , identifier[fully_connected] = literal[string] , identifier[positive_orientation] = literal[string] ):
literal[string]
keyword[return] identifier[skimage] . identifier[measure] . identifier[find_contours] (
identifier[x] , identifier[level] , identifier[fully_connected] = identifier[fully_connected] , identifier[positive_orientation] = identifier[positive_orientation]
) | def find_contours(x, level=0.8, fully_connected='low', positive_orientation='low'):
"""Find iso-valued contours in a 2D array for a given level value, returns list of (n, 2)-ndarrays
see `skimage.measure.find_contours <http://scikit-image.org/docs/dev/api/skimage.measure.html#skimage.measure.find_contours>`__.
Parameters
------------
x : 2D ndarray of double.
Input data in which to find contours.
level : float
Value along which to find contours in the array.
fully_connected : str
Either `low` or `high`. Indicates whether array elements below the given level value are to be considered fully-connected (and hence elements above the value will only be face connected), or vice-versa. (See notes below for details.)
positive_orientation : str
Either `low` or `high`. Indicates whether the output contours will produce positively-oriented polygons around islands of low- or high-valued elements. If `low` then contours will wind counter-clockwise around elements below the iso-value. Alternately, this means that low-valued elements are always on the left of the contour.
Returns
--------
list of (n,2)-ndarrays
Each contour is an ndarray of shape (n, 2), consisting of n (row, column) coordinates along the contour.
"""
return skimage.measure.find_contours(x, level, fully_connected=fully_connected, positive_orientation=positive_orientation) |
def note(
cls,
template,
default_params={},
stack_depth=0,
log_context=None,
**more_params
):
"""
:param template: *string* human readable string with placeholders for parameters
:param default_params: *dict* parameters to fill in template
:param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
:param log_context: *dict* extra key:value pairs for your convenience
:param more_params: *any more parameters (which will overwrite default_params)
:return:
"""
timestamp = datetime.utcnow()
if not is_text(template):
Log.error("Log.note was expecting a unicode template")
Log._annotate(
LogItem(
context=exceptions.NOTE,
format=template,
template=template,
params=dict(default_params, **more_params)
),
timestamp,
stack_depth+1
) | def function[note, parameter[cls, template, default_params, stack_depth, log_context]]:
constant[
:param template: *string* human readable string with placeholders for parameters
:param default_params: *dict* parameters to fill in template
:param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
:param log_context: *dict* extra key:value pairs for your convenience
:param more_params: *any more parameters (which will overwrite default_params)
:return:
]
variable[timestamp] assign[=] call[name[datetime].utcnow, parameter[]]
if <ast.UnaryOp object at 0x7da18f00cca0> begin[:]
call[name[Log].error, parameter[constant[Log.note was expecting a unicode template]]]
call[name[Log]._annotate, parameter[call[name[LogItem], parameter[]], name[timestamp], binary_operation[name[stack_depth] + constant[1]]]] | keyword[def] identifier[note] (
identifier[cls] ,
identifier[template] ,
identifier[default_params] ={},
identifier[stack_depth] = literal[int] ,
identifier[log_context] = keyword[None] ,
** identifier[more_params]
):
literal[string]
identifier[timestamp] = identifier[datetime] . identifier[utcnow] ()
keyword[if] keyword[not] identifier[is_text] ( identifier[template] ):
identifier[Log] . identifier[error] ( literal[string] )
identifier[Log] . identifier[_annotate] (
identifier[LogItem] (
identifier[context] = identifier[exceptions] . identifier[NOTE] ,
identifier[format] = identifier[template] ,
identifier[template] = identifier[template] ,
identifier[params] = identifier[dict] ( identifier[default_params] ,** identifier[more_params] )
),
identifier[timestamp] ,
identifier[stack_depth] + literal[int]
) | def note(cls, template, default_params={}, stack_depth=0, log_context=None, **more_params):
"""
:param template: *string* human readable string with placeholders for parameters
:param default_params: *dict* parameters to fill in template
:param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
:param log_context: *dict* extra key:value pairs for your convenience
:param more_params: *any more parameters (which will overwrite default_params)
:return:
"""
timestamp = datetime.utcnow()
if not is_text(template):
Log.error('Log.note was expecting a unicode template') # depends on [control=['if'], data=[]]
Log._annotate(LogItem(context=exceptions.NOTE, format=template, template=template, params=dict(default_params, **more_params)), timestamp, stack_depth + 1) |
def _load_module(self, name: YangIdentifier,
rev: RevisionDate) -> Statement:
"""Read and parse a YANG module or submodule."""
for d in self.module_search_path:
run = 0
while run < 2:
fn = f"{d}/{name}"
if rev and run == 0:
fn += "@" + rev
fn += ".yang"
try:
with open(fn, encoding='utf-8') as infile:
res = ModuleParser(infile.read(), name, rev).parse()
except (FileNotFoundError, PermissionError, ModuleContentMismatch):
run += 1
continue
return res
raise ModuleNotFound(name, rev) | def function[_load_module, parameter[self, name, rev]]:
constant[Read and parse a YANG module or submodule.]
for taget[name[d]] in starred[name[self].module_search_path] begin[:]
variable[run] assign[=] constant[0]
while compare[name[run] less[<] constant[2]] begin[:]
variable[fn] assign[=] <ast.JoinedStr object at 0x7da1b02e6cb0>
if <ast.BoolOp object at 0x7da1b04d52a0> begin[:]
<ast.AugAssign object at 0x7da1b04d7f10>
<ast.AugAssign object at 0x7da1b04d62c0>
<ast.Try object at 0x7da1b04d6d10>
return[name[res]]
<ast.Raise object at 0x7da1b04d7a60> | keyword[def] identifier[_load_module] ( identifier[self] , identifier[name] : identifier[YangIdentifier] ,
identifier[rev] : identifier[RevisionDate] )-> identifier[Statement] :
literal[string]
keyword[for] identifier[d] keyword[in] identifier[self] . identifier[module_search_path] :
identifier[run] = literal[int]
keyword[while] identifier[run] < literal[int] :
identifier[fn] = literal[string]
keyword[if] identifier[rev] keyword[and] identifier[run] == literal[int] :
identifier[fn] += literal[string] + identifier[rev]
identifier[fn] += literal[string]
keyword[try] :
keyword[with] identifier[open] ( identifier[fn] , identifier[encoding] = literal[string] ) keyword[as] identifier[infile] :
identifier[res] = identifier[ModuleParser] ( identifier[infile] . identifier[read] (), identifier[name] , identifier[rev] ). identifier[parse] ()
keyword[except] ( identifier[FileNotFoundError] , identifier[PermissionError] , identifier[ModuleContentMismatch] ):
identifier[run] += literal[int]
keyword[continue]
keyword[return] identifier[res]
keyword[raise] identifier[ModuleNotFound] ( identifier[name] , identifier[rev] ) | def _load_module(self, name: YangIdentifier, rev: RevisionDate) -> Statement:
"""Read and parse a YANG module or submodule."""
for d in self.module_search_path:
run = 0
while run < 2:
fn = f'{d}/{name}'
if rev and run == 0:
fn += '@' + rev # depends on [control=['if'], data=[]]
fn += '.yang'
try:
with open(fn, encoding='utf-8') as infile:
res = ModuleParser(infile.read(), name, rev).parse() # depends on [control=['with'], data=['infile']] # depends on [control=['try'], data=[]]
except (FileNotFoundError, PermissionError, ModuleContentMismatch):
run += 1
continue # depends on [control=['except'], data=[]]
return res # depends on [control=['while'], data=['run']] # depends on [control=['for'], data=['d']]
raise ModuleNotFound(name, rev) |
def scp(cls, vm_id, login, identity, local_file, remote_file):
"""Copy file to remote VM."""
cmd = ['scp']
if identity:
cmd.extend(('-i', identity,))
version, ip_addr = cls.vm_ip(vm_id)
if version == 6:
ip_addr = '[%s]' % ip_addr
cmd.extend((local_file, '%s@%s:%s' %
(login, ip_addr, remote_file),))
cls.echo('Running %s' % ' '.join(cmd))
for _ in range(5):
ret = cls.execute(cmd, False)
if ret:
break
time.sleep(.5)
return ret | def function[scp, parameter[cls, vm_id, login, identity, local_file, remote_file]]:
constant[Copy file to remote VM.]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da18eb57940>]]
if name[identity] begin[:]
call[name[cmd].extend, parameter[tuple[[<ast.Constant object at 0x7da18eb54460>, <ast.Name object at 0x7da18eb54190>]]]]
<ast.Tuple object at 0x7da18eb553f0> assign[=] call[name[cls].vm_ip, parameter[name[vm_id]]]
if compare[name[version] equal[==] constant[6]] begin[:]
variable[ip_addr] assign[=] binary_operation[constant[[%s]] <ast.Mod object at 0x7da2590d6920> name[ip_addr]]
call[name[cmd].extend, parameter[tuple[[<ast.Name object at 0x7da18eb54eb0>, <ast.BinOp object at 0x7da18eb54220>]]]]
call[name[cls].echo, parameter[binary_operation[constant[Running %s] <ast.Mod object at 0x7da2590d6920> call[constant[ ].join, parameter[name[cmd]]]]]]
for taget[name[_]] in starred[call[name[range], parameter[constant[5]]]] begin[:]
variable[ret] assign[=] call[name[cls].execute, parameter[name[cmd], constant[False]]]
if name[ret] begin[:]
break
call[name[time].sleep, parameter[constant[0.5]]]
return[name[ret]] | keyword[def] identifier[scp] ( identifier[cls] , identifier[vm_id] , identifier[login] , identifier[identity] , identifier[local_file] , identifier[remote_file] ):
literal[string]
identifier[cmd] =[ literal[string] ]
keyword[if] identifier[identity] :
identifier[cmd] . identifier[extend] (( literal[string] , identifier[identity] ,))
identifier[version] , identifier[ip_addr] = identifier[cls] . identifier[vm_ip] ( identifier[vm_id] )
keyword[if] identifier[version] == literal[int] :
identifier[ip_addr] = literal[string] % identifier[ip_addr]
identifier[cmd] . identifier[extend] (( identifier[local_file] , literal[string] %
( identifier[login] , identifier[ip_addr] , identifier[remote_file] ),))
identifier[cls] . identifier[echo] ( literal[string] % literal[string] . identifier[join] ( identifier[cmd] ))
keyword[for] identifier[_] keyword[in] identifier[range] ( literal[int] ):
identifier[ret] = identifier[cls] . identifier[execute] ( identifier[cmd] , keyword[False] )
keyword[if] identifier[ret] :
keyword[break]
identifier[time] . identifier[sleep] ( literal[int] )
keyword[return] identifier[ret] | def scp(cls, vm_id, login, identity, local_file, remote_file):
"""Copy file to remote VM."""
cmd = ['scp']
if identity:
cmd.extend(('-i', identity)) # depends on [control=['if'], data=[]]
(version, ip_addr) = cls.vm_ip(vm_id)
if version == 6:
ip_addr = '[%s]' % ip_addr # depends on [control=['if'], data=[]]
cmd.extend((local_file, '%s@%s:%s' % (login, ip_addr, remote_file)))
cls.echo('Running %s' % ' '.join(cmd))
for _ in range(5):
ret = cls.execute(cmd, False)
if ret:
break # depends on [control=['if'], data=[]]
time.sleep(0.5) # depends on [control=['for'], data=[]]
return ret |
def re_install_net_ctrl_paths(self, vrf_table):
"""Re-installs paths from NC with current BGP policy.
Iterates over known paths from NC installed in `vrf4_table` and
adds new path with path attributes as per current VRF configuration.
"""
assert vrf_table
for dest in vrf_table.values():
for path in dest.known_path_list:
if path.source is None:
vrf_table.insert_vrf_path(
nlri=path.nlri,
next_hop=path.nexthop,
gen_lbl=True
)
LOG.debug('Re-installed NC paths with current policy for table %s.',
vrf_table) | def function[re_install_net_ctrl_paths, parameter[self, vrf_table]]:
constant[Re-installs paths from NC with current BGP policy.
Iterates over known paths from NC installed in `vrf4_table` and
adds new path with path attributes as per current VRF configuration.
]
assert[name[vrf_table]]
for taget[name[dest]] in starred[call[name[vrf_table].values, parameter[]]] begin[:]
for taget[name[path]] in starred[name[dest].known_path_list] begin[:]
if compare[name[path].source is constant[None]] begin[:]
call[name[vrf_table].insert_vrf_path, parameter[]]
call[name[LOG].debug, parameter[constant[Re-installed NC paths with current policy for table %s.], name[vrf_table]]] | keyword[def] identifier[re_install_net_ctrl_paths] ( identifier[self] , identifier[vrf_table] ):
literal[string]
keyword[assert] identifier[vrf_table]
keyword[for] identifier[dest] keyword[in] identifier[vrf_table] . identifier[values] ():
keyword[for] identifier[path] keyword[in] identifier[dest] . identifier[known_path_list] :
keyword[if] identifier[path] . identifier[source] keyword[is] keyword[None] :
identifier[vrf_table] . identifier[insert_vrf_path] (
identifier[nlri] = identifier[path] . identifier[nlri] ,
identifier[next_hop] = identifier[path] . identifier[nexthop] ,
identifier[gen_lbl] = keyword[True]
)
identifier[LOG] . identifier[debug] ( literal[string] ,
identifier[vrf_table] ) | def re_install_net_ctrl_paths(self, vrf_table):
"""Re-installs paths from NC with current BGP policy.
Iterates over known paths from NC installed in `vrf4_table` and
adds new path with path attributes as per current VRF configuration.
"""
assert vrf_table
for dest in vrf_table.values():
for path in dest.known_path_list:
if path.source is None:
vrf_table.insert_vrf_path(nlri=path.nlri, next_hop=path.nexthop, gen_lbl=True) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['path']] # depends on [control=['for'], data=['dest']]
LOG.debug('Re-installed NC paths with current policy for table %s.', vrf_table) |
def rate_limit_sleeper(api, response):
"""
Pauses the execution if rate limit is breached.
:param api: Api instance.
:param response: requests.Response object
"""
while response.status_code == 429:
headers = response.headers
remaining_time = headers.get('X-RateLimit-Reset')
sleep = int(remaining_time) - int(time.time())
logger.warning('Rate limit reached! Waiting for [%s]s', sleep)
time.sleep(sleep + 5)
response = api.session.send(response.request)
return response | def function[rate_limit_sleeper, parameter[api, response]]:
constant[
Pauses the execution if rate limit is breached.
:param api: Api instance.
:param response: requests.Response object
]
while compare[name[response].status_code equal[==] constant[429]] begin[:]
variable[headers] assign[=] name[response].headers
variable[remaining_time] assign[=] call[name[headers].get, parameter[constant[X-RateLimit-Reset]]]
variable[sleep] assign[=] binary_operation[call[name[int], parameter[name[remaining_time]]] - call[name[int], parameter[call[name[time].time, parameter[]]]]]
call[name[logger].warning, parameter[constant[Rate limit reached! Waiting for [%s]s], name[sleep]]]
call[name[time].sleep, parameter[binary_operation[name[sleep] + constant[5]]]]
variable[response] assign[=] call[name[api].session.send, parameter[name[response].request]]
return[name[response]] | keyword[def] identifier[rate_limit_sleeper] ( identifier[api] , identifier[response] ):
literal[string]
keyword[while] identifier[response] . identifier[status_code] == literal[int] :
identifier[headers] = identifier[response] . identifier[headers]
identifier[remaining_time] = identifier[headers] . identifier[get] ( literal[string] )
identifier[sleep] = identifier[int] ( identifier[remaining_time] )- identifier[int] ( identifier[time] . identifier[time] ())
identifier[logger] . identifier[warning] ( literal[string] , identifier[sleep] )
identifier[time] . identifier[sleep] ( identifier[sleep] + literal[int] )
identifier[response] = identifier[api] . identifier[session] . identifier[send] ( identifier[response] . identifier[request] )
keyword[return] identifier[response] | def rate_limit_sleeper(api, response):
"""
Pauses the execution if rate limit is breached.
:param api: Api instance.
:param response: requests.Response object
"""
while response.status_code == 429:
headers = response.headers
remaining_time = headers.get('X-RateLimit-Reset')
sleep = int(remaining_time) - int(time.time())
logger.warning('Rate limit reached! Waiting for [%s]s', sleep)
time.sleep(sleep + 5)
response = api.session.send(response.request) # depends on [control=['while'], data=[]]
return response |
def add_assigned_resource(self, resource_type: str,
value: Union[str, int, float, bool],
parameters: dict = None):
"""Add assigned resource to the processing block.
Args:
resource_type (str): Resource type
value: Resource value
parameters (dict, optional): Parameters specific to the resource
"""
if parameters is None:
parameters = dict()
resources = DB.get_hash_value(self.key, 'resources_assigned')
resources = ast.literal_eval(resources)
resources.append(dict(type=resource_type, value=value,
parameters=parameters))
DB.set_hash_value(self.key, 'resources_assigned', resources) | def function[add_assigned_resource, parameter[self, resource_type, value, parameters]]:
constant[Add assigned resource to the processing block.
Args:
resource_type (str): Resource type
value: Resource value
parameters (dict, optional): Parameters specific to the resource
]
if compare[name[parameters] is constant[None]] begin[:]
variable[parameters] assign[=] call[name[dict], parameter[]]
variable[resources] assign[=] call[name[DB].get_hash_value, parameter[name[self].key, constant[resources_assigned]]]
variable[resources] assign[=] call[name[ast].literal_eval, parameter[name[resources]]]
call[name[resources].append, parameter[call[name[dict], parameter[]]]]
call[name[DB].set_hash_value, parameter[name[self].key, constant[resources_assigned], name[resources]]] | keyword[def] identifier[add_assigned_resource] ( identifier[self] , identifier[resource_type] : identifier[str] ,
identifier[value] : identifier[Union] [ identifier[str] , identifier[int] , identifier[float] , identifier[bool] ],
identifier[parameters] : identifier[dict] = keyword[None] ):
literal[string]
keyword[if] identifier[parameters] keyword[is] keyword[None] :
identifier[parameters] = identifier[dict] ()
identifier[resources] = identifier[DB] . identifier[get_hash_value] ( identifier[self] . identifier[key] , literal[string] )
identifier[resources] = identifier[ast] . identifier[literal_eval] ( identifier[resources] )
identifier[resources] . identifier[append] ( identifier[dict] ( identifier[type] = identifier[resource_type] , identifier[value] = identifier[value] ,
identifier[parameters] = identifier[parameters] ))
identifier[DB] . identifier[set_hash_value] ( identifier[self] . identifier[key] , literal[string] , identifier[resources] ) | def add_assigned_resource(self, resource_type: str, value: Union[str, int, float, bool], parameters: dict=None):
"""Add assigned resource to the processing block.
Args:
resource_type (str): Resource type
value: Resource value
parameters (dict, optional): Parameters specific to the resource
"""
if parameters is None:
parameters = dict() # depends on [control=['if'], data=['parameters']]
resources = DB.get_hash_value(self.key, 'resources_assigned')
resources = ast.literal_eval(resources)
resources.append(dict(type=resource_type, value=value, parameters=parameters))
DB.set_hash_value(self.key, 'resources_assigned', resources) |
def get_owlsim_stats(url) -> Tuple[IcStatistic, Dict[str, IcStatistic]]:
"""
:return Tuple[IcStatistic, Dict[str, IcStatistic]]
:raises JSONDecodeError: If the response body does not contain valid json
"""
scigraph = OntologyFactory().create('scigraph:ontology')
category_stats = {}
categories = [enum.value for enum in HpoUpperLevel]
sim_response = get_attribute_information_profile(url, categories=tuple(categories))
try:
global_stats = IcStatistic(
mean_mean_ic=float(sim_response['system_stats']['meanMeanIC']),
mean_sum_ic=float(sim_response['system_stats']['meanSumIC']),
mean_cls=float(sim_response['system_stats']['meanN']),
max_max_ic=float(sim_response['system_stats']['maxMaxIC']),
max_sum_ic=float(sim_response['system_stats']['maxSumIC']),
individual_count=int(sim_response['system_stats']['individuals']),
mean_max_ic=float(sim_response['system_stats']['meanMaxIC'])
)
for cat_stat in sim_response['categorical_scores']:
category_stats[cat_stat['id']] = IcStatistic(
mean_mean_ic=float(cat_stat['system_stats']['meanMeanIC']),
mean_sum_ic=float(cat_stat['system_stats']['meanSumIC']),
mean_cls=float(cat_stat['system_stats']['meanN']),
max_max_ic=float(cat_stat['system_stats']['maxMaxIC']),
max_sum_ic=float(cat_stat['system_stats']['maxSumIC']),
individual_count=int(cat_stat['system_stats']['individuals']),
mean_max_ic=float(cat_stat['system_stats']['meanMaxIC']),
descendants=scigraph.descendants(cat_stat['id'], relations=["subClassOf"])
)
except JSONDecodeError as json_exc:
raise JSONDecodeError(
"Cannot parse owlsim2 response: {}".format(json_exc.msg),
json_exc.doc,
json_exc.pos
)
return global_stats, category_stats | def function[get_owlsim_stats, parameter[url]]:
constant[
:return Tuple[IcStatistic, Dict[str, IcStatistic]]
:raises JSONDecodeError: If the response body does not contain valid json
]
variable[scigraph] assign[=] call[call[name[OntologyFactory], parameter[]].create, parameter[constant[scigraph:ontology]]]
variable[category_stats] assign[=] dictionary[[], []]
variable[categories] assign[=] <ast.ListComp object at 0x7da1b07322c0>
variable[sim_response] assign[=] call[name[get_attribute_information_profile], parameter[name[url]]]
<ast.Try object at 0x7da1b0732e30>
return[tuple[[<ast.Name object at 0x7da1b08b9e40>, <ast.Name object at 0x7da1b08b9e10>]]] | keyword[def] identifier[get_owlsim_stats] ( identifier[url] )-> identifier[Tuple] [ identifier[IcStatistic] , identifier[Dict] [ identifier[str] , identifier[IcStatistic] ]]:
literal[string]
identifier[scigraph] = identifier[OntologyFactory] (). identifier[create] ( literal[string] )
identifier[category_stats] ={}
identifier[categories] =[ identifier[enum] . identifier[value] keyword[for] identifier[enum] keyword[in] identifier[HpoUpperLevel] ]
identifier[sim_response] = identifier[get_attribute_information_profile] ( identifier[url] , identifier[categories] = identifier[tuple] ( identifier[categories] ))
keyword[try] :
identifier[global_stats] = identifier[IcStatistic] (
identifier[mean_mean_ic] = identifier[float] ( identifier[sim_response] [ literal[string] ][ literal[string] ]),
identifier[mean_sum_ic] = identifier[float] ( identifier[sim_response] [ literal[string] ][ literal[string] ]),
identifier[mean_cls] = identifier[float] ( identifier[sim_response] [ literal[string] ][ literal[string] ]),
identifier[max_max_ic] = identifier[float] ( identifier[sim_response] [ literal[string] ][ literal[string] ]),
identifier[max_sum_ic] = identifier[float] ( identifier[sim_response] [ literal[string] ][ literal[string] ]),
identifier[individual_count] = identifier[int] ( identifier[sim_response] [ literal[string] ][ literal[string] ]),
identifier[mean_max_ic] = identifier[float] ( identifier[sim_response] [ literal[string] ][ literal[string] ])
)
keyword[for] identifier[cat_stat] keyword[in] identifier[sim_response] [ literal[string] ]:
identifier[category_stats] [ identifier[cat_stat] [ literal[string] ]]= identifier[IcStatistic] (
identifier[mean_mean_ic] = identifier[float] ( identifier[cat_stat] [ literal[string] ][ literal[string] ]),
identifier[mean_sum_ic] = identifier[float] ( identifier[cat_stat] [ literal[string] ][ literal[string] ]),
identifier[mean_cls] = identifier[float] ( identifier[cat_stat] [ literal[string] ][ literal[string] ]),
identifier[max_max_ic] = identifier[float] ( identifier[cat_stat] [ literal[string] ][ literal[string] ]),
identifier[max_sum_ic] = identifier[float] ( identifier[cat_stat] [ literal[string] ][ literal[string] ]),
identifier[individual_count] = identifier[int] ( identifier[cat_stat] [ literal[string] ][ literal[string] ]),
identifier[mean_max_ic] = identifier[float] ( identifier[cat_stat] [ literal[string] ][ literal[string] ]),
identifier[descendants] = identifier[scigraph] . identifier[descendants] ( identifier[cat_stat] [ literal[string] ], identifier[relations] =[ literal[string] ])
)
keyword[except] identifier[JSONDecodeError] keyword[as] identifier[json_exc] :
keyword[raise] identifier[JSONDecodeError] (
literal[string] . identifier[format] ( identifier[json_exc] . identifier[msg] ),
identifier[json_exc] . identifier[doc] ,
identifier[json_exc] . identifier[pos]
)
keyword[return] identifier[global_stats] , identifier[category_stats] | def get_owlsim_stats(url) -> Tuple[IcStatistic, Dict[str, IcStatistic]]:
"""
:return Tuple[IcStatistic, Dict[str, IcStatistic]]
:raises JSONDecodeError: If the response body does not contain valid json
"""
scigraph = OntologyFactory().create('scigraph:ontology')
category_stats = {}
categories = [enum.value for enum in HpoUpperLevel]
sim_response = get_attribute_information_profile(url, categories=tuple(categories))
try:
global_stats = IcStatistic(mean_mean_ic=float(sim_response['system_stats']['meanMeanIC']), mean_sum_ic=float(sim_response['system_stats']['meanSumIC']), mean_cls=float(sim_response['system_stats']['meanN']), max_max_ic=float(sim_response['system_stats']['maxMaxIC']), max_sum_ic=float(sim_response['system_stats']['maxSumIC']), individual_count=int(sim_response['system_stats']['individuals']), mean_max_ic=float(sim_response['system_stats']['meanMaxIC']))
for cat_stat in sim_response['categorical_scores']:
category_stats[cat_stat['id']] = IcStatistic(mean_mean_ic=float(cat_stat['system_stats']['meanMeanIC']), mean_sum_ic=float(cat_stat['system_stats']['meanSumIC']), mean_cls=float(cat_stat['system_stats']['meanN']), max_max_ic=float(cat_stat['system_stats']['maxMaxIC']), max_sum_ic=float(cat_stat['system_stats']['maxSumIC']), individual_count=int(cat_stat['system_stats']['individuals']), mean_max_ic=float(cat_stat['system_stats']['meanMaxIC']), descendants=scigraph.descendants(cat_stat['id'], relations=['subClassOf'])) # depends on [control=['for'], data=['cat_stat']] # depends on [control=['try'], data=[]]
except JSONDecodeError as json_exc:
raise JSONDecodeError('Cannot parse owlsim2 response: {}'.format(json_exc.msg), json_exc.doc, json_exc.pos) # depends on [control=['except'], data=['json_exc']]
return (global_stats, category_stats) |
def populate_settings_dir(force: bool = False) -> bool:
"""
Populate settings directory with default settings files
Args:
force: if ``True``, replace existing settings files with default ones
Returns:
``True`` if any files were copied and ``False`` otherwise
"""
res = False
if _default_settings_path == _settings_path:
return res
for src in list(_default_settings_path.glob('**/*.json')):
dest = _settings_path / src.relative_to(_default_settings_path)
if not force and dest.exists():
continue
res = True
dest.parent.mkdir(parents=True, exist_ok=True)
shutil.copy(src, dest)
return res | def function[populate_settings_dir, parameter[force]]:
constant[
Populate settings directory with default settings files
Args:
force: if ``True``, replace existing settings files with default ones
Returns:
``True`` if any files were copied and ``False`` otherwise
]
variable[res] assign[=] constant[False]
if compare[name[_default_settings_path] equal[==] name[_settings_path]] begin[:]
return[name[res]]
for taget[name[src]] in starred[call[name[list], parameter[call[name[_default_settings_path].glob, parameter[constant[**/*.json]]]]]] begin[:]
variable[dest] assign[=] binary_operation[name[_settings_path] / call[name[src].relative_to, parameter[name[_default_settings_path]]]]
if <ast.BoolOp object at 0x7da20c76f310> begin[:]
continue
variable[res] assign[=] constant[True]
call[name[dest].parent.mkdir, parameter[]]
call[name[shutil].copy, parameter[name[src], name[dest]]]
return[name[res]] | keyword[def] identifier[populate_settings_dir] ( identifier[force] : identifier[bool] = keyword[False] )-> identifier[bool] :
literal[string]
identifier[res] = keyword[False]
keyword[if] identifier[_default_settings_path] == identifier[_settings_path] :
keyword[return] identifier[res]
keyword[for] identifier[src] keyword[in] identifier[list] ( identifier[_default_settings_path] . identifier[glob] ( literal[string] )):
identifier[dest] = identifier[_settings_path] / identifier[src] . identifier[relative_to] ( identifier[_default_settings_path] )
keyword[if] keyword[not] identifier[force] keyword[and] identifier[dest] . identifier[exists] ():
keyword[continue]
identifier[res] = keyword[True]
identifier[dest] . identifier[parent] . identifier[mkdir] ( identifier[parents] = keyword[True] , identifier[exist_ok] = keyword[True] )
identifier[shutil] . identifier[copy] ( identifier[src] , identifier[dest] )
keyword[return] identifier[res] | def populate_settings_dir(force: bool=False) -> bool:
"""
Populate settings directory with default settings files
Args:
force: if ``True``, replace existing settings files with default ones
Returns:
``True`` if any files were copied and ``False`` otherwise
"""
res = False
if _default_settings_path == _settings_path:
return res # depends on [control=['if'], data=[]]
for src in list(_default_settings_path.glob('**/*.json')):
dest = _settings_path / src.relative_to(_default_settings_path)
if not force and dest.exists():
continue # depends on [control=['if'], data=[]]
res = True
dest.parent.mkdir(parents=True, exist_ok=True)
shutil.copy(src, dest) # depends on [control=['for'], data=['src']]
return res |
def get(key, default=-1):
"""Backport support for original codes."""
if isinstance(key, int):
return EtherType(key)
if key not in EtherType._member_map_:
extend_enum(EtherType, key, default)
return EtherType[key] | def function[get, parameter[key, default]]:
constant[Backport support for original codes.]
if call[name[isinstance], parameter[name[key], name[int]]] begin[:]
return[call[name[EtherType], parameter[name[key]]]]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[EtherType]._member_map_] begin[:]
call[name[extend_enum], parameter[name[EtherType], name[key], name[default]]]
return[call[name[EtherType]][name[key]]] | keyword[def] identifier[get] ( identifier[key] , identifier[default] =- literal[int] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[key] , identifier[int] ):
keyword[return] identifier[EtherType] ( identifier[key] )
keyword[if] identifier[key] keyword[not] keyword[in] identifier[EtherType] . identifier[_member_map_] :
identifier[extend_enum] ( identifier[EtherType] , identifier[key] , identifier[default] )
keyword[return] identifier[EtherType] [ identifier[key] ] | def get(key, default=-1):
"""Backport support for original codes."""
if isinstance(key, int):
return EtherType(key) # depends on [control=['if'], data=[]]
if key not in EtherType._member_map_:
extend_enum(EtherType, key, default) # depends on [control=['if'], data=['key']]
return EtherType[key] |
def initialize_new_session():
"""Check session and initialize if necessary
Before every request, check the user session. If no session exists, add
one and provide temporary locations for images
"""
if 'image_uid_counter' in session and 'image_list' in session:
logger.debug('images are already being tracked')
else:
# reset image list counter for the session
session['image_uid_counter'] = 0
session['image_list'] = []
if 'img_input_dir' in session and 'img_output_dir' in session:
logger.debug('temporary image directories already exist')
else:
# make image upload directory
session['img_input_dir'] = mkdtemp()
session['img_output_dir'] = mkdtemp() | def function[initialize_new_session, parameter[]]:
constant[Check session and initialize if necessary
Before every request, check the user session. If no session exists, add
one and provide temporary locations for images
]
if <ast.BoolOp object at 0x7da1b152a5c0> begin[:]
call[name[logger].debug, parameter[constant[images are already being tracked]]]
if <ast.BoolOp object at 0x7da1b152a380> begin[:]
call[name[logger].debug, parameter[constant[temporary image directories already exist]]] | keyword[def] identifier[initialize_new_session] ():
literal[string]
keyword[if] literal[string] keyword[in] identifier[session] keyword[and] literal[string] keyword[in] identifier[session] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[else] :
identifier[session] [ literal[string] ]= literal[int]
identifier[session] [ literal[string] ]=[]
keyword[if] literal[string] keyword[in] identifier[session] keyword[and] literal[string] keyword[in] identifier[session] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[else] :
identifier[session] [ literal[string] ]= identifier[mkdtemp] ()
identifier[session] [ literal[string] ]= identifier[mkdtemp] () | def initialize_new_session():
"""Check session and initialize if necessary
Before every request, check the user session. If no session exists, add
one and provide temporary locations for images
"""
if 'image_uid_counter' in session and 'image_list' in session:
logger.debug('images are already being tracked') # depends on [control=['if'], data=[]]
else:
# reset image list counter for the session
session['image_uid_counter'] = 0
session['image_list'] = []
if 'img_input_dir' in session and 'img_output_dir' in session:
logger.debug('temporary image directories already exist') # depends on [control=['if'], data=[]]
else:
# make image upload directory
session['img_input_dir'] = mkdtemp()
session['img_output_dir'] = mkdtemp() |
def rezero(self):
"""
Move the current scene so that the AABB of the whole
scene is centered at the origin.
Does this by changing the base frame to a new, offset
base frame.
"""
if self.is_empty or np.allclose(self.centroid, 0.0):
# early exit since what we want already exists
return
# the transformation to move the overall scene to AABB centroid
matrix = np.eye(4)
matrix[:3, 3] = -self.centroid
# we are going to change the base frame
new_base = str(self.graph.base_frame) + '_I'
self.graph.update(frame_from=new_base,
frame_to=self.graph.base_frame,
matrix=matrix)
self.graph.base_frame = new_base | def function[rezero, parameter[self]]:
constant[
Move the current scene so that the AABB of the whole
scene is centered at the origin.
Does this by changing the base frame to a new, offset
base frame.
]
if <ast.BoolOp object at 0x7da1b22d40d0> begin[:]
return[None]
variable[matrix] assign[=] call[name[np].eye, parameter[constant[4]]]
call[name[matrix]][tuple[[<ast.Slice object at 0x7da1b22d36d0>, <ast.Constant object at 0x7da1b22d3190>]]] assign[=] <ast.UnaryOp object at 0x7da1b22d3c40>
variable[new_base] assign[=] binary_operation[call[name[str], parameter[name[self].graph.base_frame]] + constant[_I]]
call[name[self].graph.update, parameter[]]
name[self].graph.base_frame assign[=] name[new_base] | keyword[def] identifier[rezero] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[is_empty] keyword[or] identifier[np] . identifier[allclose] ( identifier[self] . identifier[centroid] , literal[int] ):
keyword[return]
identifier[matrix] = identifier[np] . identifier[eye] ( literal[int] )
identifier[matrix] [: literal[int] , literal[int] ]=- identifier[self] . identifier[centroid]
identifier[new_base] = identifier[str] ( identifier[self] . identifier[graph] . identifier[base_frame] )+ literal[string]
identifier[self] . identifier[graph] . identifier[update] ( identifier[frame_from] = identifier[new_base] ,
identifier[frame_to] = identifier[self] . identifier[graph] . identifier[base_frame] ,
identifier[matrix] = identifier[matrix] )
identifier[self] . identifier[graph] . identifier[base_frame] = identifier[new_base] | def rezero(self):
"""
Move the current scene so that the AABB of the whole
scene is centered at the origin.
Does this by changing the base frame to a new, offset
base frame.
"""
if self.is_empty or np.allclose(self.centroid, 0.0):
# early exit since what we want already exists
return # depends on [control=['if'], data=[]]
# the transformation to move the overall scene to AABB centroid
matrix = np.eye(4)
matrix[:3, 3] = -self.centroid
# we are going to change the base frame
new_base = str(self.graph.base_frame) + '_I'
self.graph.update(frame_from=new_base, frame_to=self.graph.base_frame, matrix=matrix)
self.graph.base_frame = new_base |
def explain_image(self, labels, instance, column_name=None, num_features=100000,
num_samples=300, batch_size=200, hide_color=0):
"""Explain an image of a prediction.
It analyze the prediction by LIME, and returns a report of which words are most impactful
in contributing to certain labels.
Args:
labels: a list of labels to explain.
instance: the prediction instance. It needs to conform to model's input. Can be a csv
line string, or a dict.
column_name: which image column to explain. Can be None if there is only one image column
in the model input.
num_features: maximum number of areas (features) to analyze. Passed to
LIME LimeImageExplainer directly.
num_samples: size of the neighborhood to learn the linear model. Passed to
LIME LimeImageExplainer directly.
batch_size: size of batches passed to predict_fn. Passed to
LIME LimeImageExplainer directly.
hide_color: the color used to perturb images. Passed to
LIME LimeImageExplainer directly.
Returns:
A LIME's lime.explanation.Explanation.
Throws:
ValueError if the given image column is not found in model input or column_name is None
but there are multiple image columns in model input.
"""
from lime.lime_image import LimeImageExplainer
if len(self._image_columns) > 1 and not column_name:
raise ValueError('There are multiple image columns in the input of the model. ' +
'Please specify "column_name".')
elif column_name and column_name not in self._image_columns:
raise ValueError('Specified column_name "%s" not found in the model input.'
% column_name)
image_column_name = column_name if column_name else self._image_columns[0]
if isinstance(instance, six.string_types):
instance = next(csv.DictReader([instance], fieldnames=self._headers))
predict_fn = self._make_image_predict_fn(labels, instance, image_column_name)
explainer = LimeImageExplainer()
with file_io.FileIO(instance[image_column_name], 'rb') as fi:
im = Image.open(fi)
im.thumbnail((299, 299), Image.ANTIALIAS)
rgb_im = np.asarray(im.convert('RGB'))
exp = explainer.explain_instance(
rgb_im, predict_fn, labels=range(len(labels)), top_labels=None,
hide_color=hide_color, num_features=num_features,
num_samples=num_samples, batch_size=batch_size)
return exp | def function[explain_image, parameter[self, labels, instance, column_name, num_features, num_samples, batch_size, hide_color]]:
constant[Explain an image of a prediction.
It analyze the prediction by LIME, and returns a report of which words are most impactful
in contributing to certain labels.
Args:
labels: a list of labels to explain.
instance: the prediction instance. It needs to conform to model's input. Can be a csv
line string, or a dict.
column_name: which image column to explain. Can be None if there is only one image column
in the model input.
num_features: maximum number of areas (features) to analyze. Passed to
LIME LimeImageExplainer directly.
num_samples: size of the neighborhood to learn the linear model. Passed to
LIME LimeImageExplainer directly.
batch_size: size of batches passed to predict_fn. Passed to
LIME LimeImageExplainer directly.
hide_color: the color used to perturb images. Passed to
LIME LimeImageExplainer directly.
Returns:
A LIME's lime.explanation.Explanation.
Throws:
ValueError if the given image column is not found in model input or column_name is None
but there are multiple image columns in model input.
]
from relative_module[lime.lime_image] import module[LimeImageExplainer]
if <ast.BoolOp object at 0x7da1b2347010> begin[:]
<ast.Raise object at 0x7da1b2345ed0>
variable[image_column_name] assign[=] <ast.IfExp object at 0x7da1b2347730>
if call[name[isinstance], parameter[name[instance], name[six].string_types]] begin[:]
variable[instance] assign[=] call[name[next], parameter[call[name[csv].DictReader, parameter[list[[<ast.Name object at 0x7da1b2347100>]]]]]]
variable[predict_fn] assign[=] call[name[self]._make_image_predict_fn, parameter[name[labels], name[instance], name[image_column_name]]]
variable[explainer] assign[=] call[name[LimeImageExplainer], parameter[]]
with call[name[file_io].FileIO, parameter[call[name[instance]][name[image_column_name]], constant[rb]]] begin[:]
variable[im] assign[=] call[name[Image].open, parameter[name[fi]]]
call[name[im].thumbnail, parameter[tuple[[<ast.Constant object at 0x7da1b23470a0>, <ast.Constant object at 0x7da1b23444f0>]], name[Image].ANTIALIAS]]
variable[rgb_im] assign[=] call[name[np].asarray, parameter[call[name[im].convert, parameter[constant[RGB]]]]]
variable[exp] assign[=] call[name[explainer].explain_instance, parameter[name[rgb_im], name[predict_fn]]]
return[name[exp]] | keyword[def] identifier[explain_image] ( identifier[self] , identifier[labels] , identifier[instance] , identifier[column_name] = keyword[None] , identifier[num_features] = literal[int] ,
identifier[num_samples] = literal[int] , identifier[batch_size] = literal[int] , identifier[hide_color] = literal[int] ):
literal[string]
keyword[from] identifier[lime] . identifier[lime_image] keyword[import] identifier[LimeImageExplainer]
keyword[if] identifier[len] ( identifier[self] . identifier[_image_columns] )> literal[int] keyword[and] keyword[not] identifier[column_name] :
keyword[raise] identifier[ValueError] ( literal[string] +
literal[string] )
keyword[elif] identifier[column_name] keyword[and] identifier[column_name] keyword[not] keyword[in] identifier[self] . identifier[_image_columns] :
keyword[raise] identifier[ValueError] ( literal[string]
% identifier[column_name] )
identifier[image_column_name] = identifier[column_name] keyword[if] identifier[column_name] keyword[else] identifier[self] . identifier[_image_columns] [ literal[int] ]
keyword[if] identifier[isinstance] ( identifier[instance] , identifier[six] . identifier[string_types] ):
identifier[instance] = identifier[next] ( identifier[csv] . identifier[DictReader] ([ identifier[instance] ], identifier[fieldnames] = identifier[self] . identifier[_headers] ))
identifier[predict_fn] = identifier[self] . identifier[_make_image_predict_fn] ( identifier[labels] , identifier[instance] , identifier[image_column_name] )
identifier[explainer] = identifier[LimeImageExplainer] ()
keyword[with] identifier[file_io] . identifier[FileIO] ( identifier[instance] [ identifier[image_column_name] ], literal[string] ) keyword[as] identifier[fi] :
identifier[im] = identifier[Image] . identifier[open] ( identifier[fi] )
identifier[im] . identifier[thumbnail] (( literal[int] , literal[int] ), identifier[Image] . identifier[ANTIALIAS] )
identifier[rgb_im] = identifier[np] . identifier[asarray] ( identifier[im] . identifier[convert] ( literal[string] ))
identifier[exp] = identifier[explainer] . identifier[explain_instance] (
identifier[rgb_im] , identifier[predict_fn] , identifier[labels] = identifier[range] ( identifier[len] ( identifier[labels] )), identifier[top_labels] = keyword[None] ,
identifier[hide_color] = identifier[hide_color] , identifier[num_features] = identifier[num_features] ,
identifier[num_samples] = identifier[num_samples] , identifier[batch_size] = identifier[batch_size] )
keyword[return] identifier[exp] | def explain_image(self, labels, instance, column_name=None, num_features=100000, num_samples=300, batch_size=200, hide_color=0):
"""Explain an image of a prediction.
It analyze the prediction by LIME, and returns a report of which words are most impactful
in contributing to certain labels.
Args:
labels: a list of labels to explain.
instance: the prediction instance. It needs to conform to model's input. Can be a csv
line string, or a dict.
column_name: which image column to explain. Can be None if there is only one image column
in the model input.
num_features: maximum number of areas (features) to analyze. Passed to
LIME LimeImageExplainer directly.
num_samples: size of the neighborhood to learn the linear model. Passed to
LIME LimeImageExplainer directly.
batch_size: size of batches passed to predict_fn. Passed to
LIME LimeImageExplainer directly.
hide_color: the color used to perturb images. Passed to
LIME LimeImageExplainer directly.
Returns:
A LIME's lime.explanation.Explanation.
Throws:
ValueError if the given image column is not found in model input or column_name is None
but there are multiple image columns in model input.
"""
from lime.lime_image import LimeImageExplainer
if len(self._image_columns) > 1 and (not column_name):
raise ValueError('There are multiple image columns in the input of the model. ' + 'Please specify "column_name".') # depends on [control=['if'], data=[]]
elif column_name and column_name not in self._image_columns:
raise ValueError('Specified column_name "%s" not found in the model input.' % column_name) # depends on [control=['if'], data=[]]
image_column_name = column_name if column_name else self._image_columns[0]
if isinstance(instance, six.string_types):
instance = next(csv.DictReader([instance], fieldnames=self._headers)) # depends on [control=['if'], data=[]]
predict_fn = self._make_image_predict_fn(labels, instance, image_column_name)
explainer = LimeImageExplainer()
with file_io.FileIO(instance[image_column_name], 'rb') as fi:
im = Image.open(fi) # depends on [control=['with'], data=['fi']]
im.thumbnail((299, 299), Image.ANTIALIAS)
rgb_im = np.asarray(im.convert('RGB'))
exp = explainer.explain_instance(rgb_im, predict_fn, labels=range(len(labels)), top_labels=None, hide_color=hide_color, num_features=num_features, num_samples=num_samples, batch_size=batch_size)
return exp |
def _teigha_convert(data, extension='dwg'):
"""
Convert any DXF/DWG to R14 ASCII DXF using Teigha Converter.
Parameters
---------------
data : str or bytes
The contents of a DXF or DWG file
extension : str
The format of data: 'dwg' or 'dxf'
Returns
--------------
converted : str
Result as R14 ASCII DXF
"""
# temp directory for DWG file
dir_dwg = tempfile.mkdtemp()
# temp directory for DXF output
dir_out = tempfile.mkdtemp()
# put together the subprocess command
cmd = [_xvfb_run, # suppress the GUI QT status bar
'-a', # use an automatic screen
_teigha, # run the converter
dir_dwg, # the directory containing DWG files
dir_out, # the directory for output DXF files
'ACAD14', # the revision of DXF
'DXF', # the output format
'1', # recurse input folder
'1'] # audit each file
# if Xvfb is already running it probably
# has a working configuration so use it
running = b'Xvfb' in subprocess.check_output(['ps', '-eaf'])
# chop off XVFB if it isn't installed or is running
if running or _xvfb_run is None:
cmd = cmd[2:]
# create file in correct mode for data
if hasattr(data, 'encode'):
# data is a string which can be encoded to bytes
mode = 'w'
else:
# data is already bytes
mode = 'wb'
# write the file_obj in the temp directory
dwg_name = os.path.join(dir_dwg, 'drawing.' + extension)
with open(dwg_name, mode) as f:
f.write(data)
# run the conversion
output = subprocess.check_output(cmd)
# load the ASCII DXF produced from the conversion
name_result = os.path.join(dir_out, 'drawing.dxf')
# if the conversion failed log things before failing
if not os.path.exists(name_result):
log.error('teigha convert failed!\nls {}: {}\n\n {}'.format(
dir_out,
os.listdir(dir_out),
output))
raise ValueError('conversion using Teigha failed!')
# load converted file into a string
with open(name_result, 'r') as f:
converted = f.read()
# remove the temporary directories
shutil.rmtree(dir_out)
shutil.rmtree(dir_dwg)
return converted | def function[_teigha_convert, parameter[data, extension]]:
constant[
Convert any DXF/DWG to R14 ASCII DXF using Teigha Converter.
Parameters
---------------
data : str or bytes
The contents of a DXF or DWG file
extension : str
The format of data: 'dwg' or 'dxf'
Returns
--------------
converted : str
Result as R14 ASCII DXF
]
variable[dir_dwg] assign[=] call[name[tempfile].mkdtemp, parameter[]]
variable[dir_out] assign[=] call[name[tempfile].mkdtemp, parameter[]]
variable[cmd] assign[=] list[[<ast.Name object at 0x7da1b23c5780>, <ast.Constant object at 0x7da1b23c4fd0>, <ast.Name object at 0x7da1b23c57b0>, <ast.Name object at 0x7da1b23c4100>, <ast.Name object at 0x7da1b23c53f0>, <ast.Constant object at 0x7da1b23c4af0>, <ast.Constant object at 0x7da1b23c5300>, <ast.Constant object at 0x7da1b23c5cf0>, <ast.Constant object at 0x7da1b23c5090>]]
variable[running] assign[=] compare[constant[b'Xvfb'] in call[name[subprocess].check_output, parameter[list[[<ast.Constant object at 0x7da1b23c52a0>, <ast.Constant object at 0x7da1b23c5f90>]]]]]
if <ast.BoolOp object at 0x7da1b23c5c90> begin[:]
variable[cmd] assign[=] call[name[cmd]][<ast.Slice object at 0x7da1b23c4850>]
if call[name[hasattr], parameter[name[data], constant[encode]]] begin[:]
variable[mode] assign[=] constant[w]
variable[dwg_name] assign[=] call[name[os].path.join, parameter[name[dir_dwg], binary_operation[constant[drawing.] + name[extension]]]]
with call[name[open], parameter[name[dwg_name], name[mode]]] begin[:]
call[name[f].write, parameter[name[data]]]
variable[output] assign[=] call[name[subprocess].check_output, parameter[name[cmd]]]
variable[name_result] assign[=] call[name[os].path.join, parameter[name[dir_out], constant[drawing.dxf]]]
if <ast.UnaryOp object at 0x7da1b23c4c70> begin[:]
call[name[log].error, parameter[call[constant[teigha convert failed!
ls {}: {}
{}].format, parameter[name[dir_out], call[name[os].listdir, parameter[name[dir_out]]], name[output]]]]]
<ast.Raise object at 0x7da1b23c7fa0>
with call[name[open], parameter[name[name_result], constant[r]]] begin[:]
variable[converted] assign[=] call[name[f].read, parameter[]]
call[name[shutil].rmtree, parameter[name[dir_out]]]
call[name[shutil].rmtree, parameter[name[dir_dwg]]]
return[name[converted]] | keyword[def] identifier[_teigha_convert] ( identifier[data] , identifier[extension] = literal[string] ):
literal[string]
identifier[dir_dwg] = identifier[tempfile] . identifier[mkdtemp] ()
identifier[dir_out] = identifier[tempfile] . identifier[mkdtemp] ()
identifier[cmd] =[ identifier[_xvfb_run] ,
literal[string] ,
identifier[_teigha] ,
identifier[dir_dwg] ,
identifier[dir_out] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ]
identifier[running] = literal[string] keyword[in] identifier[subprocess] . identifier[check_output] ([ literal[string] , literal[string] ])
keyword[if] identifier[running] keyword[or] identifier[_xvfb_run] keyword[is] keyword[None] :
identifier[cmd] = identifier[cmd] [ literal[int] :]
keyword[if] identifier[hasattr] ( identifier[data] , literal[string] ):
identifier[mode] = literal[string]
keyword[else] :
identifier[mode] = literal[string]
identifier[dwg_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_dwg] , literal[string] + identifier[extension] )
keyword[with] identifier[open] ( identifier[dwg_name] , identifier[mode] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[data] )
identifier[output] = identifier[subprocess] . identifier[check_output] ( identifier[cmd] )
identifier[name_result] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_out] , literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[name_result] ):
identifier[log] . identifier[error] ( literal[string] . identifier[format] (
identifier[dir_out] ,
identifier[os] . identifier[listdir] ( identifier[dir_out] ),
identifier[output] ))
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[with] identifier[open] ( identifier[name_result] , literal[string] ) keyword[as] identifier[f] :
identifier[converted] = identifier[f] . identifier[read] ()
identifier[shutil] . identifier[rmtree] ( identifier[dir_out] )
identifier[shutil] . identifier[rmtree] ( identifier[dir_dwg] )
keyword[return] identifier[converted] | def _teigha_convert(data, extension='dwg'):
"""
Convert any DXF/DWG to R14 ASCII DXF using Teigha Converter.
Parameters
---------------
data : str or bytes
The contents of a DXF or DWG file
extension : str
The format of data: 'dwg' or 'dxf'
Returns
--------------
converted : str
Result as R14 ASCII DXF
"""
# temp directory for DWG file
dir_dwg = tempfile.mkdtemp()
# temp directory for DXF output
dir_out = tempfile.mkdtemp()
# put together the subprocess command
# suppress the GUI QT status bar
# use an automatic screen
# run the converter
# the directory containing DWG files
# the directory for output DXF files
# the revision of DXF
# the output format
# recurse input folder
cmd = [_xvfb_run, '-a', _teigha, dir_dwg, dir_out, 'ACAD14', 'DXF', '1', '1'] # audit each file
# if Xvfb is already running it probably
# has a working configuration so use it
running = b'Xvfb' in subprocess.check_output(['ps', '-eaf'])
# chop off XVFB if it isn't installed or is running
if running or _xvfb_run is None:
cmd = cmd[2:] # depends on [control=['if'], data=[]]
# create file in correct mode for data
if hasattr(data, 'encode'):
# data is a string which can be encoded to bytes
mode = 'w' # depends on [control=['if'], data=[]]
else:
# data is already bytes
mode = 'wb'
# write the file_obj in the temp directory
dwg_name = os.path.join(dir_dwg, 'drawing.' + extension)
with open(dwg_name, mode) as f:
f.write(data) # depends on [control=['with'], data=['f']]
# run the conversion
output = subprocess.check_output(cmd)
# load the ASCII DXF produced from the conversion
name_result = os.path.join(dir_out, 'drawing.dxf')
# if the conversion failed log things before failing
if not os.path.exists(name_result):
log.error('teigha convert failed!\nls {}: {}\n\n {}'.format(dir_out, os.listdir(dir_out), output))
raise ValueError('conversion using Teigha failed!') # depends on [control=['if'], data=[]]
# load converted file into a string
with open(name_result, 'r') as f:
converted = f.read() # depends on [control=['with'], data=['f']]
# remove the temporary directories
shutil.rmtree(dir_out)
shutil.rmtree(dir_dwg)
return converted |
def finalize(self, **kwargs):
"""
Finalize the drawing setting labels and title.
"""
# Set the title
self.set_title('Feature Importances of {} Features using {}'.format(
len(self.features_), self.name))
# Set the xlabel
self.ax.set_xlabel(self._get_xlabel())
# Remove the ygrid
self.ax.grid(False, axis='y')
if self.stack:
plt.legend(bbox_to_anchor=(1.04, 0.5), loc="center left")
# Ensure we have a tight fit
plt.tight_layout() | def function[finalize, parameter[self]]:
constant[
Finalize the drawing setting labels and title.
]
call[name[self].set_title, parameter[call[constant[Feature Importances of {} Features using {}].format, parameter[call[name[len], parameter[name[self].features_]], name[self].name]]]]
call[name[self].ax.set_xlabel, parameter[call[name[self]._get_xlabel, parameter[]]]]
call[name[self].ax.grid, parameter[constant[False]]]
if name[self].stack begin[:]
call[name[plt].legend, parameter[]]
call[name[plt].tight_layout, parameter[]] | keyword[def] identifier[finalize] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[set_title] ( literal[string] . identifier[format] (
identifier[len] ( identifier[self] . identifier[features_] ), identifier[self] . identifier[name] ))
identifier[self] . identifier[ax] . identifier[set_xlabel] ( identifier[self] . identifier[_get_xlabel] ())
identifier[self] . identifier[ax] . identifier[grid] ( keyword[False] , identifier[axis] = literal[string] )
keyword[if] identifier[self] . identifier[stack] :
identifier[plt] . identifier[legend] ( identifier[bbox_to_anchor] =( literal[int] , literal[int] ), identifier[loc] = literal[string] )
identifier[plt] . identifier[tight_layout] () | def finalize(self, **kwargs):
"""
Finalize the drawing setting labels and title.
"""
# Set the title
self.set_title('Feature Importances of {} Features using {}'.format(len(self.features_), self.name))
# Set the xlabel
self.ax.set_xlabel(self._get_xlabel())
# Remove the ygrid
self.ax.grid(False, axis='y')
if self.stack:
plt.legend(bbox_to_anchor=(1.04, 0.5), loc='center left') # depends on [control=['if'], data=[]]
# Ensure we have a tight fit
plt.tight_layout() |
def get_current(self):
"""Get current forecast."""
now = dt.now().timestamp()
url = build_url(self.api_key, self.spot_id, self.fields,
self.unit, now, now)
return get_msw(url) | def function[get_current, parameter[self]]:
constant[Get current forecast.]
variable[now] assign[=] call[call[name[dt].now, parameter[]].timestamp, parameter[]]
variable[url] assign[=] call[name[build_url], parameter[name[self].api_key, name[self].spot_id, name[self].fields, name[self].unit, name[now], name[now]]]
return[call[name[get_msw], parameter[name[url]]]] | keyword[def] identifier[get_current] ( identifier[self] ):
literal[string]
identifier[now] = identifier[dt] . identifier[now] (). identifier[timestamp] ()
identifier[url] = identifier[build_url] ( identifier[self] . identifier[api_key] , identifier[self] . identifier[spot_id] , identifier[self] . identifier[fields] ,
identifier[self] . identifier[unit] , identifier[now] , identifier[now] )
keyword[return] identifier[get_msw] ( identifier[url] ) | def get_current(self):
"""Get current forecast."""
now = dt.now().timestamp()
url = build_url(self.api_key, self.spot_id, self.fields, self.unit, now, now)
return get_msw(url) |
def subtags(self):
"""
Get the :class:`language_tags.Subtag.Subtag` objects of the tag.
:return: list of :class:`language_tags.Subtag.Subtag` objects that are part of the tag.
The return list can be empty.
"""
data = self.data
subtags = []
# if tag is grandfathered return no subtags
if 'record' in data and self.data['record']['Type'] == 'grandfathered':
return subtags
codes = data['tag'].split('-')
# Try and find the language tag.
for i, code in enumerate(codes):
# Singletons and anything after are unhandled.
if len(code) == 1:
#Stop the loop (stop processing after a singleton).
break
# Check for non-existent tag.
if code not in index:
continue
types = index[code]
# Language subtags may only appear at the beginning of the tag, otherwise the subtag type is indeterminate.
if 'language' in types and i == 0:
subtags.append(Subtag(code, 'language'))
continue
if len(code) == 2:
# Should be a region
if 'region' in types:
subtags.append(Subtag(code, 'region'))
# Error case: language subtag in the wrong place.
elif 'language' in types:
subtags.append(Subtag(code, 'language'))
elif len(code) == 3:
# Could be a numeric region code e.g. '001' for 'World'.
if 'region' in types:
subtags.append(Subtag(code, 'region'))
elif 'extlang' in types:
subtags.append(Subtag(code, 'extlang'))
# Error case: language subtag in the wrong place.
elif 'language' in types:
subtags.append(Subtag(code, 'language'))
elif len(code) == 4:
# Could be a numeric variant
if 'variant' in types:
subtags.append(Subtag(code, 'variant'))
elif 'script' in types:
subtags.append(Subtag(code, 'script'))
else:
# Should be a variant
if 'variant' in types:
subtags.append(Subtag(code, 'variant'))
return subtags | def function[subtags, parameter[self]]:
constant[
Get the :class:`language_tags.Subtag.Subtag` objects of the tag.
:return: list of :class:`language_tags.Subtag.Subtag` objects that are part of the tag.
The return list can be empty.
]
variable[data] assign[=] name[self].data
variable[subtags] assign[=] list[[]]
if <ast.BoolOp object at 0x7da1b2513520> begin[:]
return[name[subtags]]
variable[codes] assign[=] call[call[name[data]][constant[tag]].split, parameter[constant[-]]]
for taget[tuple[[<ast.Name object at 0x7da1b25127a0>, <ast.Name object at 0x7da1b2511450>]]] in starred[call[name[enumerate], parameter[name[codes]]]] begin[:]
if compare[call[name[len], parameter[name[code]]] equal[==] constant[1]] begin[:]
break
if compare[name[code] <ast.NotIn object at 0x7da2590d7190> name[index]] begin[:]
continue
variable[types] assign[=] call[name[index]][name[code]]
if <ast.BoolOp object at 0x7da18f00f070> begin[:]
call[name[subtags].append, parameter[call[name[Subtag], parameter[name[code], constant[language]]]]]
continue
if compare[call[name[len], parameter[name[code]]] equal[==] constant[2]] begin[:]
if compare[constant[region] in name[types]] begin[:]
call[name[subtags].append, parameter[call[name[Subtag], parameter[name[code], constant[region]]]]]
return[name[subtags]] | keyword[def] identifier[subtags] ( identifier[self] ):
literal[string]
identifier[data] = identifier[self] . identifier[data]
identifier[subtags] =[]
keyword[if] literal[string] keyword[in] identifier[data] keyword[and] identifier[self] . identifier[data] [ literal[string] ][ literal[string] ]== literal[string] :
keyword[return] identifier[subtags]
identifier[codes] = identifier[data] [ literal[string] ]. identifier[split] ( literal[string] )
keyword[for] identifier[i] , identifier[code] keyword[in] identifier[enumerate] ( identifier[codes] ):
keyword[if] identifier[len] ( identifier[code] )== literal[int] :
keyword[break]
keyword[if] identifier[code] keyword[not] keyword[in] identifier[index] :
keyword[continue]
identifier[types] = identifier[index] [ identifier[code] ]
keyword[if] literal[string] keyword[in] identifier[types] keyword[and] identifier[i] == literal[int] :
identifier[subtags] . identifier[append] ( identifier[Subtag] ( identifier[code] , literal[string] ))
keyword[continue]
keyword[if] identifier[len] ( identifier[code] )== literal[int] :
keyword[if] literal[string] keyword[in] identifier[types] :
identifier[subtags] . identifier[append] ( identifier[Subtag] ( identifier[code] , literal[string] ))
keyword[elif] literal[string] keyword[in] identifier[types] :
identifier[subtags] . identifier[append] ( identifier[Subtag] ( identifier[code] , literal[string] ))
keyword[elif] identifier[len] ( identifier[code] )== literal[int] :
keyword[if] literal[string] keyword[in] identifier[types] :
identifier[subtags] . identifier[append] ( identifier[Subtag] ( identifier[code] , literal[string] ))
keyword[elif] literal[string] keyword[in] identifier[types] :
identifier[subtags] . identifier[append] ( identifier[Subtag] ( identifier[code] , literal[string] ))
keyword[elif] literal[string] keyword[in] identifier[types] :
identifier[subtags] . identifier[append] ( identifier[Subtag] ( identifier[code] , literal[string] ))
keyword[elif] identifier[len] ( identifier[code] )== literal[int] :
keyword[if] literal[string] keyword[in] identifier[types] :
identifier[subtags] . identifier[append] ( identifier[Subtag] ( identifier[code] , literal[string] ))
keyword[elif] literal[string] keyword[in] identifier[types] :
identifier[subtags] . identifier[append] ( identifier[Subtag] ( identifier[code] , literal[string] ))
keyword[else] :
keyword[if] literal[string] keyword[in] identifier[types] :
identifier[subtags] . identifier[append] ( identifier[Subtag] ( identifier[code] , literal[string] ))
keyword[return] identifier[subtags] | def subtags(self):
"""
Get the :class:`language_tags.Subtag.Subtag` objects of the tag.
:return: list of :class:`language_tags.Subtag.Subtag` objects that are part of the tag.
The return list can be empty.
"""
data = self.data
subtags = []
# if tag is grandfathered return no subtags
if 'record' in data and self.data['record']['Type'] == 'grandfathered':
return subtags # depends on [control=['if'], data=[]]
codes = data['tag'].split('-')
# Try and find the language tag.
for (i, code) in enumerate(codes):
# Singletons and anything after are unhandled.
if len(code) == 1:
#Stop the loop (stop processing after a singleton).
break # depends on [control=['if'], data=[]]
# Check for non-existent tag.
if code not in index:
continue # depends on [control=['if'], data=[]]
types = index[code]
# Language subtags may only appear at the beginning of the tag, otherwise the subtag type is indeterminate.
if 'language' in types and i == 0:
subtags.append(Subtag(code, 'language'))
continue # depends on [control=['if'], data=[]]
if len(code) == 2:
# Should be a region
if 'region' in types:
subtags.append(Subtag(code, 'region')) # depends on [control=['if'], data=[]]
# Error case: language subtag in the wrong place.
elif 'language' in types:
subtags.append(Subtag(code, 'language')) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif len(code) == 3:
# Could be a numeric region code e.g. '001' for 'World'.
if 'region' in types:
subtags.append(Subtag(code, 'region')) # depends on [control=['if'], data=[]]
elif 'extlang' in types:
subtags.append(Subtag(code, 'extlang')) # depends on [control=['if'], data=[]]
# Error case: language subtag in the wrong place.
elif 'language' in types:
subtags.append(Subtag(code, 'language')) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif len(code) == 4:
# Could be a numeric variant
if 'variant' in types:
subtags.append(Subtag(code, 'variant')) # depends on [control=['if'], data=[]]
elif 'script' in types:
subtags.append(Subtag(code, 'script')) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Should be a variant
elif 'variant' in types:
subtags.append(Subtag(code, 'variant')) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return subtags |
def populate(self, priority, address, rtr, data):
"""
:return: None
"""
assert isinstance(data, bytes)
self.needs_low_priority(priority)
self.needs_no_rtr(rtr)
self.needs_data(data, 4)
self.set_attributes(priority, address, rtr)
self._day = data[0]
self._mon = data[1]
self._year = ((data[2] << 8) + data[3]) | def function[populate, parameter[self, priority, address, rtr, data]]:
constant[
:return: None
]
assert[call[name[isinstance], parameter[name[data], name[bytes]]]]
call[name[self].needs_low_priority, parameter[name[priority]]]
call[name[self].needs_no_rtr, parameter[name[rtr]]]
call[name[self].needs_data, parameter[name[data], constant[4]]]
call[name[self].set_attributes, parameter[name[priority], name[address], name[rtr]]]
name[self]._day assign[=] call[name[data]][constant[0]]
name[self]._mon assign[=] call[name[data]][constant[1]]
name[self]._year assign[=] binary_operation[binary_operation[call[name[data]][constant[2]] <ast.LShift object at 0x7da2590d69e0> constant[8]] + call[name[data]][constant[3]]] | keyword[def] identifier[populate] ( identifier[self] , identifier[priority] , identifier[address] , identifier[rtr] , identifier[data] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[data] , identifier[bytes] )
identifier[self] . identifier[needs_low_priority] ( identifier[priority] )
identifier[self] . identifier[needs_no_rtr] ( identifier[rtr] )
identifier[self] . identifier[needs_data] ( identifier[data] , literal[int] )
identifier[self] . identifier[set_attributes] ( identifier[priority] , identifier[address] , identifier[rtr] )
identifier[self] . identifier[_day] = identifier[data] [ literal[int] ]
identifier[self] . identifier[_mon] = identifier[data] [ literal[int] ]
identifier[self] . identifier[_year] =(( identifier[data] [ literal[int] ]<< literal[int] )+ identifier[data] [ literal[int] ]) | def populate(self, priority, address, rtr, data):
"""
:return: None
"""
assert isinstance(data, bytes)
self.needs_low_priority(priority)
self.needs_no_rtr(rtr)
self.needs_data(data, 4)
self.set_attributes(priority, address, rtr)
self._day = data[0]
self._mon = data[1]
self._year = (data[2] << 8) + data[3] |
def add_line(self, string):
"""
Adds a line to the LISP code to execute
:param string: The line to add
:return: None
"""
self.code_strings.append(string)
code = ''
if len(self.code_strings) == 1:
code = '(setv result ' + self.code_strings[0] + ')'
if len(self.code_strings) > 1:
code = '(setv result (and ' + ' '.join(self.code_strings) + '))'
self._compiled_ast_and_expr = self.__compile_code(code_string=code) | def function[add_line, parameter[self, string]]:
constant[
Adds a line to the LISP code to execute
:param string: The line to add
:return: None
]
call[name[self].code_strings.append, parameter[name[string]]]
variable[code] assign[=] constant[]
if compare[call[name[len], parameter[name[self].code_strings]] equal[==] constant[1]] begin[:]
variable[code] assign[=] binary_operation[binary_operation[constant[(setv result ] + call[name[self].code_strings][constant[0]]] + constant[)]]
if compare[call[name[len], parameter[name[self].code_strings]] greater[>] constant[1]] begin[:]
variable[code] assign[=] binary_operation[binary_operation[constant[(setv result (and ] + call[constant[ ].join, parameter[name[self].code_strings]]] + constant[))]]
name[self]._compiled_ast_and_expr assign[=] call[name[self].__compile_code, parameter[]] | keyword[def] identifier[add_line] ( identifier[self] , identifier[string] ):
literal[string]
identifier[self] . identifier[code_strings] . identifier[append] ( identifier[string] )
identifier[code] = literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[code_strings] )== literal[int] :
identifier[code] = literal[string] + identifier[self] . identifier[code_strings] [ literal[int] ]+ literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[code_strings] )> literal[int] :
identifier[code] = literal[string] + literal[string] . identifier[join] ( identifier[self] . identifier[code_strings] )+ literal[string]
identifier[self] . identifier[_compiled_ast_and_expr] = identifier[self] . identifier[__compile_code] ( identifier[code_string] = identifier[code] ) | def add_line(self, string):
"""
Adds a line to the LISP code to execute
:param string: The line to add
:return: None
"""
self.code_strings.append(string)
code = ''
if len(self.code_strings) == 1:
code = '(setv result ' + self.code_strings[0] + ')' # depends on [control=['if'], data=[]]
if len(self.code_strings) > 1:
code = '(setv result (and ' + ' '.join(self.code_strings) + '))' # depends on [control=['if'], data=[]]
self._compiled_ast_and_expr = self.__compile_code(code_string=code) |
def run(self, deploy_attempted=False):
'''
Execute the routine, the routine can be either:
1. Execute a raw shell command
2. Execute a wrapper func
3. Execute a remote Salt command
If a (re)deploy is needed, then retry the operation after a deploy
attempt
Returns tuple of (stdout, stderr, retcode)
'''
stdout = stderr = retcode = None
if self.opts.get('raw_shell', False):
cmd_str = ' '.join([self._escape_arg(arg) for arg in self.argv])
stdout, stderr, retcode = self.shell.exec_cmd(cmd_str)
elif self.fun in self.wfuncs or self.mine:
stdout, retcode = self.run_wfunc()
else:
stdout, stderr, retcode = self.cmd_block()
return stdout, stderr, retcode | def function[run, parameter[self, deploy_attempted]]:
constant[
Execute the routine, the routine can be either:
1. Execute a raw shell command
2. Execute a wrapper func
3. Execute a remote Salt command
If a (re)deploy is needed, then retry the operation after a deploy
attempt
Returns tuple of (stdout, stderr, retcode)
]
variable[stdout] assign[=] constant[None]
if call[name[self].opts.get, parameter[constant[raw_shell], constant[False]]] begin[:]
variable[cmd_str] assign[=] call[constant[ ].join, parameter[<ast.ListComp object at 0x7da18f811bd0>]]
<ast.Tuple object at 0x7da18f813e80> assign[=] call[name[self].shell.exec_cmd, parameter[name[cmd_str]]]
return[tuple[[<ast.Name object at 0x7da18f810c10>, <ast.Name object at 0x7da18f811a80>, <ast.Name object at 0x7da18f8117b0>]]] | keyword[def] identifier[run] ( identifier[self] , identifier[deploy_attempted] = keyword[False] ):
literal[string]
identifier[stdout] = identifier[stderr] = identifier[retcode] = keyword[None]
keyword[if] identifier[self] . identifier[opts] . identifier[get] ( literal[string] , keyword[False] ):
identifier[cmd_str] = literal[string] . identifier[join] ([ identifier[self] . identifier[_escape_arg] ( identifier[arg] ) keyword[for] identifier[arg] keyword[in] identifier[self] . identifier[argv] ])
identifier[stdout] , identifier[stderr] , identifier[retcode] = identifier[self] . identifier[shell] . identifier[exec_cmd] ( identifier[cmd_str] )
keyword[elif] identifier[self] . identifier[fun] keyword[in] identifier[self] . identifier[wfuncs] keyword[or] identifier[self] . identifier[mine] :
identifier[stdout] , identifier[retcode] = identifier[self] . identifier[run_wfunc] ()
keyword[else] :
identifier[stdout] , identifier[stderr] , identifier[retcode] = identifier[self] . identifier[cmd_block] ()
keyword[return] identifier[stdout] , identifier[stderr] , identifier[retcode] | def run(self, deploy_attempted=False):
"""
Execute the routine, the routine can be either:
1. Execute a raw shell command
2. Execute a wrapper func
3. Execute a remote Salt command
If a (re)deploy is needed, then retry the operation after a deploy
attempt
Returns tuple of (stdout, stderr, retcode)
"""
stdout = stderr = retcode = None
if self.opts.get('raw_shell', False):
cmd_str = ' '.join([self._escape_arg(arg) for arg in self.argv])
(stdout, stderr, retcode) = self.shell.exec_cmd(cmd_str) # depends on [control=['if'], data=[]]
elif self.fun in self.wfuncs or self.mine:
(stdout, retcode) = self.run_wfunc() # depends on [control=['if'], data=[]]
else:
(stdout, stderr, retcode) = self.cmd_block()
return (stdout, stderr, retcode) |
def readBytes(self):
"""
Reads and returns a utf-8 encoded byte array.
"""
length, is_reference = self._readLength()
if is_reference:
return self.context.getString(length)
if length == 0:
return ''
result = self.stream.read(length)
self.context.addString(result)
return result | def function[readBytes, parameter[self]]:
constant[
Reads and returns a utf-8 encoded byte array.
]
<ast.Tuple object at 0x7da1b1451060> assign[=] call[name[self]._readLength, parameter[]]
if name[is_reference] begin[:]
return[call[name[self].context.getString, parameter[name[length]]]]
if compare[name[length] equal[==] constant[0]] begin[:]
return[constant[]]
variable[result] assign[=] call[name[self].stream.read, parameter[name[length]]]
call[name[self].context.addString, parameter[name[result]]]
return[name[result]] | keyword[def] identifier[readBytes] ( identifier[self] ):
literal[string]
identifier[length] , identifier[is_reference] = identifier[self] . identifier[_readLength] ()
keyword[if] identifier[is_reference] :
keyword[return] identifier[self] . identifier[context] . identifier[getString] ( identifier[length] )
keyword[if] identifier[length] == literal[int] :
keyword[return] literal[string]
identifier[result] = identifier[self] . identifier[stream] . identifier[read] ( identifier[length] )
identifier[self] . identifier[context] . identifier[addString] ( identifier[result] )
keyword[return] identifier[result] | def readBytes(self):
"""
Reads and returns a utf-8 encoded byte array.
"""
(length, is_reference) = self._readLength()
if is_reference:
return self.context.getString(length) # depends on [control=['if'], data=[]]
if length == 0:
return '' # depends on [control=['if'], data=[]]
result = self.stream.read(length)
self.context.addString(result)
return result |
def get_multiplicon_segments(self, value):
""" Return a dictionary describing the genome segments that
contribute to the named multiplicon, keyed by genome, with
(start feature, end feature) tuples.
"""
sql = '''SELECT genome, first, last FROM segments
WHERE multiplicon=:mp'''
cur = self._dbconn.cursor()
cur.execute(sql, {'mp': str(value)})
result = cur.fetchall()
cur.close()
segdict = collections.defaultdict(tuple)
for genome, start, end in result:
segdict[genome] = (start, end)
return segdict | def function[get_multiplicon_segments, parameter[self, value]]:
constant[ Return a dictionary describing the genome segments that
contribute to the named multiplicon, keyed by genome, with
(start feature, end feature) tuples.
]
variable[sql] assign[=] constant[SELECT genome, first, last FROM segments
WHERE multiplicon=:mp]
variable[cur] assign[=] call[name[self]._dbconn.cursor, parameter[]]
call[name[cur].execute, parameter[name[sql], dictionary[[<ast.Constant object at 0x7da1b0ae0b50>], [<ast.Call object at 0x7da1b0ae3a30>]]]]
variable[result] assign[=] call[name[cur].fetchall, parameter[]]
call[name[cur].close, parameter[]]
variable[segdict] assign[=] call[name[collections].defaultdict, parameter[name[tuple]]]
for taget[tuple[[<ast.Name object at 0x7da1b0ae0df0>, <ast.Name object at 0x7da1b0ae3280>, <ast.Name object at 0x7da1b0ae39a0>]]] in starred[name[result]] begin[:]
call[name[segdict]][name[genome]] assign[=] tuple[[<ast.Name object at 0x7da1b0ae2560>, <ast.Name object at 0x7da1b0ae1450>]]
return[name[segdict]] | keyword[def] identifier[get_multiplicon_segments] ( identifier[self] , identifier[value] ):
literal[string]
identifier[sql] = literal[string]
identifier[cur] = identifier[self] . identifier[_dbconn] . identifier[cursor] ()
identifier[cur] . identifier[execute] ( identifier[sql] ,{ literal[string] : identifier[str] ( identifier[value] )})
identifier[result] = identifier[cur] . identifier[fetchall] ()
identifier[cur] . identifier[close] ()
identifier[segdict] = identifier[collections] . identifier[defaultdict] ( identifier[tuple] )
keyword[for] identifier[genome] , identifier[start] , identifier[end] keyword[in] identifier[result] :
identifier[segdict] [ identifier[genome] ]=( identifier[start] , identifier[end] )
keyword[return] identifier[segdict] | def get_multiplicon_segments(self, value):
""" Return a dictionary describing the genome segments that
contribute to the named multiplicon, keyed by genome, with
(start feature, end feature) tuples.
"""
sql = 'SELECT genome, first, last FROM segments\n WHERE multiplicon=:mp'
cur = self._dbconn.cursor()
cur.execute(sql, {'mp': str(value)})
result = cur.fetchall()
cur.close()
segdict = collections.defaultdict(tuple)
for (genome, start, end) in result:
segdict[genome] = (start, end) # depends on [control=['for'], data=[]]
return segdict |
def done(self, taskid):
'''Mark task done'''
if taskid in self.processing:
self.mutex.acquire()
if taskid in self.processing:
del self.processing[taskid]
self.mutex.release()
return True
return False | def function[done, parameter[self, taskid]]:
constant[Mark task done]
if compare[name[taskid] in name[self].processing] begin[:]
call[name[self].mutex.acquire, parameter[]]
if compare[name[taskid] in name[self].processing] begin[:]
<ast.Delete object at 0x7da1b208f5e0>
call[name[self].mutex.release, parameter[]]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[done] ( identifier[self] , identifier[taskid] ):
literal[string]
keyword[if] identifier[taskid] keyword[in] identifier[self] . identifier[processing] :
identifier[self] . identifier[mutex] . identifier[acquire] ()
keyword[if] identifier[taskid] keyword[in] identifier[self] . identifier[processing] :
keyword[del] identifier[self] . identifier[processing] [ identifier[taskid] ]
identifier[self] . identifier[mutex] . identifier[release] ()
keyword[return] keyword[True]
keyword[return] keyword[False] | def done(self, taskid):
"""Mark task done"""
if taskid in self.processing:
self.mutex.acquire()
if taskid in self.processing:
del self.processing[taskid] # depends on [control=['if'], data=['taskid']]
self.mutex.release()
return True # depends on [control=['if'], data=['taskid']]
return False |
def symbol(self, ident, bp=0):
'''
Gets (and create if not exists) as named symbol.
Optionally, you can specify a binding power (bp) value, which will be used
to control operator presedence; the higher the value, the tighter a token
binds to the tokens that follow.
'''
try:
s = self.symbols[ident]
except KeyError:
class s(SymbolBase):
pass
s.__name__ = 'symbol-%s' % (ident,)
s.ident = ident
s.lbp = bp
self.symbols[ident] = s
else:
s.lbp = max(bp, s.lbp)
return s | def function[symbol, parameter[self, ident, bp]]:
constant[
Gets (and create if not exists) as named symbol.
Optionally, you can specify a binding power (bp) value, which will be used
to control operator presedence; the higher the value, the tighter a token
binds to the tokens that follow.
]
<ast.Try object at 0x7da20c6a9990>
return[name[s]] | keyword[def] identifier[symbol] ( identifier[self] , identifier[ident] , identifier[bp] = literal[int] ):
literal[string]
keyword[try] :
identifier[s] = identifier[self] . identifier[symbols] [ identifier[ident] ]
keyword[except] identifier[KeyError] :
keyword[class] identifier[s] ( identifier[SymbolBase] ):
keyword[pass]
identifier[s] . identifier[__name__] = literal[string] %( identifier[ident] ,)
identifier[s] . identifier[ident] = identifier[ident]
identifier[s] . identifier[lbp] = identifier[bp]
identifier[self] . identifier[symbols] [ identifier[ident] ]= identifier[s]
keyword[else] :
identifier[s] . identifier[lbp] = identifier[max] ( identifier[bp] , identifier[s] . identifier[lbp] )
keyword[return] identifier[s] | def symbol(self, ident, bp=0):
"""
Gets (and create if not exists) as named symbol.
Optionally, you can specify a binding power (bp) value, which will be used
to control operator presedence; the higher the value, the tighter a token
binds to the tokens that follow.
"""
try:
s = self.symbols[ident] # depends on [control=['try'], data=[]]
except KeyError:
class s(SymbolBase):
pass
s.__name__ = 'symbol-%s' % (ident,)
s.ident = ident
s.lbp = bp
self.symbols[ident] = s # depends on [control=['except'], data=[]]
else:
s.lbp = max(bp, s.lbp)
return s |
def mirror(
src_fs, # type: Union[FS, Text]
dst_fs, # type: Union[FS, Text]
walker=None, # type: Optional[Walker]
copy_if_newer=True, # type: bool
workers=0, # type: int
):
# type: (...) -> None
"""Mirror files / directories from one filesystem to another.
Mirroring a filesystem will create an exact copy of ``src_fs`` on
``dst_fs``, by removing any files / directories on the destination
that aren't on the source, and copying files that aren't.
Arguments:
src_fs (FS or str): Source filesystem (URL or instance).
dst_fs (FS or str): Destination filesystem (URL or instance).
walker (~fs.walk.Walker, optional): An optional walker instance.
copy_if_newer (bool): Only copy newer files (the default).
workers (int): Number of worker threads used
(0 for single threaded). Set to a relatively low number
for network filesystems, 4 would be a good start.
"""
def src():
return manage_fs(src_fs, writeable=False)
def dst():
return manage_fs(dst_fs, create=True)
with src() as _src_fs, dst() as _dst_fs:
with _src_fs.lock(), _dst_fs.lock():
_thread_safe = is_thread_safe(_src_fs, _dst_fs)
with Copier(num_workers=workers if _thread_safe else 0) as copier:
_mirror(
_src_fs,
_dst_fs,
walker=walker,
copy_if_newer=copy_if_newer,
copy_file=copier.copy,
) | def function[mirror, parameter[src_fs, dst_fs, walker, copy_if_newer, workers]]:
constant[Mirror files / directories from one filesystem to another.
Mirroring a filesystem will create an exact copy of ``src_fs`` on
``dst_fs``, by removing any files / directories on the destination
that aren't on the source, and copying files that aren't.
Arguments:
src_fs (FS or str): Source filesystem (URL or instance).
dst_fs (FS or str): Destination filesystem (URL or instance).
walker (~fs.walk.Walker, optional): An optional walker instance.
copy_if_newer (bool): Only copy newer files (the default).
workers (int): Number of worker threads used
(0 for single threaded). Set to a relatively low number
for network filesystems, 4 would be a good start.
]
def function[src, parameter[]]:
return[call[name[manage_fs], parameter[name[src_fs]]]]
def function[dst, parameter[]]:
return[call[name[manage_fs], parameter[name[dst_fs]]]]
with call[name[src], parameter[]] begin[:]
with call[name[_src_fs].lock, parameter[]] begin[:]
variable[_thread_safe] assign[=] call[name[is_thread_safe], parameter[name[_src_fs], name[_dst_fs]]]
with call[name[Copier], parameter[]] begin[:]
call[name[_mirror], parameter[name[_src_fs], name[_dst_fs]]] | keyword[def] identifier[mirror] (
identifier[src_fs] ,
identifier[dst_fs] ,
identifier[walker] = keyword[None] ,
identifier[copy_if_newer] = keyword[True] ,
identifier[workers] = literal[int] ,
):
literal[string]
keyword[def] identifier[src] ():
keyword[return] identifier[manage_fs] ( identifier[src_fs] , identifier[writeable] = keyword[False] )
keyword[def] identifier[dst] ():
keyword[return] identifier[manage_fs] ( identifier[dst_fs] , identifier[create] = keyword[True] )
keyword[with] identifier[src] () keyword[as] identifier[_src_fs] , identifier[dst] () keyword[as] identifier[_dst_fs] :
keyword[with] identifier[_src_fs] . identifier[lock] (), identifier[_dst_fs] . identifier[lock] ():
identifier[_thread_safe] = identifier[is_thread_safe] ( identifier[_src_fs] , identifier[_dst_fs] )
keyword[with] identifier[Copier] ( identifier[num_workers] = identifier[workers] keyword[if] identifier[_thread_safe] keyword[else] literal[int] ) keyword[as] identifier[copier] :
identifier[_mirror] (
identifier[_src_fs] ,
identifier[_dst_fs] ,
identifier[walker] = identifier[walker] ,
identifier[copy_if_newer] = identifier[copy_if_newer] ,
identifier[copy_file] = identifier[copier] . identifier[copy] ,
) | def mirror(src_fs, dst_fs, walker=None, copy_if_newer=True, workers=0): # type: Union[FS, Text]
# type: Union[FS, Text]
# type: Optional[Walker]
# type: bool
# type: int
# type: (...) -> None
"Mirror files / directories from one filesystem to another.\n\n Mirroring a filesystem will create an exact copy of ``src_fs`` on\n ``dst_fs``, by removing any files / directories on the destination\n that aren't on the source, and copying files that aren't.\n\n Arguments:\n src_fs (FS or str): Source filesystem (URL or instance).\n dst_fs (FS or str): Destination filesystem (URL or instance).\n walker (~fs.walk.Walker, optional): An optional walker instance.\n copy_if_newer (bool): Only copy newer files (the default).\n workers (int): Number of worker threads used\n (0 for single threaded). Set to a relatively low number\n for network filesystems, 4 would be a good start.\n "
def src():
return manage_fs(src_fs, writeable=False)
def dst():
return manage_fs(dst_fs, create=True)
with src() as _src_fs, dst() as _dst_fs:
with _src_fs.lock(), _dst_fs.lock():
_thread_safe = is_thread_safe(_src_fs, _dst_fs)
with Copier(num_workers=workers if _thread_safe else 0) as copier:
_mirror(_src_fs, _dst_fs, walker=walker, copy_if_newer=copy_if_newer, copy_file=copier.copy) # depends on [control=['with'], data=['copier']] # depends on [control=['with'], data=[]] # depends on [control=['with'], data=['_src_fs']] |
def _Open(self, path_spec=None, mode='rb'):
"""Opens the file-like object defined by path specification.
Args:
path_spec (PathSpec): path specification.
mode (Optional[str]): file access mode.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file-like object could not be opened.
OSError: if the file-like object could not be opened.
PathSpecError: if the path specification is incorrect.
ValueError: if the path specification is invalid.
"""
if not path_spec:
raise ValueError('Missing path specification.')
if path_spec.HasParent():
raise errors.PathSpecError('Unsupported path specification with parent.')
location = getattr(path_spec, 'location', None)
if location is None:
raise errors.PathSpecError('Path specification missing location.')
# Windows does not support running os.stat on device files so we use
# libsmdev to do an initial check.
try:
is_device = pysmdev.check_device(location)
except IOError as exception:
# Since os.stat() will not recognize Windows device file names and
# will return '[Error 87] The parameter is incorrect' we check here
# if pysmdev exception message contains ' access denied ' and raise
# AccessError instead.
# Note that exception.message no longer works in Python 3.
exception_string = str(exception)
if not isinstance(exception_string, py2to3.UNICODE_TYPE):
exception_string = py2to3.UNICODE_TYPE(
exception_string, errors='replace')
if ' access denied ' in exception_string:
raise errors.AccessError(
'Access denied to file: {0:s} with error: {1!s}'.format(
location, exception_string))
is_device = False
if not is_device:
try:
stat_info = os.stat(location)
except OSError as exception:
raise IOError('Unable to open file with error: {0!s}.'.format(
exception))
# In case the libsmdev check is not able to detect the device also use
# the stat information.
if stat.S_ISCHR(stat_info.st_mode) or stat.S_ISBLK(stat_info.st_mode):
is_device = True
if is_device:
self._file_object = pysmdev.handle()
self._file_object.open(location, mode=mode)
self._size = self._file_object.media_size
else:
self._file_object = open(location, mode=mode)
self._size = stat_info.st_size | def function[_Open, parameter[self, path_spec, mode]]:
constant[Opens the file-like object defined by path specification.
Args:
path_spec (PathSpec): path specification.
mode (Optional[str]): file access mode.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file-like object could not be opened.
OSError: if the file-like object could not be opened.
PathSpecError: if the path specification is incorrect.
ValueError: if the path specification is invalid.
]
if <ast.UnaryOp object at 0x7da1b07b9a80> begin[:]
<ast.Raise object at 0x7da1b07bb130>
if call[name[path_spec].HasParent, parameter[]] begin[:]
<ast.Raise object at 0x7da1b07b9450>
variable[location] assign[=] call[name[getattr], parameter[name[path_spec], constant[location], constant[None]]]
if compare[name[location] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b07bb8b0>
<ast.Try object at 0x7da1b07bb970>
if <ast.UnaryOp object at 0x7da1b07bbc10> begin[:]
<ast.Try object at 0x7da1b07bb0d0>
if <ast.BoolOp object at 0x7da1b07ba530> begin[:]
variable[is_device] assign[=] constant[True]
if name[is_device] begin[:]
name[self]._file_object assign[=] call[name[pysmdev].handle, parameter[]]
call[name[self]._file_object.open, parameter[name[location]]]
name[self]._size assign[=] name[self]._file_object.media_size | keyword[def] identifier[_Open] ( identifier[self] , identifier[path_spec] = keyword[None] , identifier[mode] = literal[string] ):
literal[string]
keyword[if] keyword[not] identifier[path_spec] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[path_spec] . identifier[HasParent] ():
keyword[raise] identifier[errors] . identifier[PathSpecError] ( literal[string] )
identifier[location] = identifier[getattr] ( identifier[path_spec] , literal[string] , keyword[None] )
keyword[if] identifier[location] keyword[is] keyword[None] :
keyword[raise] identifier[errors] . identifier[PathSpecError] ( literal[string] )
keyword[try] :
identifier[is_device] = identifier[pysmdev] . identifier[check_device] ( identifier[location] )
keyword[except] identifier[IOError] keyword[as] identifier[exception] :
identifier[exception_string] = identifier[str] ( identifier[exception] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[exception_string] , identifier[py2to3] . identifier[UNICODE_TYPE] ):
identifier[exception_string] = identifier[py2to3] . identifier[UNICODE_TYPE] (
identifier[exception_string] , identifier[errors] = literal[string] )
keyword[if] literal[string] keyword[in] identifier[exception_string] :
keyword[raise] identifier[errors] . identifier[AccessError] (
literal[string] . identifier[format] (
identifier[location] , identifier[exception_string] ))
identifier[is_device] = keyword[False]
keyword[if] keyword[not] identifier[is_device] :
keyword[try] :
identifier[stat_info] = identifier[os] . identifier[stat] ( identifier[location] )
keyword[except] identifier[OSError] keyword[as] identifier[exception] :
keyword[raise] identifier[IOError] ( literal[string] . identifier[format] (
identifier[exception] ))
keyword[if] identifier[stat] . identifier[S_ISCHR] ( identifier[stat_info] . identifier[st_mode] ) keyword[or] identifier[stat] . identifier[S_ISBLK] ( identifier[stat_info] . identifier[st_mode] ):
identifier[is_device] = keyword[True]
keyword[if] identifier[is_device] :
identifier[self] . identifier[_file_object] = identifier[pysmdev] . identifier[handle] ()
identifier[self] . identifier[_file_object] . identifier[open] ( identifier[location] , identifier[mode] = identifier[mode] )
identifier[self] . identifier[_size] = identifier[self] . identifier[_file_object] . identifier[media_size]
keyword[else] :
identifier[self] . identifier[_file_object] = identifier[open] ( identifier[location] , identifier[mode] = identifier[mode] )
identifier[self] . identifier[_size] = identifier[stat_info] . identifier[st_size] | def _Open(self, path_spec=None, mode='rb'):
"""Opens the file-like object defined by path specification.
Args:
path_spec (PathSpec): path specification.
mode (Optional[str]): file access mode.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file-like object could not be opened.
OSError: if the file-like object could not be opened.
PathSpecError: if the path specification is incorrect.
ValueError: if the path specification is invalid.
"""
if not path_spec:
raise ValueError('Missing path specification.') # depends on [control=['if'], data=[]]
if path_spec.HasParent():
raise errors.PathSpecError('Unsupported path specification with parent.') # depends on [control=['if'], data=[]]
location = getattr(path_spec, 'location', None)
if location is None:
raise errors.PathSpecError('Path specification missing location.') # depends on [control=['if'], data=[]]
# Windows does not support running os.stat on device files so we use
# libsmdev to do an initial check.
try:
is_device = pysmdev.check_device(location) # depends on [control=['try'], data=[]]
except IOError as exception:
# Since os.stat() will not recognize Windows device file names and
# will return '[Error 87] The parameter is incorrect' we check here
# if pysmdev exception message contains ' access denied ' and raise
# AccessError instead.
# Note that exception.message no longer works in Python 3.
exception_string = str(exception)
if not isinstance(exception_string, py2to3.UNICODE_TYPE):
exception_string = py2to3.UNICODE_TYPE(exception_string, errors='replace') # depends on [control=['if'], data=[]]
if ' access denied ' in exception_string:
raise errors.AccessError('Access denied to file: {0:s} with error: {1!s}'.format(location, exception_string)) # depends on [control=['if'], data=['exception_string']]
is_device = False # depends on [control=['except'], data=['exception']]
if not is_device:
try:
stat_info = os.stat(location) # depends on [control=['try'], data=[]]
except OSError as exception:
raise IOError('Unable to open file with error: {0!s}.'.format(exception)) # depends on [control=['except'], data=['exception']]
# In case the libsmdev check is not able to detect the device also use
# the stat information.
if stat.S_ISCHR(stat_info.st_mode) or stat.S_ISBLK(stat_info.st_mode):
is_device = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if is_device:
self._file_object = pysmdev.handle()
self._file_object.open(location, mode=mode)
self._size = self._file_object.media_size # depends on [control=['if'], data=[]]
else:
self._file_object = open(location, mode=mode)
self._size = stat_info.st_size |
def patch_namespaced_stateful_set(self, name, namespace, body, **kwargs): # noqa: E501
"""patch_namespaced_stateful_set # noqa: E501
partially update the specified StatefulSet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_stateful_set(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the StatefulSet (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param UNKNOWN_BASE_TYPE body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1StatefulSet
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_stateful_set_with_http_info(name, namespace, body, **kwargs) # noqa: E501
else:
(data) = self.patch_namespaced_stateful_set_with_http_info(name, namespace, body, **kwargs) # noqa: E501
return data | def function[patch_namespaced_stateful_set, parameter[self, name, namespace, body]]:
constant[patch_namespaced_stateful_set # noqa: E501
partially update the specified StatefulSet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_stateful_set(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the StatefulSet (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param UNKNOWN_BASE_TYPE body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1StatefulSet
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].patch_namespaced_stateful_set_with_http_info, parameter[name[name], name[namespace], name[body]]]] | keyword[def] identifier[patch_namespaced_stateful_set] ( identifier[self] , identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[patch_namespaced_stateful_set_with_http_info] ( identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[patch_namespaced_stateful_set_with_http_info] ( identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] )
keyword[return] identifier[data] | def patch_namespaced_stateful_set(self, name, namespace, body, **kwargs): # noqa: E501
"patch_namespaced_stateful_set # noqa: E501\n\n partially update the specified StatefulSet # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.patch_namespaced_stateful_set(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the StatefulSet (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param UNKNOWN_BASE_TYPE body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1StatefulSet\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_stateful_set_with_http_info(name, namespace, body, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.patch_namespaced_stateful_set_with_http_info(name, namespace, body, **kwargs) # noqa: E501
return data |
def list_migration_issues_accounts(self, account_id, content_migration_id):
"""
List migration issues.
Returns paginated migration issues
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - account_id
"""ID"""
path["account_id"] = account_id
# REQUIRED - PATH - content_migration_id
"""ID"""
path["content_migration_id"] = content_migration_id
self.logger.debug("GET /api/v1/accounts/{account_id}/content_migrations/{content_migration_id}/migration_issues with query params: {params} and form data: {data}".format(params=params, data=data, **path))
return self.generic_request("GET", "/api/v1/accounts/{account_id}/content_migrations/{content_migration_id}/migration_issues".format(**path), data=data, params=params, all_pages=True) | def function[list_migration_issues_accounts, parameter[self, account_id, content_migration_id]]:
constant[
List migration issues.
Returns paginated migration issues
]
variable[path] assign[=] dictionary[[], []]
variable[data] assign[=] dictionary[[], []]
variable[params] assign[=] dictionary[[], []]
constant[ID]
call[name[path]][constant[account_id]] assign[=] name[account_id]
constant[ID]
call[name[path]][constant[content_migration_id]] assign[=] name[content_migration_id]
call[name[self].logger.debug, parameter[call[constant[GET /api/v1/accounts/{account_id}/content_migrations/{content_migration_id}/migration_issues with query params: {params} and form data: {data}].format, parameter[]]]]
return[call[name[self].generic_request, parameter[constant[GET], call[constant[/api/v1/accounts/{account_id}/content_migrations/{content_migration_id}/migration_issues].format, parameter[]]]]] | keyword[def] identifier[list_migration_issues_accounts] ( identifier[self] , identifier[account_id] , identifier[content_migration_id] ):
literal[string]
identifier[path] ={}
identifier[data] ={}
identifier[params] ={}
literal[string]
identifier[path] [ literal[string] ]= identifier[account_id]
literal[string]
identifier[path] [ literal[string] ]= identifier[content_migration_id]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[params] = identifier[params] , identifier[data] = identifier[data] ,** identifier[path] ))
keyword[return] identifier[self] . identifier[generic_request] ( literal[string] , literal[string] . identifier[format] (** identifier[path] ), identifier[data] = identifier[data] , identifier[params] = identifier[params] , identifier[all_pages] = keyword[True] ) | def list_migration_issues_accounts(self, account_id, content_migration_id):
"""
List migration issues.
Returns paginated migration issues
"""
path = {}
data = {}
params = {} # REQUIRED - PATH - account_id
'ID'
path['account_id'] = account_id # REQUIRED - PATH - content_migration_id
'ID'
path['content_migration_id'] = content_migration_id
self.logger.debug('GET /api/v1/accounts/{account_id}/content_migrations/{content_migration_id}/migration_issues with query params: {params} and form data: {data}'.format(params=params, data=data, **path))
return self.generic_request('GET', '/api/v1/accounts/{account_id}/content_migrations/{content_migration_id}/migration_issues'.format(**path), data=data, params=params, all_pages=True) |
def normalize(X, norm):
'''Applies the given norm to the input data set
Parameters:
X (numpy.ndarray): A 3D numpy ndarray in which the rows represent examples
while the columns, features of the data set you want to normalize. Every
depth corresponds to data for a particular class
norm (tuple): A tuple containing two 1D numpy ndarrays corresponding to the
normalization parameters extracted with :py:func:`estimated_norm` above.
Returns:
numpy.ndarray: A 3D numpy ndarray with the same dimensions as the input
array ``X``, but with its values normalized according to the norm input.
'''
return numpy.array([(k - norm[0]) / norm[1] for k in X]) | def function[normalize, parameter[X, norm]]:
constant[Applies the given norm to the input data set
Parameters:
X (numpy.ndarray): A 3D numpy ndarray in which the rows represent examples
while the columns, features of the data set you want to normalize. Every
depth corresponds to data for a particular class
norm (tuple): A tuple containing two 1D numpy ndarrays corresponding to the
normalization parameters extracted with :py:func:`estimated_norm` above.
Returns:
numpy.ndarray: A 3D numpy ndarray with the same dimensions as the input
array ``X``, but with its values normalized according to the norm input.
]
return[call[name[numpy].array, parameter[<ast.ListComp object at 0x7da20c7ca3e0>]]] | keyword[def] identifier[normalize] ( identifier[X] , identifier[norm] ):
literal[string]
keyword[return] identifier[numpy] . identifier[array] ([( identifier[k] - identifier[norm] [ literal[int] ])/ identifier[norm] [ literal[int] ] keyword[for] identifier[k] keyword[in] identifier[X] ]) | def normalize(X, norm):
"""Applies the given norm to the input data set
Parameters:
X (numpy.ndarray): A 3D numpy ndarray in which the rows represent examples
while the columns, features of the data set you want to normalize. Every
depth corresponds to data for a particular class
norm (tuple): A tuple containing two 1D numpy ndarrays corresponding to the
normalization parameters extracted with :py:func:`estimated_norm` above.
Returns:
numpy.ndarray: A 3D numpy ndarray with the same dimensions as the input
array ``X``, but with its values normalized according to the norm input.
"""
return numpy.array([(k - norm[0]) / norm[1] for k in X]) |
def msg_curse(self, args=None, max_width=None):
"""Return the dict to display in the curse interface."""
# Init the return message
ret = []
# Only process if stats exist, not empty (issue #871) and plugin not disabled
if not self.stats or (self.stats == {}) or self.is_disable():
return ret
# Build the string message
# Header
msg = '{:8}'.format('LOAD')
ret.append(self.curse_add_line(msg, "TITLE"))
# Core number
if 'cpucore' in self.stats and self.stats['cpucore'] > 0:
msg = '{}-core'.format(int(self.stats['cpucore']))
ret.append(self.curse_add_line(msg))
# New line
ret.append(self.curse_new_line())
# 1min load
msg = '{:8}'.format('1 min:')
ret.append(self.curse_add_line(msg))
msg = '{:>6.2f}'.format(self.stats['min1'])
ret.append(self.curse_add_line(msg))
# New line
ret.append(self.curse_new_line())
# 5min load
msg = '{:8}'.format('5 min:')
ret.append(self.curse_add_line(msg))
msg = '{:>6.2f}'.format(self.stats['min5'])
ret.append(self.curse_add_line(
msg, self.get_views(key='min5', option='decoration')))
# New line
ret.append(self.curse_new_line())
# 15min load
msg = '{:8}'.format('15 min:')
ret.append(self.curse_add_line(msg))
msg = '{:>6.2f}'.format(self.stats['min15'])
ret.append(self.curse_add_line(
msg, self.get_views(key='min15', option='decoration')))
return ret | def function[msg_curse, parameter[self, args, max_width]]:
constant[Return the dict to display in the curse interface.]
variable[ret] assign[=] list[[]]
if <ast.BoolOp object at 0x7da204564a30> begin[:]
return[name[ret]]
variable[msg] assign[=] call[constant[{:8}].format, parameter[constant[LOAD]]]
call[name[ret].append, parameter[call[name[self].curse_add_line, parameter[name[msg], constant[TITLE]]]]]
if <ast.BoolOp object at 0x7da20e955b40> begin[:]
variable[msg] assign[=] call[constant[{}-core].format, parameter[call[name[int], parameter[call[name[self].stats][constant[cpucore]]]]]]
call[name[ret].append, parameter[call[name[self].curse_add_line, parameter[name[msg]]]]]
call[name[ret].append, parameter[call[name[self].curse_new_line, parameter[]]]]
variable[msg] assign[=] call[constant[{:8}].format, parameter[constant[1 min:]]]
call[name[ret].append, parameter[call[name[self].curse_add_line, parameter[name[msg]]]]]
variable[msg] assign[=] call[constant[{:>6.2f}].format, parameter[call[name[self].stats][constant[min1]]]]
call[name[ret].append, parameter[call[name[self].curse_add_line, parameter[name[msg]]]]]
call[name[ret].append, parameter[call[name[self].curse_new_line, parameter[]]]]
variable[msg] assign[=] call[constant[{:8}].format, parameter[constant[5 min:]]]
call[name[ret].append, parameter[call[name[self].curse_add_line, parameter[name[msg]]]]]
variable[msg] assign[=] call[constant[{:>6.2f}].format, parameter[call[name[self].stats][constant[min5]]]]
call[name[ret].append, parameter[call[name[self].curse_add_line, parameter[name[msg], call[name[self].get_views, parameter[]]]]]]
call[name[ret].append, parameter[call[name[self].curse_new_line, parameter[]]]]
variable[msg] assign[=] call[constant[{:8}].format, parameter[constant[15 min:]]]
call[name[ret].append, parameter[call[name[self].curse_add_line, parameter[name[msg]]]]]
variable[msg] assign[=] call[constant[{:>6.2f}].format, parameter[call[name[self].stats][constant[min15]]]]
call[name[ret].append, parameter[call[name[self].curse_add_line, parameter[name[msg], call[name[self].get_views, parameter[]]]]]]
return[name[ret]] | keyword[def] identifier[msg_curse] ( identifier[self] , identifier[args] = keyword[None] , identifier[max_width] = keyword[None] ):
literal[string]
identifier[ret] =[]
keyword[if] keyword[not] identifier[self] . identifier[stats] keyword[or] ( identifier[self] . identifier[stats] =={}) keyword[or] identifier[self] . identifier[is_disable] ():
keyword[return] identifier[ret]
identifier[msg] = literal[string] . identifier[format] ( literal[string] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] , literal[string] ))
keyword[if] literal[string] keyword[in] identifier[self] . identifier[stats] keyword[and] identifier[self] . identifier[stats] [ literal[string] ]> literal[int] :
identifier[msg] = literal[string] . identifier[format] ( identifier[int] ( identifier[self] . identifier[stats] [ literal[string] ]))
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] ))
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_new_line] ())
identifier[msg] = literal[string] . identifier[format] ( literal[string] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] ))
identifier[msg] = literal[string] . identifier[format] ( identifier[self] . identifier[stats] [ literal[string] ])
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] ))
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_new_line] ())
identifier[msg] = literal[string] . identifier[format] ( literal[string] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] ))
identifier[msg] = literal[string] . identifier[format] ( identifier[self] . identifier[stats] [ literal[string] ])
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] (
identifier[msg] , identifier[self] . identifier[get_views] ( identifier[key] = literal[string] , identifier[option] = literal[string] )))
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_new_line] ())
identifier[msg] = literal[string] . identifier[format] ( literal[string] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] ))
identifier[msg] = literal[string] . identifier[format] ( identifier[self] . identifier[stats] [ literal[string] ])
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] (
identifier[msg] , identifier[self] . identifier[get_views] ( identifier[key] = literal[string] , identifier[option] = literal[string] )))
keyword[return] identifier[ret] | def msg_curse(self, args=None, max_width=None):
"""Return the dict to display in the curse interface."""
# Init the return message
ret = []
# Only process if stats exist, not empty (issue #871) and plugin not disabled
if not self.stats or self.stats == {} or self.is_disable():
return ret # depends on [control=['if'], data=[]]
# Build the string message
# Header
msg = '{:8}'.format('LOAD')
ret.append(self.curse_add_line(msg, 'TITLE'))
# Core number
if 'cpucore' in self.stats and self.stats['cpucore'] > 0:
msg = '{}-core'.format(int(self.stats['cpucore']))
ret.append(self.curse_add_line(msg)) # depends on [control=['if'], data=[]]
# New line
ret.append(self.curse_new_line())
# 1min load
msg = '{:8}'.format('1 min:')
ret.append(self.curse_add_line(msg))
msg = '{:>6.2f}'.format(self.stats['min1'])
ret.append(self.curse_add_line(msg))
# New line
ret.append(self.curse_new_line())
# 5min load
msg = '{:8}'.format('5 min:')
ret.append(self.curse_add_line(msg))
msg = '{:>6.2f}'.format(self.stats['min5'])
ret.append(self.curse_add_line(msg, self.get_views(key='min5', option='decoration')))
# New line
ret.append(self.curse_new_line())
# 15min load
msg = '{:8}'.format('15 min:')
ret.append(self.curse_add_line(msg))
msg = '{:>6.2f}'.format(self.stats['min15'])
ret.append(self.curse_add_line(msg, self.get_views(key='min15', option='decoration')))
return ret |
def get_optional_attribute(self, element, attribute):
"""Attempt to retrieve an optional attribute from the xml and return None on failure."""
try:
return self.get_attribute(element, attribute)
except self.XmlError:
return None | def function[get_optional_attribute, parameter[self, element, attribute]]:
constant[Attempt to retrieve an optional attribute from the xml and return None on failure.]
<ast.Try object at 0x7da1b1e8c820> | keyword[def] identifier[get_optional_attribute] ( identifier[self] , identifier[element] , identifier[attribute] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[get_attribute] ( identifier[element] , identifier[attribute] )
keyword[except] identifier[self] . identifier[XmlError] :
keyword[return] keyword[None] | def get_optional_attribute(self, element, attribute):
"""Attempt to retrieve an optional attribute from the xml and return None on failure."""
try:
return self.get_attribute(element, attribute) # depends on [control=['try'], data=[]]
except self.XmlError:
return None # depends on [control=['except'], data=[]] |
def _inet6_ntop(addr):
"""Convert an IPv6 address from binary form into text representation,
used when socket.inet_pton is not available.
"""
# IPv6 addresses have 128bits (16 bytes)
if len(addr) != 16:
raise ValueError("invalid length of packed IP address string")
# Decode to hex representation
address = ":".join(plain_str(bytes_hex(addr[idx:idx + 2])).lstrip('0') or '0' # noqa: E501
for idx in range(0, 16, 2))
try:
# Get the longest set of zero blocks. We need to take a look
# at group 1 regarding the length, as 0:0:1:0:0:2:3:4 would
# have two matches: 0:0: and :0:0: where the latter is longer,
# though the first one should be taken. Group 1 is in both
# cases 0:0.
match = max(_IP6_ZEROS.finditer(address),
key=lambda m: m.end(1) - m.start(1))
return '{}::{}'.format(address[:match.start()], address[match.end():])
except ValueError:
return address | def function[_inet6_ntop, parameter[addr]]:
constant[Convert an IPv6 address from binary form into text representation,
used when socket.inet_pton is not available.
]
if compare[call[name[len], parameter[name[addr]]] not_equal[!=] constant[16]] begin[:]
<ast.Raise object at 0x7da1b21af130>
variable[address] assign[=] call[constant[:].join, parameter[<ast.GeneratorExp object at 0x7da1b21af760>]]
<ast.Try object at 0x7da1b21ae3e0> | keyword[def] identifier[_inet6_ntop] ( identifier[addr] ):
literal[string]
keyword[if] identifier[len] ( identifier[addr] )!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[address] = literal[string] . identifier[join] ( identifier[plain_str] ( identifier[bytes_hex] ( identifier[addr] [ identifier[idx] : identifier[idx] + literal[int] ])). identifier[lstrip] ( literal[string] ) keyword[or] literal[string]
keyword[for] identifier[idx] keyword[in] identifier[range] ( literal[int] , literal[int] , literal[int] ))
keyword[try] :
identifier[match] = identifier[max] ( identifier[_IP6_ZEROS] . identifier[finditer] ( identifier[address] ),
identifier[key] = keyword[lambda] identifier[m] : identifier[m] . identifier[end] ( literal[int] )- identifier[m] . identifier[start] ( literal[int] ))
keyword[return] literal[string] . identifier[format] ( identifier[address] [: identifier[match] . identifier[start] ()], identifier[address] [ identifier[match] . identifier[end] ():])
keyword[except] identifier[ValueError] :
keyword[return] identifier[address] | def _inet6_ntop(addr):
"""Convert an IPv6 address from binary form into text representation,
used when socket.inet_pton is not available.
"""
# IPv6 addresses have 128bits (16 bytes)
if len(addr) != 16:
raise ValueError('invalid length of packed IP address string') # depends on [control=['if'], data=[]]
# Decode to hex representation
# noqa: E501
address = ':'.join((plain_str(bytes_hex(addr[idx:idx + 2])).lstrip('0') or '0' for idx in range(0, 16, 2)))
try:
# Get the longest set of zero blocks. We need to take a look
# at group 1 regarding the length, as 0:0:1:0:0:2:3:4 would
# have two matches: 0:0: and :0:0: where the latter is longer,
# though the first one should be taken. Group 1 is in both
# cases 0:0.
match = max(_IP6_ZEROS.finditer(address), key=lambda m: m.end(1) - m.start(1))
return '{}::{}'.format(address[:match.start()], address[match.end():]) # depends on [control=['try'], data=[]]
except ValueError:
return address # depends on [control=['except'], data=[]] |
def _get_changed_docs(self,
ancestral_commit_sha,
doc_id_from_repo_path,
doc_ids_to_check=None):
"""Returns the set of documents that have changed on the master since
commit `ancestral_commit_sha` or `False` (on an error)
'doc_id_from_repo_path' is a required function
if `doc_ids_to_check` is passed in, it should be an iterable list of
IDs. Only IDs in this list will be returned.
"""
try:
x = git(self.gitdir,
self.gitwd,
"diff-tree",
"--name-only",
"-r",
ancestral_commit_sha,
"master")
except:
_LOG.exception('diff-tree failed')
return False
touched = set()
for f in x.split('\n'):
found_id = doc_id_from_repo_path(f)
if found_id:
touched.add(found_id)
if doc_ids_to_check:
tc = set(doc_ids_to_check)
return tc.intersection(touched)
return touched | def function[_get_changed_docs, parameter[self, ancestral_commit_sha, doc_id_from_repo_path, doc_ids_to_check]]:
constant[Returns the set of documents that have changed on the master since
commit `ancestral_commit_sha` or `False` (on an error)
'doc_id_from_repo_path' is a required function
if `doc_ids_to_check` is passed in, it should be an iterable list of
IDs. Only IDs in this list will be returned.
]
<ast.Try object at 0x7da2043453f0>
variable[touched] assign[=] call[name[set], parameter[]]
for taget[name[f]] in starred[call[name[x].split, parameter[constant[
]]]] begin[:]
variable[found_id] assign[=] call[name[doc_id_from_repo_path], parameter[name[f]]]
if name[found_id] begin[:]
call[name[touched].add, parameter[name[found_id]]]
if name[doc_ids_to_check] begin[:]
variable[tc] assign[=] call[name[set], parameter[name[doc_ids_to_check]]]
return[call[name[tc].intersection, parameter[name[touched]]]]
return[name[touched]] | keyword[def] identifier[_get_changed_docs] ( identifier[self] ,
identifier[ancestral_commit_sha] ,
identifier[doc_id_from_repo_path] ,
identifier[doc_ids_to_check] = keyword[None] ):
literal[string]
keyword[try] :
identifier[x] = identifier[git] ( identifier[self] . identifier[gitdir] ,
identifier[self] . identifier[gitwd] ,
literal[string] ,
literal[string] ,
literal[string] ,
identifier[ancestral_commit_sha] ,
literal[string] )
keyword[except] :
identifier[_LOG] . identifier[exception] ( literal[string] )
keyword[return] keyword[False]
identifier[touched] = identifier[set] ()
keyword[for] identifier[f] keyword[in] identifier[x] . identifier[split] ( literal[string] ):
identifier[found_id] = identifier[doc_id_from_repo_path] ( identifier[f] )
keyword[if] identifier[found_id] :
identifier[touched] . identifier[add] ( identifier[found_id] )
keyword[if] identifier[doc_ids_to_check] :
identifier[tc] = identifier[set] ( identifier[doc_ids_to_check] )
keyword[return] identifier[tc] . identifier[intersection] ( identifier[touched] )
keyword[return] identifier[touched] | def _get_changed_docs(self, ancestral_commit_sha, doc_id_from_repo_path, doc_ids_to_check=None):
"""Returns the set of documents that have changed on the master since
commit `ancestral_commit_sha` or `False` (on an error)
'doc_id_from_repo_path' is a required function
if `doc_ids_to_check` is passed in, it should be an iterable list of
IDs. Only IDs in this list will be returned.
"""
try:
x = git(self.gitdir, self.gitwd, 'diff-tree', '--name-only', '-r', ancestral_commit_sha, 'master') # depends on [control=['try'], data=[]]
except:
_LOG.exception('diff-tree failed')
return False # depends on [control=['except'], data=[]]
touched = set()
for f in x.split('\n'):
found_id = doc_id_from_repo_path(f)
if found_id:
touched.add(found_id) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
if doc_ids_to_check:
tc = set(doc_ids_to_check)
return tc.intersection(touched) # depends on [control=['if'], data=[]]
return touched |
def _find_metric_value(session_or_group, metric_name):
"""Returns the metric_value for a given metric in a session or session group.
Args:
session_or_group: A Session protobuffer or SessionGroup protobuffer.
metric_name: A MetricName protobuffer. The metric to search for.
Returns:
A MetricValue protobuffer representing the value of the given metric or
None if no such metric was found in session_or_group.
"""
# Note: We can speed this up by converting the metric_values field
# to a dictionary on initialization, to avoid a linear search here. We'll
# need to wrap the SessionGroup and Session protos in a python object for
# that.
for metric_value in session_or_group.metric_values:
if (metric_value.name.tag == metric_name.tag and
metric_value.name.group == metric_name.group):
return metric_value | def function[_find_metric_value, parameter[session_or_group, metric_name]]:
constant[Returns the metric_value for a given metric in a session or session group.
Args:
session_or_group: A Session protobuffer or SessionGroup protobuffer.
metric_name: A MetricName protobuffer. The metric to search for.
Returns:
A MetricValue protobuffer representing the value of the given metric or
None if no such metric was found in session_or_group.
]
for taget[name[metric_value]] in starred[name[session_or_group].metric_values] begin[:]
if <ast.BoolOp object at 0x7da1b1f9a260> begin[:]
return[name[metric_value]] | keyword[def] identifier[_find_metric_value] ( identifier[session_or_group] , identifier[metric_name] ):
literal[string]
keyword[for] identifier[metric_value] keyword[in] identifier[session_or_group] . identifier[metric_values] :
keyword[if] ( identifier[metric_value] . identifier[name] . identifier[tag] == identifier[metric_name] . identifier[tag] keyword[and]
identifier[metric_value] . identifier[name] . identifier[group] == identifier[metric_name] . identifier[group] ):
keyword[return] identifier[metric_value] | def _find_metric_value(session_or_group, metric_name):
"""Returns the metric_value for a given metric in a session or session group.
Args:
session_or_group: A Session protobuffer or SessionGroup protobuffer.
metric_name: A MetricName protobuffer. The metric to search for.
Returns:
A MetricValue protobuffer representing the value of the given metric or
None if no such metric was found in session_or_group.
"""
# Note: We can speed this up by converting the metric_values field
# to a dictionary on initialization, to avoid a linear search here. We'll
# need to wrap the SessionGroup and Session protos in a python object for
# that.
for metric_value in session_or_group.metric_values:
if metric_value.name.tag == metric_name.tag and metric_value.name.group == metric_name.group:
return metric_value # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['metric_value']] |
def limit(self, limit_value, key_func=None, per_method=False,
methods=None, error_message=None, exempt_when=None):
"""
decorator to be used for rate limiting individual routes.
:param limit_value: rate limit string or a callable that returns a string.
:ref:`ratelimit-string` for more details.
:param function key_func: function/lambda to extract the unique identifier for
the rate limit. defaults to remote address of the request.
:param bool per_method: whether the limit is sub categorized into the http
method of the request.
:param list methods: if specified, only the methods in this list will be rate
limited (default: None).
:param error_message: string (or callable that returns one) to override the
error message used in the response.
:return:
"""
return self.__limit_decorator(limit_value, key_func, per_method=per_method,
methods=methods, error_message=error_message,
exempt_when=exempt_when) | def function[limit, parameter[self, limit_value, key_func, per_method, methods, error_message, exempt_when]]:
constant[
decorator to be used for rate limiting individual routes.
:param limit_value: rate limit string or a callable that returns a string.
:ref:`ratelimit-string` for more details.
:param function key_func: function/lambda to extract the unique identifier for
the rate limit. defaults to remote address of the request.
:param bool per_method: whether the limit is sub categorized into the http
method of the request.
:param list methods: if specified, only the methods in this list will be rate
limited (default: None).
:param error_message: string (or callable that returns one) to override the
error message used in the response.
:return:
]
return[call[name[self].__limit_decorator, parameter[name[limit_value], name[key_func]]]] | keyword[def] identifier[limit] ( identifier[self] , identifier[limit_value] , identifier[key_func] = keyword[None] , identifier[per_method] = keyword[False] ,
identifier[methods] = keyword[None] , identifier[error_message] = keyword[None] , identifier[exempt_when] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[__limit_decorator] ( identifier[limit_value] , identifier[key_func] , identifier[per_method] = identifier[per_method] ,
identifier[methods] = identifier[methods] , identifier[error_message] = identifier[error_message] ,
identifier[exempt_when] = identifier[exempt_when] ) | def limit(self, limit_value, key_func=None, per_method=False, methods=None, error_message=None, exempt_when=None):
"""
decorator to be used for rate limiting individual routes.
:param limit_value: rate limit string or a callable that returns a string.
:ref:`ratelimit-string` for more details.
:param function key_func: function/lambda to extract the unique identifier for
the rate limit. defaults to remote address of the request.
:param bool per_method: whether the limit is sub categorized into the http
method of the request.
:param list methods: if specified, only the methods in this list will be rate
limited (default: None).
:param error_message: string (or callable that returns one) to override the
error message used in the response.
:return:
"""
return self.__limit_decorator(limit_value, key_func, per_method=per_method, methods=methods, error_message=error_message, exempt_when=exempt_when) |
def connect(self):
"""
Connect the instance to redis by checking the existence of its primary
key. Do nothing if already connected.
"""
if self.connected:
return
pk = self._pk
if self.exists(pk=pk):
self._connected = True
else:
self._pk = None
self._connected = False
raise DoesNotExist("No %s found with pk %s" % (self.__class__.__name__, pk)) | def function[connect, parameter[self]]:
constant[
Connect the instance to redis by checking the existence of its primary
key. Do nothing if already connected.
]
if name[self].connected begin[:]
return[None]
variable[pk] assign[=] name[self]._pk
if call[name[self].exists, parameter[]] begin[:]
name[self]._connected assign[=] constant[True] | keyword[def] identifier[connect] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[connected] :
keyword[return]
identifier[pk] = identifier[self] . identifier[_pk]
keyword[if] identifier[self] . identifier[exists] ( identifier[pk] = identifier[pk] ):
identifier[self] . identifier[_connected] = keyword[True]
keyword[else] :
identifier[self] . identifier[_pk] = keyword[None]
identifier[self] . identifier[_connected] = keyword[False]
keyword[raise] identifier[DoesNotExist] ( literal[string] %( identifier[self] . identifier[__class__] . identifier[__name__] , identifier[pk] )) | def connect(self):
"""
Connect the instance to redis by checking the existence of its primary
key. Do nothing if already connected.
"""
if self.connected:
return # depends on [control=['if'], data=[]]
pk = self._pk
if self.exists(pk=pk):
self._connected = True # depends on [control=['if'], data=[]]
else:
self._pk = None
self._connected = False
raise DoesNotExist('No %s found with pk %s' % (self.__class__.__name__, pk)) |
def roll_mean(input, window):
'''Apply a rolling mean function to an array.
This is a simple rolling aggregation.'''
nobs, i, j, sum_x = 0,0,0,0.
N = len(input)
if window > N:
raise ValueError('Out of bound')
output = np.ndarray(N-window+1,dtype=input.dtype)
for val in input[:window]:
if val == val:
nobs += 1
sum_x += val
output[j] = NaN if not nobs else sum_x / nobs
for val in input[window:]:
prev = input[j]
if prev == prev:
sum_x -= prev
nobs -= 1
if val == val:
nobs += 1
sum_x += val
j += 1
output[j] = NaN if not nobs else sum_x / nobs
return output | def function[roll_mean, parameter[input, window]]:
constant[Apply a rolling mean function to an array.
This is a simple rolling aggregation.]
<ast.Tuple object at 0x7da18ede5db0> assign[=] tuple[[<ast.Constant object at 0x7da18ede6b90>, <ast.Constant object at 0x7da18ede73a0>, <ast.Constant object at 0x7da18ede43d0>, <ast.Constant object at 0x7da18ede6ad0>]]
variable[N] assign[=] call[name[len], parameter[name[input]]]
if compare[name[window] greater[>] name[N]] begin[:]
<ast.Raise object at 0x7da18ede6d40>
variable[output] assign[=] call[name[np].ndarray, parameter[binary_operation[binary_operation[name[N] - name[window]] + constant[1]]]]
for taget[name[val]] in starred[call[name[input]][<ast.Slice object at 0x7da18ede4550>]] begin[:]
if compare[name[val] equal[==] name[val]] begin[:]
<ast.AugAssign object at 0x7da18ede71f0>
<ast.AugAssign object at 0x7da18ede5c90>
call[name[output]][name[j]] assign[=] <ast.IfExp object at 0x7da18ede51e0>
for taget[name[val]] in starred[call[name[input]][<ast.Slice object at 0x7da18ede63b0>]] begin[:]
variable[prev] assign[=] call[name[input]][name[j]]
if compare[name[prev] equal[==] name[prev]] begin[:]
<ast.AugAssign object at 0x7da18c4cc340>
<ast.AugAssign object at 0x7da18c4cf430>
if compare[name[val] equal[==] name[val]] begin[:]
<ast.AugAssign object at 0x7da18c4ce4a0>
<ast.AugAssign object at 0x7da18c4cc100>
<ast.AugAssign object at 0x7da18c4ce140>
call[name[output]][name[j]] assign[=] <ast.IfExp object at 0x7da18c4cfe80>
return[name[output]] | keyword[def] identifier[roll_mean] ( identifier[input] , identifier[window] ):
literal[string]
identifier[nobs] , identifier[i] , identifier[j] , identifier[sum_x] = literal[int] , literal[int] , literal[int] , literal[int]
identifier[N] = identifier[len] ( identifier[input] )
keyword[if] identifier[window] > identifier[N] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[output] = identifier[np] . identifier[ndarray] ( identifier[N] - identifier[window] + literal[int] , identifier[dtype] = identifier[input] . identifier[dtype] )
keyword[for] identifier[val] keyword[in] identifier[input] [: identifier[window] ]:
keyword[if] identifier[val] == identifier[val] :
identifier[nobs] += literal[int]
identifier[sum_x] += identifier[val]
identifier[output] [ identifier[j] ]= identifier[NaN] keyword[if] keyword[not] identifier[nobs] keyword[else] identifier[sum_x] / identifier[nobs]
keyword[for] identifier[val] keyword[in] identifier[input] [ identifier[window] :]:
identifier[prev] = identifier[input] [ identifier[j] ]
keyword[if] identifier[prev] == identifier[prev] :
identifier[sum_x] -= identifier[prev]
identifier[nobs] -= literal[int]
keyword[if] identifier[val] == identifier[val] :
identifier[nobs] += literal[int]
identifier[sum_x] += identifier[val]
identifier[j] += literal[int]
identifier[output] [ identifier[j] ]= identifier[NaN] keyword[if] keyword[not] identifier[nobs] keyword[else] identifier[sum_x] / identifier[nobs]
keyword[return] identifier[output] | def roll_mean(input, window):
"""Apply a rolling mean function to an array.
This is a simple rolling aggregation."""
(nobs, i, j, sum_x) = (0, 0, 0, 0.0)
N = len(input)
if window > N:
raise ValueError('Out of bound') # depends on [control=['if'], data=[]]
output = np.ndarray(N - window + 1, dtype=input.dtype)
for val in input[:window]:
if val == val:
nobs += 1
sum_x += val # depends on [control=['if'], data=['val', 'val']] # depends on [control=['for'], data=['val']]
output[j] = NaN if not nobs else sum_x / nobs
for val in input[window:]:
prev = input[j]
if prev == prev:
sum_x -= prev
nobs -= 1 # depends on [control=['if'], data=['prev', 'prev']]
if val == val:
nobs += 1
sum_x += val # depends on [control=['if'], data=['val', 'val']]
j += 1
output[j] = NaN if not nobs else sum_x / nobs # depends on [control=['for'], data=['val']]
return output |
def update(env):
"""
Update an existing cipr project to the latest intalled version.
"""
files = [path.join(env.project_directory, 'cipr.lua')]
for filename in files:
if path.exists(filename):
os.remove(filename)
app.command.run(['init', env.project_directory]) | def function[update, parameter[env]]:
constant[
Update an existing cipr project to the latest intalled version.
]
variable[files] assign[=] list[[<ast.Call object at 0x7da204344970>]]
for taget[name[filename]] in starred[name[files]] begin[:]
if call[name[path].exists, parameter[name[filename]]] begin[:]
call[name[os].remove, parameter[name[filename]]]
call[name[app].command.run, parameter[list[[<ast.Constant object at 0x7da2054a7b80>, <ast.Attribute object at 0x7da2054a5420>]]]] | keyword[def] identifier[update] ( identifier[env] ):
literal[string]
identifier[files] =[ identifier[path] . identifier[join] ( identifier[env] . identifier[project_directory] , literal[string] )]
keyword[for] identifier[filename] keyword[in] identifier[files] :
keyword[if] identifier[path] . identifier[exists] ( identifier[filename] ):
identifier[os] . identifier[remove] ( identifier[filename] )
identifier[app] . identifier[command] . identifier[run] ([ literal[string] , identifier[env] . identifier[project_directory] ]) | def update(env):
"""
Update an existing cipr project to the latest intalled version.
"""
files = [path.join(env.project_directory, 'cipr.lua')]
for filename in files:
if path.exists(filename):
os.remove(filename) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']]
app.command.run(['init', env.project_directory]) |
def take_function_register(self, rtype = SharedData.TYPES.NO_TYPE):
"""Reserves register for function return value and sets its type"""
reg = SharedData.FUNCTION_REGISTER
if reg not in self.free_registers:
self.error("function register already taken")
self.free_registers.remove(reg)
self.used_registers.append(reg)
self.symtab.set_type(reg, rtype)
return reg | def function[take_function_register, parameter[self, rtype]]:
constant[Reserves register for function return value and sets its type]
variable[reg] assign[=] name[SharedData].FUNCTION_REGISTER
if compare[name[reg] <ast.NotIn object at 0x7da2590d7190> name[self].free_registers] begin[:]
call[name[self].error, parameter[constant[function register already taken]]]
call[name[self].free_registers.remove, parameter[name[reg]]]
call[name[self].used_registers.append, parameter[name[reg]]]
call[name[self].symtab.set_type, parameter[name[reg], name[rtype]]]
return[name[reg]] | keyword[def] identifier[take_function_register] ( identifier[self] , identifier[rtype] = identifier[SharedData] . identifier[TYPES] . identifier[NO_TYPE] ):
literal[string]
identifier[reg] = identifier[SharedData] . identifier[FUNCTION_REGISTER]
keyword[if] identifier[reg] keyword[not] keyword[in] identifier[self] . identifier[free_registers] :
identifier[self] . identifier[error] ( literal[string] )
identifier[self] . identifier[free_registers] . identifier[remove] ( identifier[reg] )
identifier[self] . identifier[used_registers] . identifier[append] ( identifier[reg] )
identifier[self] . identifier[symtab] . identifier[set_type] ( identifier[reg] , identifier[rtype] )
keyword[return] identifier[reg] | def take_function_register(self, rtype=SharedData.TYPES.NO_TYPE):
"""Reserves register for function return value and sets its type"""
reg = SharedData.FUNCTION_REGISTER
if reg not in self.free_registers:
self.error('function register already taken') # depends on [control=['if'], data=[]]
self.free_registers.remove(reg)
self.used_registers.append(reg)
self.symtab.set_type(reg, rtype)
return reg |
def prior_sample(bn):
"""Randomly sample from bn's full joint distribution. The result
is a {variable: value} dict. [Fig. 14.13]"""
event = {}
for node in bn.nodes:
event[node.variable] = node.sample(event)
return event | def function[prior_sample, parameter[bn]]:
constant[Randomly sample from bn's full joint distribution. The result
is a {variable: value} dict. [Fig. 14.13]]
variable[event] assign[=] dictionary[[], []]
for taget[name[node]] in starred[name[bn].nodes] begin[:]
call[name[event]][name[node].variable] assign[=] call[name[node].sample, parameter[name[event]]]
return[name[event]] | keyword[def] identifier[prior_sample] ( identifier[bn] ):
literal[string]
identifier[event] ={}
keyword[for] identifier[node] keyword[in] identifier[bn] . identifier[nodes] :
identifier[event] [ identifier[node] . identifier[variable] ]= identifier[node] . identifier[sample] ( identifier[event] )
keyword[return] identifier[event] | def prior_sample(bn):
"""Randomly sample from bn's full joint distribution. The result
is a {variable: value} dict. [Fig. 14.13]"""
event = {}
for node in bn.nodes:
event[node.variable] = node.sample(event) # depends on [control=['for'], data=['node']]
return event |
def try_checkpoint_metadata(self, trial):
"""Checkpoints metadata.
Args:
trial (Trial): Trial to checkpoint.
"""
if trial._checkpoint.storage == Checkpoint.MEMORY:
logger.debug("Not saving data for trial w/ memory checkpoint.")
return
try:
logger.debug("Saving trial metadata.")
self._cached_trial_state[trial.trial_id] = trial.__getstate__()
except Exception:
logger.exception("Error checkpointing trial metadata.") | def function[try_checkpoint_metadata, parameter[self, trial]]:
constant[Checkpoints metadata.
Args:
trial (Trial): Trial to checkpoint.
]
if compare[name[trial]._checkpoint.storage equal[==] name[Checkpoint].MEMORY] begin[:]
call[name[logger].debug, parameter[constant[Not saving data for trial w/ memory checkpoint.]]]
return[None]
<ast.Try object at 0x7da18f58c190> | keyword[def] identifier[try_checkpoint_metadata] ( identifier[self] , identifier[trial] ):
literal[string]
keyword[if] identifier[trial] . identifier[_checkpoint] . identifier[storage] == identifier[Checkpoint] . identifier[MEMORY] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[return]
keyword[try] :
identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_cached_trial_state] [ identifier[trial] . identifier[trial_id] ]= identifier[trial] . identifier[__getstate__] ()
keyword[except] identifier[Exception] :
identifier[logger] . identifier[exception] ( literal[string] ) | def try_checkpoint_metadata(self, trial):
"""Checkpoints metadata.
Args:
trial (Trial): Trial to checkpoint.
"""
if trial._checkpoint.storage == Checkpoint.MEMORY:
logger.debug('Not saving data for trial w/ memory checkpoint.')
return # depends on [control=['if'], data=[]]
try:
logger.debug('Saving trial metadata.')
self._cached_trial_state[trial.trial_id] = trial.__getstate__() # depends on [control=['try'], data=[]]
except Exception:
logger.exception('Error checkpointing trial metadata.') # depends on [control=['except'], data=[]] |
def delete(self):
""" Delete this node from config files """
cfg_file = "/etc/nago/nago.ini"
config = ConfigParser.ConfigParser()
config.read(cfg_file)
result = {}
token = self.data.pop("token", self.token)
if token not in config.sections():
raise Exception("Cannot find node in config. Delete aborted.")
config.remove_section(self.token)
with open(cfg_file, 'w') as f:
return config.write(f) | def function[delete, parameter[self]]:
constant[ Delete this node from config files ]
variable[cfg_file] assign[=] constant[/etc/nago/nago.ini]
variable[config] assign[=] call[name[ConfigParser].ConfigParser, parameter[]]
call[name[config].read, parameter[name[cfg_file]]]
variable[result] assign[=] dictionary[[], []]
variable[token] assign[=] call[name[self].data.pop, parameter[constant[token], name[self].token]]
if compare[name[token] <ast.NotIn object at 0x7da2590d7190> call[name[config].sections, parameter[]]] begin[:]
<ast.Raise object at 0x7da20e957ca0>
call[name[config].remove_section, parameter[name[self].token]]
with call[name[open], parameter[name[cfg_file], constant[w]]] begin[:]
return[call[name[config].write, parameter[name[f]]]] | keyword[def] identifier[delete] ( identifier[self] ):
literal[string]
identifier[cfg_file] = literal[string]
identifier[config] = identifier[ConfigParser] . identifier[ConfigParser] ()
identifier[config] . identifier[read] ( identifier[cfg_file] )
identifier[result] ={}
identifier[token] = identifier[self] . identifier[data] . identifier[pop] ( literal[string] , identifier[self] . identifier[token] )
keyword[if] identifier[token] keyword[not] keyword[in] identifier[config] . identifier[sections] ():
keyword[raise] identifier[Exception] ( literal[string] )
identifier[config] . identifier[remove_section] ( identifier[self] . identifier[token] )
keyword[with] identifier[open] ( identifier[cfg_file] , literal[string] ) keyword[as] identifier[f] :
keyword[return] identifier[config] . identifier[write] ( identifier[f] ) | def delete(self):
""" Delete this node from config files """
cfg_file = '/etc/nago/nago.ini'
config = ConfigParser.ConfigParser()
config.read(cfg_file)
result = {}
token = self.data.pop('token', self.token)
if token not in config.sections():
raise Exception('Cannot find node in config. Delete aborted.') # depends on [control=['if'], data=[]]
config.remove_section(self.token)
with open(cfg_file, 'w') as f:
return config.write(f) # depends on [control=['with'], data=['f']] |
def fetch_pillar(self):
'''
In the event of a cache miss, we need to incur the overhead of caching
a new pillar.
'''
log.debug('Pillar cache getting external pillar with ext: %s', self.ext)
fresh_pillar = Pillar(self.opts,
self.grains,
self.minion_id,
self.saltenv,
ext=self.ext,
functions=self.functions,
pillarenv=self.pillarenv)
return fresh_pillar.compile_pillar() | def function[fetch_pillar, parameter[self]]:
constant[
In the event of a cache miss, we need to incur the overhead of caching
a new pillar.
]
call[name[log].debug, parameter[constant[Pillar cache getting external pillar with ext: %s], name[self].ext]]
variable[fresh_pillar] assign[=] call[name[Pillar], parameter[name[self].opts, name[self].grains, name[self].minion_id, name[self].saltenv]]
return[call[name[fresh_pillar].compile_pillar, parameter[]]] | keyword[def] identifier[fetch_pillar] ( identifier[self] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] , identifier[self] . identifier[ext] )
identifier[fresh_pillar] = identifier[Pillar] ( identifier[self] . identifier[opts] ,
identifier[self] . identifier[grains] ,
identifier[self] . identifier[minion_id] ,
identifier[self] . identifier[saltenv] ,
identifier[ext] = identifier[self] . identifier[ext] ,
identifier[functions] = identifier[self] . identifier[functions] ,
identifier[pillarenv] = identifier[self] . identifier[pillarenv] )
keyword[return] identifier[fresh_pillar] . identifier[compile_pillar] () | def fetch_pillar(self):
"""
In the event of a cache miss, we need to incur the overhead of caching
a new pillar.
"""
log.debug('Pillar cache getting external pillar with ext: %s', self.ext)
fresh_pillar = Pillar(self.opts, self.grains, self.minion_id, self.saltenv, ext=self.ext, functions=self.functions, pillarenv=self.pillarenv)
return fresh_pillar.compile_pillar() |
def _get_names(self):
"""Get the names of the objects to include in the table.
:returns: The names of the objects to include.
:rtype: generator(str)
"""
for line in self.content:
line = line.strip()
if line and re.search("^[a-zA-Z0-9]", line):
yield line | def function[_get_names, parameter[self]]:
constant[Get the names of the objects to include in the table.
:returns: The names of the objects to include.
:rtype: generator(str)
]
for taget[name[line]] in starred[name[self].content] begin[:]
variable[line] assign[=] call[name[line].strip, parameter[]]
if <ast.BoolOp object at 0x7da1b08f4b80> begin[:]
<ast.Yield object at 0x7da1b08f6b00> | keyword[def] identifier[_get_names] ( identifier[self] ):
literal[string]
keyword[for] identifier[line] keyword[in] identifier[self] . identifier[content] :
identifier[line] = identifier[line] . identifier[strip] ()
keyword[if] identifier[line] keyword[and] identifier[re] . identifier[search] ( literal[string] , identifier[line] ):
keyword[yield] identifier[line] | def _get_names(self):
"""Get the names of the objects to include in the table.
:returns: The names of the objects to include.
:rtype: generator(str)
"""
for line in self.content:
line = line.strip()
if line and re.search('^[a-zA-Z0-9]', line):
yield line # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] |
def timestamp(x):
"""Get a timestamp from a date in python 3 and python 2"""
if x.tzinfo is None:
# Naive dates to utc
x = x.replace(tzinfo=utc)
if hasattr(x, 'timestamp'):
return x.timestamp()
else:
return (x - datetime(1970, 1, 1, tzinfo=utc)).total_seconds() | def function[timestamp, parameter[x]]:
constant[Get a timestamp from a date in python 3 and python 2]
if compare[name[x].tzinfo is constant[None]] begin[:]
variable[x] assign[=] call[name[x].replace, parameter[]]
if call[name[hasattr], parameter[name[x], constant[timestamp]]] begin[:]
return[call[name[x].timestamp, parameter[]]] | keyword[def] identifier[timestamp] ( identifier[x] ):
literal[string]
keyword[if] identifier[x] . identifier[tzinfo] keyword[is] keyword[None] :
identifier[x] = identifier[x] . identifier[replace] ( identifier[tzinfo] = identifier[utc] )
keyword[if] identifier[hasattr] ( identifier[x] , literal[string] ):
keyword[return] identifier[x] . identifier[timestamp] ()
keyword[else] :
keyword[return] ( identifier[x] - identifier[datetime] ( literal[int] , literal[int] , literal[int] , identifier[tzinfo] = identifier[utc] )). identifier[total_seconds] () | def timestamp(x):
"""Get a timestamp from a date in python 3 and python 2"""
if x.tzinfo is None:
# Naive dates to utc
x = x.replace(tzinfo=utc) # depends on [control=['if'], data=[]]
if hasattr(x, 'timestamp'):
return x.timestamp() # depends on [control=['if'], data=[]]
else:
return (x - datetime(1970, 1, 1, tzinfo=utc)).total_seconds() |
def get_output_base_path(
self,
output_directory,
output_prefix,
feature_type,
overwrite):
"""Get a full base name path to save the shapefile.
:param output_directory: The directory where to put results.
:type output_directory: str
:param output_prefix: The prefix to add for the shapefile.
:type output_prefix: str
:param feature_type: What kind of features should be downloaded.
Currently 'buildings', 'building-points' or 'roads' are supported.
:type feature_type: str
:param overwrite: Boolean to know if we can overwrite existing files.
:type overwrite: bool
:return: The base path.
:rtype: str
"""
path = os.path.join(
output_directory, '%s%s' % (output_prefix, feature_type))
if overwrite:
# If a shapefile exists, we must remove it (only the .shp)
shp = '%s.shp' % path
if os.path.isfile(shp):
os.remove(shp)
else:
separator = '-'
suffix = self.get_unique_file_path_suffix(
'%s.shp' % path, separator)
if suffix:
path = os.path.join(output_directory, '%s%s%s%s' % (
output_prefix, feature_type, separator, suffix))
return path | def function[get_output_base_path, parameter[self, output_directory, output_prefix, feature_type, overwrite]]:
constant[Get a full base name path to save the shapefile.
:param output_directory: The directory where to put results.
:type output_directory: str
:param output_prefix: The prefix to add for the shapefile.
:type output_prefix: str
:param feature_type: What kind of features should be downloaded.
Currently 'buildings', 'building-points' or 'roads' are supported.
:type feature_type: str
:param overwrite: Boolean to know if we can overwrite existing files.
:type overwrite: bool
:return: The base path.
:rtype: str
]
variable[path] assign[=] call[name[os].path.join, parameter[name[output_directory], binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204344df0>, <ast.Name object at 0x7da2043472b0>]]]]]
if name[overwrite] begin[:]
variable[shp] assign[=] binary_operation[constant[%s.shp] <ast.Mod object at 0x7da2590d6920> name[path]]
if call[name[os].path.isfile, parameter[name[shp]]] begin[:]
call[name[os].remove, parameter[name[shp]]]
return[name[path]] | keyword[def] identifier[get_output_base_path] (
identifier[self] ,
identifier[output_directory] ,
identifier[output_prefix] ,
identifier[feature_type] ,
identifier[overwrite] ):
literal[string]
identifier[path] = identifier[os] . identifier[path] . identifier[join] (
identifier[output_directory] , literal[string] %( identifier[output_prefix] , identifier[feature_type] ))
keyword[if] identifier[overwrite] :
identifier[shp] = literal[string] % identifier[path]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[shp] ):
identifier[os] . identifier[remove] ( identifier[shp] )
keyword[else] :
identifier[separator] = literal[string]
identifier[suffix] = identifier[self] . identifier[get_unique_file_path_suffix] (
literal[string] % identifier[path] , identifier[separator] )
keyword[if] identifier[suffix] :
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[output_directory] , literal[string] %(
identifier[output_prefix] , identifier[feature_type] , identifier[separator] , identifier[suffix] ))
keyword[return] identifier[path] | def get_output_base_path(self, output_directory, output_prefix, feature_type, overwrite):
"""Get a full base name path to save the shapefile.
:param output_directory: The directory where to put results.
:type output_directory: str
:param output_prefix: The prefix to add for the shapefile.
:type output_prefix: str
:param feature_type: What kind of features should be downloaded.
Currently 'buildings', 'building-points' or 'roads' are supported.
:type feature_type: str
:param overwrite: Boolean to know if we can overwrite existing files.
:type overwrite: bool
:return: The base path.
:rtype: str
"""
path = os.path.join(output_directory, '%s%s' % (output_prefix, feature_type))
if overwrite:
# If a shapefile exists, we must remove it (only the .shp)
shp = '%s.shp' % path
if os.path.isfile(shp):
os.remove(shp) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
separator = '-'
suffix = self.get_unique_file_path_suffix('%s.shp' % path, separator)
if suffix:
path = os.path.join(output_directory, '%s%s%s%s' % (output_prefix, feature_type, separator, suffix)) # depends on [control=['if'], data=[]]
return path |
def _read_by_weight(self, F, att_weights, value):
"""Read from the value matrix given the attention weights.
Parameters
----------
F : symbol or ndarray
att_weights : Symbol or NDArray
Attention weights.
For single-head attention,
Shape (batch_size, query_length, memory_length).
For multi-head attention,
Shape (batch_size, num_heads, query_length, memory_length).
value : Symbol or NDArray
Value of the memory. Shape (batch_size, memory_length, total_value_dim)
Returns
-------
context_vec: Symbol or NDArray
Shape (batch_size, query_length, context_vec_dim)
"""
output = F.batch_dot(att_weights, value)
return output | def function[_read_by_weight, parameter[self, F, att_weights, value]]:
constant[Read from the value matrix given the attention weights.
Parameters
----------
F : symbol or ndarray
att_weights : Symbol or NDArray
Attention weights.
For single-head attention,
Shape (batch_size, query_length, memory_length).
For multi-head attention,
Shape (batch_size, num_heads, query_length, memory_length).
value : Symbol or NDArray
Value of the memory. Shape (batch_size, memory_length, total_value_dim)
Returns
-------
context_vec: Symbol or NDArray
Shape (batch_size, query_length, context_vec_dim)
]
variable[output] assign[=] call[name[F].batch_dot, parameter[name[att_weights], name[value]]]
return[name[output]] | keyword[def] identifier[_read_by_weight] ( identifier[self] , identifier[F] , identifier[att_weights] , identifier[value] ):
literal[string]
identifier[output] = identifier[F] . identifier[batch_dot] ( identifier[att_weights] , identifier[value] )
keyword[return] identifier[output] | def _read_by_weight(self, F, att_weights, value):
"""Read from the value matrix given the attention weights.
Parameters
----------
F : symbol or ndarray
att_weights : Symbol or NDArray
Attention weights.
For single-head attention,
Shape (batch_size, query_length, memory_length).
For multi-head attention,
Shape (batch_size, num_heads, query_length, memory_length).
value : Symbol or NDArray
Value of the memory. Shape (batch_size, memory_length, total_value_dim)
Returns
-------
context_vec: Symbol or NDArray
Shape (batch_size, query_length, context_vec_dim)
"""
output = F.batch_dot(att_weights, value)
return output |
def replace_value_some(ol,src_value,dst_value,*seqs,**kwargs):
'''
from elist.elist import *
ol = [1,'a',3,'a',5,'a',6,'a']
id(ol)
new = replace_value_some(ol,'a','AAA',0,1)
ol
new
id(ol)
id(new)
####
ol = [1,'a',3,'a',5,'a',6,'a']
id(ol)
rslt = replace_value_some(ol,'a','AAA',0,1,mode="original")
ol
rslt
id(ol)
id(rslt)
'''
if('mode' in kwargs):
mode = kwargs["mode"]
else:
mode = "new"
return(replace_value_seqs(ol,src_value,dst_value,list(seqs),mode=mode)) | def function[replace_value_some, parameter[ol, src_value, dst_value]]:
constant[
from elist.elist import *
ol = [1,'a',3,'a',5,'a',6,'a']
id(ol)
new = replace_value_some(ol,'a','AAA',0,1)
ol
new
id(ol)
id(new)
####
ol = [1,'a',3,'a',5,'a',6,'a']
id(ol)
rslt = replace_value_some(ol,'a','AAA',0,1,mode="original")
ol
rslt
id(ol)
id(rslt)
]
if compare[constant[mode] in name[kwargs]] begin[:]
variable[mode] assign[=] call[name[kwargs]][constant[mode]]
return[call[name[replace_value_seqs], parameter[name[ol], name[src_value], name[dst_value], call[name[list], parameter[name[seqs]]]]]] | keyword[def] identifier[replace_value_some] ( identifier[ol] , identifier[src_value] , identifier[dst_value] ,* identifier[seqs] ,** identifier[kwargs] ):
literal[string]
keyword[if] ( literal[string] keyword[in] identifier[kwargs] ):
identifier[mode] = identifier[kwargs] [ literal[string] ]
keyword[else] :
identifier[mode] = literal[string]
keyword[return] ( identifier[replace_value_seqs] ( identifier[ol] , identifier[src_value] , identifier[dst_value] , identifier[list] ( identifier[seqs] ), identifier[mode] = identifier[mode] )) | def replace_value_some(ol, src_value, dst_value, *seqs, **kwargs):
"""
from elist.elist import *
ol = [1,'a',3,'a',5,'a',6,'a']
id(ol)
new = replace_value_some(ol,'a','AAA',0,1)
ol
new
id(ol)
id(new)
####
ol = [1,'a',3,'a',5,'a',6,'a']
id(ol)
rslt = replace_value_some(ol,'a','AAA',0,1,mode="original")
ol
rslt
id(ol)
id(rslt)
"""
if 'mode' in kwargs:
mode = kwargs['mode'] # depends on [control=['if'], data=['kwargs']]
else:
mode = 'new'
return replace_value_seqs(ol, src_value, dst_value, list(seqs), mode=mode) |
def _disconnect_hanging_devices(self):
"""Periodic callback that checks for devices that haven't been used and disconnects them."""
now = monotonic()
for uuid, data in self._connections.items():
if (now - data['last_touch']) > self.client_timeout:
self._logger.info("Disconnect inactive client %s from device 0x%X", data['client'], uuid)
self._loop.add_callback(self._disconnect_from_device, uuid, data['key'], data['client'], unsolicited=True) | def function[_disconnect_hanging_devices, parameter[self]]:
constant[Periodic callback that checks for devices that haven't been used and disconnects them.]
variable[now] assign[=] call[name[monotonic], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20c993130>, <ast.Name object at 0x7da20c992860>]]] in starred[call[name[self]._connections.items, parameter[]]] begin[:]
if compare[binary_operation[name[now] - call[name[data]][constant[last_touch]]] greater[>] name[self].client_timeout] begin[:]
call[name[self]._logger.info, parameter[constant[Disconnect inactive client %s from device 0x%X], call[name[data]][constant[client]], name[uuid]]]
call[name[self]._loop.add_callback, parameter[name[self]._disconnect_from_device, name[uuid], call[name[data]][constant[key]], call[name[data]][constant[client]]]] | keyword[def] identifier[_disconnect_hanging_devices] ( identifier[self] ):
literal[string]
identifier[now] = identifier[monotonic] ()
keyword[for] identifier[uuid] , identifier[data] keyword[in] identifier[self] . identifier[_connections] . identifier[items] ():
keyword[if] ( identifier[now] - identifier[data] [ literal[string] ])> identifier[self] . identifier[client_timeout] :
identifier[self] . identifier[_logger] . identifier[info] ( literal[string] , identifier[data] [ literal[string] ], identifier[uuid] )
identifier[self] . identifier[_loop] . identifier[add_callback] ( identifier[self] . identifier[_disconnect_from_device] , identifier[uuid] , identifier[data] [ literal[string] ], identifier[data] [ literal[string] ], identifier[unsolicited] = keyword[True] ) | def _disconnect_hanging_devices(self):
"""Periodic callback that checks for devices that haven't been used and disconnects them."""
now = monotonic()
for (uuid, data) in self._connections.items():
if now - data['last_touch'] > self.client_timeout:
self._logger.info('Disconnect inactive client %s from device 0x%X', data['client'], uuid)
self._loop.add_callback(self._disconnect_from_device, uuid, data['key'], data['client'], unsolicited=True) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def replace_braintree_gateway_by_id(cls, braintree_gateway_id, braintree_gateway, **kwargs):
"""Replace BraintreeGateway
Replace all attributes of BraintreeGateway
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_braintree_gateway_by_id(braintree_gateway_id, braintree_gateway, async=True)
>>> result = thread.get()
:param async bool
:param str braintree_gateway_id: ID of braintreeGateway to replace (required)
:param BraintreeGateway braintree_gateway: Attributes of braintreeGateway to replace (required)
:return: BraintreeGateway
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._replace_braintree_gateway_by_id_with_http_info(braintree_gateway_id, braintree_gateway, **kwargs)
else:
(data) = cls._replace_braintree_gateway_by_id_with_http_info(braintree_gateway_id, braintree_gateway, **kwargs)
return data | def function[replace_braintree_gateway_by_id, parameter[cls, braintree_gateway_id, braintree_gateway]]:
constant[Replace BraintreeGateway
Replace all attributes of BraintreeGateway
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_braintree_gateway_by_id(braintree_gateway_id, braintree_gateway, async=True)
>>> result = thread.get()
:param async bool
:param str braintree_gateway_id: ID of braintreeGateway to replace (required)
:param BraintreeGateway braintree_gateway: Attributes of braintreeGateway to replace (required)
:return: BraintreeGateway
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async]]] begin[:]
return[call[name[cls]._replace_braintree_gateway_by_id_with_http_info, parameter[name[braintree_gateway_id], name[braintree_gateway]]]] | keyword[def] identifier[replace_braintree_gateway_by_id] ( identifier[cls] , identifier[braintree_gateway_id] , identifier[braintree_gateway] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[cls] . identifier[_replace_braintree_gateway_by_id_with_http_info] ( identifier[braintree_gateway_id] , identifier[braintree_gateway] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[cls] . identifier[_replace_braintree_gateway_by_id_with_http_info] ( identifier[braintree_gateway_id] , identifier[braintree_gateway] ,** identifier[kwargs] )
keyword[return] identifier[data] | def replace_braintree_gateway_by_id(cls, braintree_gateway_id, braintree_gateway, **kwargs):
"""Replace BraintreeGateway
Replace all attributes of BraintreeGateway
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_braintree_gateway_by_id(braintree_gateway_id, braintree_gateway, async=True)
>>> result = thread.get()
:param async bool
:param str braintree_gateway_id: ID of braintreeGateway to replace (required)
:param BraintreeGateway braintree_gateway: Attributes of braintreeGateway to replace (required)
:return: BraintreeGateway
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._replace_braintree_gateway_by_id_with_http_info(braintree_gateway_id, braintree_gateway, **kwargs) # depends on [control=['if'], data=[]]
else:
data = cls._replace_braintree_gateway_by_id_with_http_info(braintree_gateway_id, braintree_gateway, **kwargs)
return data |
def flag_dipthongs(self, syllables: List[str]) -> List[int]:
"""
Return a list of syllables that contain a dipthong
:param syllables:
:return:
"""
long_positions = []
for idx, syl in enumerate(syllables):
for dipthong in self.constants.DIPTHONGS:
if dipthong in syllables[idx]:
if not string_utils.starts_with_qu(syllables[idx]):
long_positions.append(idx)
return long_positions | def function[flag_dipthongs, parameter[self, syllables]]:
constant[
Return a list of syllables that contain a dipthong
:param syllables:
:return:
]
variable[long_positions] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20e955960>, <ast.Name object at 0x7da20e954070>]]] in starred[call[name[enumerate], parameter[name[syllables]]]] begin[:]
for taget[name[dipthong]] in starred[name[self].constants.DIPTHONGS] begin[:]
if compare[name[dipthong] in call[name[syllables]][name[idx]]] begin[:]
if <ast.UnaryOp object at 0x7da20e955840> begin[:]
call[name[long_positions].append, parameter[name[idx]]]
return[name[long_positions]] | keyword[def] identifier[flag_dipthongs] ( identifier[self] , identifier[syllables] : identifier[List] [ identifier[str] ])-> identifier[List] [ identifier[int] ]:
literal[string]
identifier[long_positions] =[]
keyword[for] identifier[idx] , identifier[syl] keyword[in] identifier[enumerate] ( identifier[syllables] ):
keyword[for] identifier[dipthong] keyword[in] identifier[self] . identifier[constants] . identifier[DIPTHONGS] :
keyword[if] identifier[dipthong] keyword[in] identifier[syllables] [ identifier[idx] ]:
keyword[if] keyword[not] identifier[string_utils] . identifier[starts_with_qu] ( identifier[syllables] [ identifier[idx] ]):
identifier[long_positions] . identifier[append] ( identifier[idx] )
keyword[return] identifier[long_positions] | def flag_dipthongs(self, syllables: List[str]) -> List[int]:
"""
Return a list of syllables that contain a dipthong
:param syllables:
:return:
"""
long_positions = []
for (idx, syl) in enumerate(syllables):
for dipthong in self.constants.DIPTHONGS:
if dipthong in syllables[idx]:
if not string_utils.starts_with_qu(syllables[idx]):
long_positions.append(idx) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dipthong']] # depends on [control=['for'], data=[]]
return long_positions |
def locationFromElement(self, element):
"""
Find the MutatorMath location of this element, either by name or from a child element.
"""
elementLocation = None
for locationElement in element.findall('.location'):
elementLocation = self.readLocationElement(locationElement)
break
return elementLocation | def function[locationFromElement, parameter[self, element]]:
constant[
Find the MutatorMath location of this element, either by name or from a child element.
]
variable[elementLocation] assign[=] constant[None]
for taget[name[locationElement]] in starred[call[name[element].findall, parameter[constant[.location]]]] begin[:]
variable[elementLocation] assign[=] call[name[self].readLocationElement, parameter[name[locationElement]]]
break
return[name[elementLocation]] | keyword[def] identifier[locationFromElement] ( identifier[self] , identifier[element] ):
literal[string]
identifier[elementLocation] = keyword[None]
keyword[for] identifier[locationElement] keyword[in] identifier[element] . identifier[findall] ( literal[string] ):
identifier[elementLocation] = identifier[self] . identifier[readLocationElement] ( identifier[locationElement] )
keyword[break]
keyword[return] identifier[elementLocation] | def locationFromElement(self, element):
"""
Find the MutatorMath location of this element, either by name or from a child element.
"""
elementLocation = None
for locationElement in element.findall('.location'):
elementLocation = self.readLocationElement(locationElement)
break # depends on [control=['for'], data=['locationElement']]
return elementLocation |
def transformer_imagenet64_memory_v0():
"""HParams for training image_imagenet64_gen_flat_rev with memory."""
hparams = transformer_cifar10_memory_v0()
hparams.max_length = 64 * 64 * 3
hparams.split_targets_chunk_length = 64 * 3
hparams.split_targets_max_chunks = int(
hparams.max_length / hparams.split_targets_chunk_length)
hparams.num_memory_items = 128 * 3
# Since this is an image problem, batch size refers to examples (not tokens)
target_images_per_batch = 2
hparams.batch_size = int(target_images_per_batch * (
hparams.max_length / hparams.split_targets_chunk_length))
# The recurrent memory needs to know the actual batch size (in sequences)
hparams.recurrent_memory_batch_size = hparams.batch_size
hparams.max_relative_position = 3072
return hparams | def function[transformer_imagenet64_memory_v0, parameter[]]:
constant[HParams for training image_imagenet64_gen_flat_rev with memory.]
variable[hparams] assign[=] call[name[transformer_cifar10_memory_v0], parameter[]]
name[hparams].max_length assign[=] binary_operation[binary_operation[constant[64] * constant[64]] * constant[3]]
name[hparams].split_targets_chunk_length assign[=] binary_operation[constant[64] * constant[3]]
name[hparams].split_targets_max_chunks assign[=] call[name[int], parameter[binary_operation[name[hparams].max_length / name[hparams].split_targets_chunk_length]]]
name[hparams].num_memory_items assign[=] binary_operation[constant[128] * constant[3]]
variable[target_images_per_batch] assign[=] constant[2]
name[hparams].batch_size assign[=] call[name[int], parameter[binary_operation[name[target_images_per_batch] * binary_operation[name[hparams].max_length / name[hparams].split_targets_chunk_length]]]]
name[hparams].recurrent_memory_batch_size assign[=] name[hparams].batch_size
name[hparams].max_relative_position assign[=] constant[3072]
return[name[hparams]] | keyword[def] identifier[transformer_imagenet64_memory_v0] ():
literal[string]
identifier[hparams] = identifier[transformer_cifar10_memory_v0] ()
identifier[hparams] . identifier[max_length] = literal[int] * literal[int] * literal[int]
identifier[hparams] . identifier[split_targets_chunk_length] = literal[int] * literal[int]
identifier[hparams] . identifier[split_targets_max_chunks] = identifier[int] (
identifier[hparams] . identifier[max_length] / identifier[hparams] . identifier[split_targets_chunk_length] )
identifier[hparams] . identifier[num_memory_items] = literal[int] * literal[int]
identifier[target_images_per_batch] = literal[int]
identifier[hparams] . identifier[batch_size] = identifier[int] ( identifier[target_images_per_batch] *(
identifier[hparams] . identifier[max_length] / identifier[hparams] . identifier[split_targets_chunk_length] ))
identifier[hparams] . identifier[recurrent_memory_batch_size] = identifier[hparams] . identifier[batch_size]
identifier[hparams] . identifier[max_relative_position] = literal[int]
keyword[return] identifier[hparams] | def transformer_imagenet64_memory_v0():
"""HParams for training image_imagenet64_gen_flat_rev with memory."""
hparams = transformer_cifar10_memory_v0()
hparams.max_length = 64 * 64 * 3
hparams.split_targets_chunk_length = 64 * 3
hparams.split_targets_max_chunks = int(hparams.max_length / hparams.split_targets_chunk_length)
hparams.num_memory_items = 128 * 3
# Since this is an image problem, batch size refers to examples (not tokens)
target_images_per_batch = 2
hparams.batch_size = int(target_images_per_batch * (hparams.max_length / hparams.split_targets_chunk_length))
# The recurrent memory needs to know the actual batch size (in sequences)
hparams.recurrent_memory_batch_size = hparams.batch_size
hparams.max_relative_position = 3072
return hparams |
def load(self, name, parent_path=None):
"""Loads a template.
:param str name: The template name
:param str parent_path: The optional path for a parent document
:rtype: tornado.template.Template
"""
if name not in self.templates:
self.templates[name] = self._create_template(name)
return self.templates[name] | def function[load, parameter[self, name, parent_path]]:
constant[Loads a template.
:param str name: The template name
:param str parent_path: The optional path for a parent document
:rtype: tornado.template.Template
]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self].templates] begin[:]
call[name[self].templates][name[name]] assign[=] call[name[self]._create_template, parameter[name[name]]]
return[call[name[self].templates][name[name]]] | keyword[def] identifier[load] ( identifier[self] , identifier[name] , identifier[parent_path] = keyword[None] ):
literal[string]
keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[templates] :
identifier[self] . identifier[templates] [ identifier[name] ]= identifier[self] . identifier[_create_template] ( identifier[name] )
keyword[return] identifier[self] . identifier[templates] [ identifier[name] ] | def load(self, name, parent_path=None):
"""Loads a template.
:param str name: The template name
:param str parent_path: The optional path for a parent document
:rtype: tornado.template.Template
"""
if name not in self.templates:
self.templates[name] = self._create_template(name) # depends on [control=['if'], data=['name']]
return self.templates[name] |
def inject_mode(arg_namespace):
"""Check command line arguments and run build function."""
try:
injector.inject_into_files(arg_namespace.scheme, arg_namespace.file)
except (IndexError, FileNotFoundError,
PermissionError, IsADirectoryError) as exception:
if isinstance(exception, IndexError):
print('"{}" has no valid injection marker lines.'.format(
exception.args[0]))
if isinstance(exception, FileNotFoundError):
print('Lacking resource "{}" to complete operation.'.format(
exception.filename))
if isinstance(exception, PermissionError):
print('No write permission for current working directory.')
if isinstance(exception, IsADirectoryError):
print('"{}" is a directory. Provide a *.yaml scheme file instead.'
.format(exception.filename)) | def function[inject_mode, parameter[arg_namespace]]:
constant[Check command line arguments and run build function.]
<ast.Try object at 0x7da1b07f6170> | keyword[def] identifier[inject_mode] ( identifier[arg_namespace] ):
literal[string]
keyword[try] :
identifier[injector] . identifier[inject_into_files] ( identifier[arg_namespace] . identifier[scheme] , identifier[arg_namespace] . identifier[file] )
keyword[except] ( identifier[IndexError] , identifier[FileNotFoundError] ,
identifier[PermissionError] , identifier[IsADirectoryError] ) keyword[as] identifier[exception] :
keyword[if] identifier[isinstance] ( identifier[exception] , identifier[IndexError] ):
identifier[print] ( literal[string] . identifier[format] (
identifier[exception] . identifier[args] [ literal[int] ]))
keyword[if] identifier[isinstance] ( identifier[exception] , identifier[FileNotFoundError] ):
identifier[print] ( literal[string] . identifier[format] (
identifier[exception] . identifier[filename] ))
keyword[if] identifier[isinstance] ( identifier[exception] , identifier[PermissionError] ):
identifier[print] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[exception] , identifier[IsADirectoryError] ):
identifier[print] ( literal[string]
. identifier[format] ( identifier[exception] . identifier[filename] )) | def inject_mode(arg_namespace):
"""Check command line arguments and run build function."""
try:
injector.inject_into_files(arg_namespace.scheme, arg_namespace.file) # depends on [control=['try'], data=[]]
except (IndexError, FileNotFoundError, PermissionError, IsADirectoryError) as exception:
if isinstance(exception, IndexError):
print('"{}" has no valid injection marker lines.'.format(exception.args[0])) # depends on [control=['if'], data=[]]
if isinstance(exception, FileNotFoundError):
print('Lacking resource "{}" to complete operation.'.format(exception.filename)) # depends on [control=['if'], data=[]]
if isinstance(exception, PermissionError):
print('No write permission for current working directory.') # depends on [control=['if'], data=[]]
if isinstance(exception, IsADirectoryError):
print('"{}" is a directory. Provide a *.yaml scheme file instead.'.format(exception.filename)) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['exception']] |
def eliminate_local_candidates(x, AggOp, A, T, Ca=1.0, **kwargs):
"""Eliminate canidates locally.
Helper function that determines where to eliminate candidates locally
on a per aggregate basis.
Parameters
---------
x : array
n x 1 vector of new candidate
AggOp : CSR or CSC sparse matrix
Aggregation operator for the level that x was generated for
A : sparse matrix
Operator for the level that x was generated for
T : sparse matrix
Tentative prolongation operator for the level that x was generated for
Ca : scalar
Constant threshold parameter to decide when to drop candidates
Returns
-------
Nothing, x is modified in place
"""
if not (isspmatrix_csr(AggOp) or isspmatrix_csc(AggOp)):
raise TypeError('AggOp must be a CSR or CSC matrix')
else:
AggOp = AggOp.tocsc()
ndof = max(x.shape)
nPDEs = int(ndof/AggOp.shape[0])
def aggregate_wise_inner_product(z, AggOp, nPDEs, ndof):
"""Inner products per aggregate.
Helper function that calculates <z, z>_i, i.e., the
inner product of z only over aggregate i
Returns a vector of length num_aggregates where entry i is <z, z>_i
"""
z = np.ravel(z)*np.ravel(z)
innerp = np.zeros((1, AggOp.shape[1]), dtype=z.dtype)
for j in range(nPDEs):
innerp += z[slice(j, ndof, nPDEs)].reshape(1, -1) * AggOp
return innerp.reshape(-1, 1)
def get_aggregate_weights(AggOp, A, z, nPDEs, ndof):
"""Weights per aggregate.
Calculate local aggregate quantities
Return a vector of length num_aggregates where entry i is
(card(agg_i)/A.shape[0]) ( <Az, z>/rho(A) )
"""
rho = approximate_spectral_radius(A)
zAz = np.dot(z.reshape(1, -1), A*z.reshape(-1, 1))
card = nPDEs*(AggOp.indptr[1:]-AggOp.indptr[:-1])
weights = (np.ravel(card)*zAz)/(A.shape[0]*rho)
return weights.reshape(-1, 1)
# Run test 1, which finds where x is small relative to its energy
weights = Ca*get_aggregate_weights(AggOp, A, x, nPDEs, ndof)
mask1 = aggregate_wise_inner_product(x, AggOp, nPDEs, ndof) <= weights
# Run test 2, which finds where x is already approximated
# accurately by the existing T
projected_x = x - T*(T.T*x)
mask2 = aggregate_wise_inner_product(projected_x,
AggOp, nPDEs, ndof) <= weights
# Combine masks and zero out corresponding aggregates in x
mask = np.ravel(mask1 + mask2).nonzero()[0]
if mask.shape[0] > 0:
mask = nPDEs*AggOp[:, mask].indices
for j in range(nPDEs):
x[mask+j] = 0.0 | def function[eliminate_local_candidates, parameter[x, AggOp, A, T, Ca]]:
constant[Eliminate canidates locally.
Helper function that determines where to eliminate candidates locally
on a per aggregate basis.
Parameters
---------
x : array
n x 1 vector of new candidate
AggOp : CSR or CSC sparse matrix
Aggregation operator for the level that x was generated for
A : sparse matrix
Operator for the level that x was generated for
T : sparse matrix
Tentative prolongation operator for the level that x was generated for
Ca : scalar
Constant threshold parameter to decide when to drop candidates
Returns
-------
Nothing, x is modified in place
]
if <ast.UnaryOp object at 0x7da2044c1ea0> begin[:]
<ast.Raise object at 0x7da2044c33a0>
def function[aggregate_wise_inner_product, parameter[z, AggOp, nPDEs, ndof]]:
constant[Inner products per aggregate.
Helper function that calculates <z, z>_i, i.e., the
inner product of z only over aggregate i
Returns a vector of length num_aggregates where entry i is <z, z>_i
]
variable[z] assign[=] binary_operation[call[name[np].ravel, parameter[name[z]]] * call[name[np].ravel, parameter[name[z]]]]
variable[innerp] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Constant object at 0x7da2044c2020>, <ast.Subscript object at 0x7da2044c14b0>]]]]
for taget[name[j]] in starred[call[name[range], parameter[name[nPDEs]]]] begin[:]
<ast.AugAssign object at 0x7da2044c1780>
return[call[name[innerp].reshape, parameter[<ast.UnaryOp object at 0x7da2044c3a00>, constant[1]]]]
def function[get_aggregate_weights, parameter[AggOp, A, z, nPDEs, ndof]]:
constant[Weights per aggregate.
Calculate local aggregate quantities
Return a vector of length num_aggregates where entry i is
(card(agg_i)/A.shape[0]) ( <Az, z>/rho(A) )
]
variable[rho] assign[=] call[name[approximate_spectral_radius], parameter[name[A]]]
variable[zAz] assign[=] call[name[np].dot, parameter[call[name[z].reshape, parameter[constant[1], <ast.UnaryOp object at 0x7da20c6aaf20>]], binary_operation[name[A] * call[name[z].reshape, parameter[<ast.UnaryOp object at 0x7da20c6aa740>, constant[1]]]]]]
variable[card] assign[=] binary_operation[name[nPDEs] * binary_operation[call[name[AggOp].indptr][<ast.Slice object at 0x7da20c6a91b0>] - call[name[AggOp].indptr][<ast.Slice object at 0x7da20c6a9120>]]]
variable[weights] assign[=] binary_operation[binary_operation[call[name[np].ravel, parameter[name[card]]] * name[zAz]] / binary_operation[call[name[A].shape][constant[0]] * name[rho]]]
return[call[name[weights].reshape, parameter[<ast.UnaryOp object at 0x7da20c6aa710>, constant[1]]]]
variable[weights] assign[=] binary_operation[name[Ca] * call[name[get_aggregate_weights], parameter[name[AggOp], name[A], name[x], name[nPDEs], name[ndof]]]]
variable[mask1] assign[=] compare[call[name[aggregate_wise_inner_product], parameter[name[x], name[AggOp], name[nPDEs], name[ndof]]] less_or_equal[<=] name[weights]]
variable[projected_x] assign[=] binary_operation[name[x] - binary_operation[name[T] * binary_operation[name[T].T * name[x]]]]
variable[mask2] assign[=] compare[call[name[aggregate_wise_inner_product], parameter[name[projected_x], name[AggOp], name[nPDEs], name[ndof]]] less_or_equal[<=] name[weights]]
variable[mask] assign[=] call[call[call[name[np].ravel, parameter[binary_operation[name[mask1] + name[mask2]]]].nonzero, parameter[]]][constant[0]]
if compare[call[name[mask].shape][constant[0]] greater[>] constant[0]] begin[:]
variable[mask] assign[=] binary_operation[name[nPDEs] * call[name[AggOp]][tuple[[<ast.Slice object at 0x7da20c6aa290>, <ast.Name object at 0x7da20c6ab550>]]].indices]
for taget[name[j]] in starred[call[name[range], parameter[name[nPDEs]]]] begin[:]
call[name[x]][binary_operation[name[mask] + name[j]]] assign[=] constant[0.0] | keyword[def] identifier[eliminate_local_candidates] ( identifier[x] , identifier[AggOp] , identifier[A] , identifier[T] , identifier[Ca] = literal[int] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] ( identifier[isspmatrix_csr] ( identifier[AggOp] ) keyword[or] identifier[isspmatrix_csc] ( identifier[AggOp] )):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[else] :
identifier[AggOp] = identifier[AggOp] . identifier[tocsc] ()
identifier[ndof] = identifier[max] ( identifier[x] . identifier[shape] )
identifier[nPDEs] = identifier[int] ( identifier[ndof] / identifier[AggOp] . identifier[shape] [ literal[int] ])
keyword[def] identifier[aggregate_wise_inner_product] ( identifier[z] , identifier[AggOp] , identifier[nPDEs] , identifier[ndof] ):
literal[string]
identifier[z] = identifier[np] . identifier[ravel] ( identifier[z] )* identifier[np] . identifier[ravel] ( identifier[z] )
identifier[innerp] = identifier[np] . identifier[zeros] (( literal[int] , identifier[AggOp] . identifier[shape] [ literal[int] ]), identifier[dtype] = identifier[z] . identifier[dtype] )
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[nPDEs] ):
identifier[innerp] += identifier[z] [ identifier[slice] ( identifier[j] , identifier[ndof] , identifier[nPDEs] )]. identifier[reshape] ( literal[int] ,- literal[int] )* identifier[AggOp]
keyword[return] identifier[innerp] . identifier[reshape] (- literal[int] , literal[int] )
keyword[def] identifier[get_aggregate_weights] ( identifier[AggOp] , identifier[A] , identifier[z] , identifier[nPDEs] , identifier[ndof] ):
literal[string]
identifier[rho] = identifier[approximate_spectral_radius] ( identifier[A] )
identifier[zAz] = identifier[np] . identifier[dot] ( identifier[z] . identifier[reshape] ( literal[int] ,- literal[int] ), identifier[A] * identifier[z] . identifier[reshape] (- literal[int] , literal[int] ))
identifier[card] = identifier[nPDEs] *( identifier[AggOp] . identifier[indptr] [ literal[int] :]- identifier[AggOp] . identifier[indptr] [:- literal[int] ])
identifier[weights] =( identifier[np] . identifier[ravel] ( identifier[card] )* identifier[zAz] )/( identifier[A] . identifier[shape] [ literal[int] ]* identifier[rho] )
keyword[return] identifier[weights] . identifier[reshape] (- literal[int] , literal[int] )
identifier[weights] = identifier[Ca] * identifier[get_aggregate_weights] ( identifier[AggOp] , identifier[A] , identifier[x] , identifier[nPDEs] , identifier[ndof] )
identifier[mask1] = identifier[aggregate_wise_inner_product] ( identifier[x] , identifier[AggOp] , identifier[nPDEs] , identifier[ndof] )<= identifier[weights]
identifier[projected_x] = identifier[x] - identifier[T] *( identifier[T] . identifier[T] * identifier[x] )
identifier[mask2] = identifier[aggregate_wise_inner_product] ( identifier[projected_x] ,
identifier[AggOp] , identifier[nPDEs] , identifier[ndof] )<= identifier[weights]
identifier[mask] = identifier[np] . identifier[ravel] ( identifier[mask1] + identifier[mask2] ). identifier[nonzero] ()[ literal[int] ]
keyword[if] identifier[mask] . identifier[shape] [ literal[int] ]> literal[int] :
identifier[mask] = identifier[nPDEs] * identifier[AggOp] [:, identifier[mask] ]. identifier[indices]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[nPDEs] ):
identifier[x] [ identifier[mask] + identifier[j] ]= literal[int] | def eliminate_local_candidates(x, AggOp, A, T, Ca=1.0, **kwargs):
"""Eliminate canidates locally.
Helper function that determines where to eliminate candidates locally
on a per aggregate basis.
Parameters
---------
x : array
n x 1 vector of new candidate
AggOp : CSR or CSC sparse matrix
Aggregation operator for the level that x was generated for
A : sparse matrix
Operator for the level that x was generated for
T : sparse matrix
Tentative prolongation operator for the level that x was generated for
Ca : scalar
Constant threshold parameter to decide when to drop candidates
Returns
-------
Nothing, x is modified in place
"""
if not (isspmatrix_csr(AggOp) or isspmatrix_csc(AggOp)):
raise TypeError('AggOp must be a CSR or CSC matrix') # depends on [control=['if'], data=[]]
else:
AggOp = AggOp.tocsc()
ndof = max(x.shape)
nPDEs = int(ndof / AggOp.shape[0])
def aggregate_wise_inner_product(z, AggOp, nPDEs, ndof):
"""Inner products per aggregate.
Helper function that calculates <z, z>_i, i.e., the
inner product of z only over aggregate i
Returns a vector of length num_aggregates where entry i is <z, z>_i
"""
z = np.ravel(z) * np.ravel(z)
innerp = np.zeros((1, AggOp.shape[1]), dtype=z.dtype)
for j in range(nPDEs):
innerp += z[slice(j, ndof, nPDEs)].reshape(1, -1) * AggOp # depends on [control=['for'], data=['j']]
return innerp.reshape(-1, 1)
def get_aggregate_weights(AggOp, A, z, nPDEs, ndof):
"""Weights per aggregate.
Calculate local aggregate quantities
Return a vector of length num_aggregates where entry i is
(card(agg_i)/A.shape[0]) ( <Az, z>/rho(A) )
"""
rho = approximate_spectral_radius(A)
zAz = np.dot(z.reshape(1, -1), A * z.reshape(-1, 1))
card = nPDEs * (AggOp.indptr[1:] - AggOp.indptr[:-1])
weights = np.ravel(card) * zAz / (A.shape[0] * rho)
return weights.reshape(-1, 1)
# Run test 1, which finds where x is small relative to its energy
weights = Ca * get_aggregate_weights(AggOp, A, x, nPDEs, ndof)
mask1 = aggregate_wise_inner_product(x, AggOp, nPDEs, ndof) <= weights
# Run test 2, which finds where x is already approximated
# accurately by the existing T
projected_x = x - T * (T.T * x)
mask2 = aggregate_wise_inner_product(projected_x, AggOp, nPDEs, ndof) <= weights
# Combine masks and zero out corresponding aggregates in x
mask = np.ravel(mask1 + mask2).nonzero()[0]
if mask.shape[0] > 0:
mask = nPDEs * AggOp[:, mask].indices
for j in range(nPDEs):
x[mask + j] = 0.0 # depends on [control=['for'], data=['j']] # depends on [control=['if'], data=[]] |
def words_and_tags_from_wsj_tree(tree_string):
"""Generates linearized trees and tokens from the wsj tree format.
It uses the linearized algorithm described in https://arxiv.org/abs/1412.7449.
Args:
tree_string: tree in wsj format
Returns:
tuple: (words, linearized tree)
"""
stack, tags, words = [], [], []
for tok in tree_string.strip().split():
if tok[0] == "(":
symbol = tok[1:]
tags.append(symbol)
stack.append(symbol)
else:
assert tok[-1] == ")"
stack.pop() # Pop the POS-tag.
while tok[-2] == ")":
tags.append("/" + stack.pop())
tok = tok[:-1]
words.append(tok[:-1])
return str.join(" ", words), str.join(" ", tags[1:-1]) | def function[words_and_tags_from_wsj_tree, parameter[tree_string]]:
constant[Generates linearized trees and tokens from the wsj tree format.
It uses the linearized algorithm described in https://arxiv.org/abs/1412.7449.
Args:
tree_string: tree in wsj format
Returns:
tuple: (words, linearized tree)
]
<ast.Tuple object at 0x7da1b1e15630> assign[=] tuple[[<ast.List object at 0x7da1b1e15600>, <ast.List object at 0x7da1b1e166e0>, <ast.List object at 0x7da1b1e17d00>]]
for taget[name[tok]] in starred[call[call[name[tree_string].strip, parameter[]].split, parameter[]]] begin[:]
if compare[call[name[tok]][constant[0]] equal[==] constant[(]] begin[:]
variable[symbol] assign[=] call[name[tok]][<ast.Slice object at 0x7da1b1e15660>]
call[name[tags].append, parameter[name[symbol]]]
call[name[stack].append, parameter[name[symbol]]]
return[tuple[[<ast.Call object at 0x7da1b20e4fd0>, <ast.Call object at 0x7da1b20e5de0>]]] | keyword[def] identifier[words_and_tags_from_wsj_tree] ( identifier[tree_string] ):
literal[string]
identifier[stack] , identifier[tags] , identifier[words] =[],[],[]
keyword[for] identifier[tok] keyword[in] identifier[tree_string] . identifier[strip] (). identifier[split] ():
keyword[if] identifier[tok] [ literal[int] ]== literal[string] :
identifier[symbol] = identifier[tok] [ literal[int] :]
identifier[tags] . identifier[append] ( identifier[symbol] )
identifier[stack] . identifier[append] ( identifier[symbol] )
keyword[else] :
keyword[assert] identifier[tok] [- literal[int] ]== literal[string]
identifier[stack] . identifier[pop] ()
keyword[while] identifier[tok] [- literal[int] ]== literal[string] :
identifier[tags] . identifier[append] ( literal[string] + identifier[stack] . identifier[pop] ())
identifier[tok] = identifier[tok] [:- literal[int] ]
identifier[words] . identifier[append] ( identifier[tok] [:- literal[int] ])
keyword[return] identifier[str] . identifier[join] ( literal[string] , identifier[words] ), identifier[str] . identifier[join] ( literal[string] , identifier[tags] [ literal[int] :- literal[int] ]) | def words_and_tags_from_wsj_tree(tree_string):
"""Generates linearized trees and tokens from the wsj tree format.
It uses the linearized algorithm described in https://arxiv.org/abs/1412.7449.
Args:
tree_string: tree in wsj format
Returns:
tuple: (words, linearized tree)
"""
(stack, tags, words) = ([], [], [])
for tok in tree_string.strip().split():
if tok[0] == '(':
symbol = tok[1:]
tags.append(symbol)
stack.append(symbol) # depends on [control=['if'], data=[]]
else:
assert tok[-1] == ')'
stack.pop() # Pop the POS-tag.
while tok[-2] == ')':
tags.append('/' + stack.pop())
tok = tok[:-1] # depends on [control=['while'], data=[]]
words.append(tok[:-1]) # depends on [control=['for'], data=['tok']]
return (str.join(' ', words), str.join(' ', tags[1:-1])) |
def from_list(cls, actions):
'''convert list of actions into the corresponding bitmask'''
bitmask = 0
for a in actions:
bitmask |= cls.action_bitmask(a)
return Action(bitmask) | def function[from_list, parameter[cls, actions]]:
constant[convert list of actions into the corresponding bitmask]
variable[bitmask] assign[=] constant[0]
for taget[name[a]] in starred[name[actions]] begin[:]
<ast.AugAssign object at 0x7da1b26af7c0>
return[call[name[Action], parameter[name[bitmask]]]] | keyword[def] identifier[from_list] ( identifier[cls] , identifier[actions] ):
literal[string]
identifier[bitmask] = literal[int]
keyword[for] identifier[a] keyword[in] identifier[actions] :
identifier[bitmask] |= identifier[cls] . identifier[action_bitmask] ( identifier[a] )
keyword[return] identifier[Action] ( identifier[bitmask] ) | def from_list(cls, actions):
"""convert list of actions into the corresponding bitmask"""
bitmask = 0
for a in actions:
bitmask |= cls.action_bitmask(a) # depends on [control=['for'], data=['a']]
return Action(bitmask) |
def p_qualifierType_1(p):
"""qualifierType_1 : ':' dataType array
| ':' dataType array defaultValue
"""
dv = None
if len(p) == 5:
dv = p[4]
p[0] = (p[2], True, p[3], dv) | def function[p_qualifierType_1, parameter[p]]:
constant[qualifierType_1 : ':' dataType array
| ':' dataType array defaultValue
]
variable[dv] assign[=] constant[None]
if compare[call[name[len], parameter[name[p]]] equal[==] constant[5]] begin[:]
variable[dv] assign[=] call[name[p]][constant[4]]
call[name[p]][constant[0]] assign[=] tuple[[<ast.Subscript object at 0x7da204622ec0>, <ast.Constant object at 0x7da204621a80>, <ast.Subscript object at 0x7da204620d00>, <ast.Name object at 0x7da204622080>]] | keyword[def] identifier[p_qualifierType_1] ( identifier[p] ):
literal[string]
identifier[dv] = keyword[None]
keyword[if] identifier[len] ( identifier[p] )== literal[int] :
identifier[dv] = identifier[p] [ literal[int] ]
identifier[p] [ literal[int] ]=( identifier[p] [ literal[int] ], keyword[True] , identifier[p] [ literal[int] ], identifier[dv] ) | def p_qualifierType_1(p):
"""qualifierType_1 : ':' dataType array
| ':' dataType array defaultValue
"""
dv = None
if len(p) == 5:
dv = p[4] # depends on [control=['if'], data=[]]
p[0] = (p[2], True, p[3], dv) |
def save_file(self, sequence, array=None):
"""Write the date stored in |IOSequence.series| of the given
|IOSequence| into an "external" data file. """
if array is None:
array = sequence.aggregate_series()
try:
if sequence.filetype_ext == 'nc':
self._save_nc(sequence, array)
else:
filepath = sequence.filepath_ext
if ((array is not None) and
(array.info['type'] != 'unmodified')):
filepath = (f'{filepath[:-4]}_{array.info["type"]}'
f'{filepath[-4:]}')
if not sequence.overwrite_ext and os.path.exists(filepath):
raise OSError(
f'Sequence {objecttools.devicephrase(sequence)} '
f'is not allowed to overwrite the existing file '
f'`{sequence.filepath_ext}`.')
if sequence.filetype_ext == 'npy':
self._save_npy(array, filepath)
elif sequence.filetype_ext == 'asc':
self._save_asc(array, filepath)
except BaseException:
objecttools.augment_excmessage(
'While trying to save the external data of sequence %s'
% objecttools.devicephrase(sequence)) | def function[save_file, parameter[self, sequence, array]]:
constant[Write the date stored in |IOSequence.series| of the given
|IOSequence| into an "external" data file. ]
if compare[name[array] is constant[None]] begin[:]
variable[array] assign[=] call[name[sequence].aggregate_series, parameter[]]
<ast.Try object at 0x7da18bcca9b0> | keyword[def] identifier[save_file] ( identifier[self] , identifier[sequence] , identifier[array] = keyword[None] ):
literal[string]
keyword[if] identifier[array] keyword[is] keyword[None] :
identifier[array] = identifier[sequence] . identifier[aggregate_series] ()
keyword[try] :
keyword[if] identifier[sequence] . identifier[filetype_ext] == literal[string] :
identifier[self] . identifier[_save_nc] ( identifier[sequence] , identifier[array] )
keyword[else] :
identifier[filepath] = identifier[sequence] . identifier[filepath_ext]
keyword[if] (( identifier[array] keyword[is] keyword[not] keyword[None] ) keyword[and]
( identifier[array] . identifier[info] [ literal[string] ]!= literal[string] )):
identifier[filepath] =( literal[string]
literal[string] )
keyword[if] keyword[not] identifier[sequence] . identifier[overwrite_ext] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[filepath] ):
keyword[raise] identifier[OSError] (
literal[string]
literal[string]
literal[string] )
keyword[if] identifier[sequence] . identifier[filetype_ext] == literal[string] :
identifier[self] . identifier[_save_npy] ( identifier[array] , identifier[filepath] )
keyword[elif] identifier[sequence] . identifier[filetype_ext] == literal[string] :
identifier[self] . identifier[_save_asc] ( identifier[array] , identifier[filepath] )
keyword[except] identifier[BaseException] :
identifier[objecttools] . identifier[augment_excmessage] (
literal[string]
% identifier[objecttools] . identifier[devicephrase] ( identifier[sequence] )) | def save_file(self, sequence, array=None):
"""Write the date stored in |IOSequence.series| of the given
|IOSequence| into an "external" data file. """
if array is None:
array = sequence.aggregate_series() # depends on [control=['if'], data=['array']]
try:
if sequence.filetype_ext == 'nc':
self._save_nc(sequence, array) # depends on [control=['if'], data=[]]
else:
filepath = sequence.filepath_ext
if array is not None and array.info['type'] != 'unmodified':
filepath = f"{filepath[:-4]}_{array.info['type']}{filepath[-4:]}" # depends on [control=['if'], data=[]]
if not sequence.overwrite_ext and os.path.exists(filepath):
raise OSError(f'Sequence {objecttools.devicephrase(sequence)} is not allowed to overwrite the existing file `{sequence.filepath_ext}`.') # depends on [control=['if'], data=[]]
if sequence.filetype_ext == 'npy':
self._save_npy(array, filepath) # depends on [control=['if'], data=[]]
elif sequence.filetype_ext == 'asc':
self._save_asc(array, filepath) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except BaseException:
objecttools.augment_excmessage('While trying to save the external data of sequence %s' % objecttools.devicephrase(sequence)) # depends on [control=['except'], data=[]] |
def get_dec(self):
"""
Get Dec. corresponding to the current position (ICRS, J2000)
:return: Declination
"""
try:
return self.dec.value
except AttributeError:
# Transform from L,B to R.A., Dec
return self.sky_coord.transform_to('icrs').dec.value | def function[get_dec, parameter[self]]:
constant[
Get Dec. corresponding to the current position (ICRS, J2000)
:return: Declination
]
<ast.Try object at 0x7da1b0e26e60> | keyword[def] identifier[get_dec] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[dec] . identifier[value]
keyword[except] identifier[AttributeError] :
keyword[return] identifier[self] . identifier[sky_coord] . identifier[transform_to] ( literal[string] ). identifier[dec] . identifier[value] | def get_dec(self):
"""
Get Dec. corresponding to the current position (ICRS, J2000)
:return: Declination
"""
try:
return self.dec.value # depends on [control=['try'], data=[]]
except AttributeError:
# Transform from L,B to R.A., Dec
return self.sky_coord.transform_to('icrs').dec.value # depends on [control=['except'], data=[]] |
def backfill_fields(self, fields, forms):
"""
Properly backfill fields to explicitly request specific
keys. The issue is that >6.X servers *only* return requested fields
so to improve backwards compatiblity for PyCap clients, add specific fields
when required.
Parameters
----------
fields: list
requested fields
forms: list
requested forms
Returns
-------
new fields, forms
"""
if forms and not fields:
new_fields = [self.def_field]
elif fields and self.def_field not in fields:
new_fields = list(fields)
if self.def_field not in fields:
new_fields.append(self.def_field)
elif not fields:
new_fields = self.field_names
else:
new_fields = list(fields)
return new_fields | def function[backfill_fields, parameter[self, fields, forms]]:
constant[
Properly backfill fields to explicitly request specific
keys. The issue is that >6.X servers *only* return requested fields
so to improve backwards compatiblity for PyCap clients, add specific fields
when required.
Parameters
----------
fields: list
requested fields
forms: list
requested forms
Returns
-------
new fields, forms
]
if <ast.BoolOp object at 0x7da1b060a260> begin[:]
variable[new_fields] assign[=] list[[<ast.Attribute object at 0x7da1b060b1f0>]]
return[name[new_fields]] | keyword[def] identifier[backfill_fields] ( identifier[self] , identifier[fields] , identifier[forms] ):
literal[string]
keyword[if] identifier[forms] keyword[and] keyword[not] identifier[fields] :
identifier[new_fields] =[ identifier[self] . identifier[def_field] ]
keyword[elif] identifier[fields] keyword[and] identifier[self] . identifier[def_field] keyword[not] keyword[in] identifier[fields] :
identifier[new_fields] = identifier[list] ( identifier[fields] )
keyword[if] identifier[self] . identifier[def_field] keyword[not] keyword[in] identifier[fields] :
identifier[new_fields] . identifier[append] ( identifier[self] . identifier[def_field] )
keyword[elif] keyword[not] identifier[fields] :
identifier[new_fields] = identifier[self] . identifier[field_names]
keyword[else] :
identifier[new_fields] = identifier[list] ( identifier[fields] )
keyword[return] identifier[new_fields] | def backfill_fields(self, fields, forms):
"""
Properly backfill fields to explicitly request specific
keys. The issue is that >6.X servers *only* return requested fields
so to improve backwards compatiblity for PyCap clients, add specific fields
when required.
Parameters
----------
fields: list
requested fields
forms: list
requested forms
Returns
-------
new fields, forms
"""
if forms and (not fields):
new_fields = [self.def_field] # depends on [control=['if'], data=[]]
elif fields and self.def_field not in fields:
new_fields = list(fields)
if self.def_field not in fields:
new_fields.append(self.def_field) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif not fields:
new_fields = self.field_names # depends on [control=['if'], data=[]]
else:
new_fields = list(fields)
return new_fields |
def random_outdir(): # type: () -> Text
""" Return the random directory name chosen to use for tool / workflow output """
# compute this once and store it as a function attribute - each subsequent call will return the same value
if not hasattr(random_outdir, 'outdir'):
random_outdir.outdir = '/' + ''.join([random.choice(string.ascii_letters) for _ in range(6)]) # type: ignore # nosec
return random_outdir.outdir | def function[random_outdir, parameter[]]:
constant[ Return the random directory name chosen to use for tool / workflow output ]
if <ast.UnaryOp object at 0x7da18bc70220> begin[:]
name[random_outdir].outdir assign[=] binary_operation[constant[/] + call[constant[].join, parameter[<ast.ListComp object at 0x7da18bc71a50>]]]
return[name[random_outdir].outdir] | keyword[def] identifier[random_outdir] ():
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[random_outdir] , literal[string] ):
identifier[random_outdir] . identifier[outdir] = literal[string] + literal[string] . identifier[join] ([ identifier[random] . identifier[choice] ( identifier[string] . identifier[ascii_letters] ) keyword[for] identifier[_] keyword[in] identifier[range] ( literal[int] )])
keyword[return] identifier[random_outdir] . identifier[outdir] | def random_outdir(): # type: () -> Text
' Return the random directory name chosen to use for tool / workflow output '
# compute this once and store it as a function attribute - each subsequent call will return the same value
if not hasattr(random_outdir, 'outdir'):
random_outdir.outdir = '/' + ''.join([random.choice(string.ascii_letters) for _ in range(6)]) # type: ignore # nosec # depends on [control=['if'], data=[]]
return random_outdir.outdir |
def orcid_uri_to_orcid(value):
"Strip the uri schema from the start of ORCID URL strings"
if value is None:
return value
replace_values = ['http://orcid.org/', 'https://orcid.org/']
for replace_value in replace_values:
value = value.replace(replace_value, '')
return value | def function[orcid_uri_to_orcid, parameter[value]]:
constant[Strip the uri schema from the start of ORCID URL strings]
if compare[name[value] is constant[None]] begin[:]
return[name[value]]
variable[replace_values] assign[=] list[[<ast.Constant object at 0x7da18eb56980>, <ast.Constant object at 0x7da18eb54550>]]
for taget[name[replace_value]] in starred[name[replace_values]] begin[:]
variable[value] assign[=] call[name[value].replace, parameter[name[replace_value], constant[]]]
return[name[value]] | keyword[def] identifier[orcid_uri_to_orcid] ( identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return] identifier[value]
identifier[replace_values] =[ literal[string] , literal[string] ]
keyword[for] identifier[replace_value] keyword[in] identifier[replace_values] :
identifier[value] = identifier[value] . identifier[replace] ( identifier[replace_value] , literal[string] )
keyword[return] identifier[value] | def orcid_uri_to_orcid(value):
"""Strip the uri schema from the start of ORCID URL strings"""
if value is None:
return value # depends on [control=['if'], data=['value']]
replace_values = ['http://orcid.org/', 'https://orcid.org/']
for replace_value in replace_values:
value = value.replace(replace_value, '') # depends on [control=['for'], data=['replace_value']]
return value |
def train(sess, loss, x_train, y_train,
init_all=False, evaluate=None, feed=None, args=None,
rng=None, var_list=None, fprop_args=None, optimizer=None,
devices=None, x_batch_preprocessor=None, use_ema=False,
ema_decay=.998, run_canary=None,
loss_threshold=1e5, dataset_train=None, dataset_size=None):
"""
Run (optionally multi-replica, synchronous) training to minimize `loss`
:param sess: TF session to use when training the graph
:param loss: tensor, the loss to minimize
:param x_train: numpy array with training inputs or tf Dataset
:param y_train: numpy array with training outputs or tf Dataset
:param init_all: (boolean) If set to true, all TF variables in the session
are (re)initialized, otherwise only previously
uninitialized variables are initialized before training.
:param evaluate: function that is run after each training iteration
(typically to display the test/validation accuracy).
:param feed: An optional dictionary that is appended to the feeding
dictionary before the session runs. Can be used to feed
the learning phase of a Keras model for instance.
:param args: dict or argparse `Namespace` object.
Should contain `nb_epochs`, `learning_rate`,
`batch_size`
:param rng: Instance of numpy.random.RandomState
:param var_list: Optional list of parameters to train.
:param fprop_args: dict, extra arguments to pass to fprop (loss and model).
:param optimizer: Optimizer to be used for training
:param devices: list of device names to use for training
If None, defaults to: all GPUs, if GPUs are available
all devices, if no GPUs are available
:param x_batch_preprocessor: callable
Takes a single tensor containing an x_train batch as input
Returns a single tensor containing an x_train batch as output
Called to preprocess the data before passing the data to the Loss
:param use_ema: bool
If true, uses an exponential moving average of the model parameters
:param ema_decay: float or callable
The decay parameter for EMA, if EMA is used
If a callable rather than a float, this is a callable that takes
the epoch and batch as arguments and returns the ema_decay for
the current batch.
:param loss_threshold: float
Raise an exception if the loss exceeds this value.
This is intended to rapidly detect numerical problems.
Sometimes the loss may legitimately be higher than this value. In
such cases, raise the value. If needed it can be np.inf.
:param dataset_train: tf Dataset instance.
Used as a replacement for x_train, y_train for faster performance.
:param dataset_size: integer, the size of the dataset_train.
:return: True if model trained
"""
# Check whether the hardware is working correctly
canary.run_canary()
if run_canary is not None:
warnings.warn("The `run_canary` argument is deprecated. The canary "
"is now much cheaper and thus runs all the time. The "
"canary now uses its own loss function so it is not "
"necessary to turn off the canary when training with "
" a stochastic loss. Simply quit passing `run_canary`."
"Passing `run_canary` may become an error on or after "
"2019-10-16.")
args = _ArgsWrapper(args or {})
fprop_args = fprop_args or {}
# Check that necessary arguments were given (see doc above)
# Be sure to support 0 epochs for debugging purposes
if args.nb_epochs is None:
raise ValueError("`args` must specify number of epochs")
if optimizer is None:
if args.learning_rate is None:
raise ValueError("Learning rate was not given in args dict")
assert args.batch_size, "Batch size was not given in args dict"
if rng is None:
rng = np.random.RandomState()
if optimizer is None:
optimizer = tf.train.AdamOptimizer(learning_rate=args.learning_rate)
else:
if not isinstance(optimizer, tf.train.Optimizer):
raise ValueError("optimizer object must be from a child class of "
"tf.train.Optimizer")
grads = []
xs = []
preprocessed_xs = []
ys = []
if dataset_train is not None:
assert x_train is None and y_train is None and x_batch_preprocessor is None
if dataset_size is None:
raise ValueError("You must provide a dataset size")
data_iterator = dataset_train.make_one_shot_iterator().get_next()
x_train, y_train = sess.run(data_iterator)
devices = infer_devices(devices)
for device in devices:
with tf.device(device):
x = tf.placeholder(x_train.dtype, (None,) + x_train.shape[1:])
y = tf.placeholder(y_train.dtype, (None,) + y_train.shape[1:])
xs.append(x)
ys.append(y)
if x_batch_preprocessor is not None:
x = x_batch_preprocessor(x)
# We need to keep track of these so that the canary can feed
# preprocessed values. If the canary had to feed raw values,
# stochastic preprocessing could make the canary fail.
preprocessed_xs.append(x)
loss_value = loss.fprop(x, y, **fprop_args)
grads.append(optimizer.compute_gradients(
loss_value, var_list=var_list))
num_devices = len(devices)
print("num_devices: ", num_devices)
grad = avg_grads(grads)
# Trigger update operations within the default graph (such as batch_norm).
with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):
train_step = optimizer.apply_gradients(grad)
epoch_tf = tf.placeholder(tf.int32, [])
batch_tf = tf.placeholder(tf.int32, [])
if use_ema:
if callable(ema_decay):
ema_decay = ema_decay(epoch_tf, batch_tf)
ema = tf.train.ExponentialMovingAverage(decay=ema_decay)
with tf.control_dependencies([train_step]):
train_step = ema.apply(var_list)
# Get pointers to the EMA's running average variables
avg_params = [ema.average(param) for param in var_list]
# Make temporary buffers used for swapping the live and running average
# parameters
tmp_params = [tf.Variable(param, trainable=False)
for param in var_list]
# Define the swapping operation
param_to_tmp = [tf.assign(tmp, param)
for tmp, param in safe_zip(tmp_params, var_list)]
with tf.control_dependencies(param_to_tmp):
avg_to_param = [tf.assign(param, avg)
for param, avg in safe_zip(var_list, avg_params)]
with tf.control_dependencies(avg_to_param):
tmp_to_avg = [tf.assign(avg, tmp)
for avg, tmp in safe_zip(avg_params, tmp_params)]
swap = tmp_to_avg
batch_size = args.batch_size
assert batch_size % num_devices == 0
device_batch_size = batch_size // num_devices
if init_all:
sess.run(tf.global_variables_initializer())
else:
initialize_uninitialized_global_variables(sess)
for epoch in xrange(args.nb_epochs):
if dataset_train is not None:
nb_batches = int(math.ceil(float(dataset_size) / batch_size))
else:
# Indices to shuffle training set
index_shuf = list(range(len(x_train)))
# Randomly repeat a few training examples each epoch to avoid
# having a too-small batch
while len(index_shuf) % batch_size != 0:
index_shuf.append(rng.randint(len(x_train)))
nb_batches = len(index_shuf) // batch_size
rng.shuffle(index_shuf)
# Shuffling here versus inside the loop doesn't seem to affect
# timing very much, but shuffling here makes the code slightly
# easier to read
x_train_shuffled = x_train[index_shuf]
y_train_shuffled = y_train[index_shuf]
prev = time.time()
for batch in range(nb_batches):
if dataset_train is not None:
x_train_shuffled, y_train_shuffled = sess.run(data_iterator)
start, end = 0, batch_size
else:
# Compute batch start and end indices
start = batch * batch_size
end = (batch + 1) * batch_size
# Perform one training step
diff = end - start
assert diff == batch_size
feed_dict = {epoch_tf: epoch, batch_tf: batch}
for dev_idx in xrange(num_devices):
cur_start = start + dev_idx * device_batch_size
cur_end = start + (dev_idx + 1) * device_batch_size
feed_dict[xs[dev_idx]] = x_train_shuffled[cur_start:cur_end]
feed_dict[ys[dev_idx]] = y_train_shuffled[cur_start:cur_end]
if cur_end != end and dataset_train is None:
msg = ("batch_size (%d) must be a multiple of num_devices "
"(%d).\nCUDA_VISIBLE_DEVICES: %s"
"\ndevices: %s")
args = (batch_size, num_devices,
os.environ['CUDA_VISIBLE_DEVICES'],
str(devices))
raise ValueError(msg % args)
if feed is not None:
feed_dict.update(feed)
_, loss_numpy = sess.run(
[train_step, loss_value], feed_dict=feed_dict)
if np.abs(loss_numpy) > loss_threshold:
raise ValueError("Extreme loss during training: ", loss_numpy)
if np.isnan(loss_numpy) or np.isinf(loss_numpy):
raise ValueError("NaN/Inf loss during training")
assert (dataset_train is not None or
end == len(index_shuf)) # Check that all examples were used
cur = time.time()
_logger.info("Epoch " + str(epoch) + " took " +
str(cur - prev) + " seconds")
if evaluate is not None:
if use_ema:
# Before running evaluation, load the running average
# parameters into the live slot, so we can see how well
# the EMA parameters are performing
sess.run(swap)
evaluate()
if use_ema:
# Swap the parameters back, so that we continue training
# on the live parameters
sess.run(swap)
if use_ema:
# When training is done, swap the running average parameters into
# the live slot, so that we use them when we deploy the model
sess.run(swap)
return True | def function[train, parameter[sess, loss, x_train, y_train, init_all, evaluate, feed, args, rng, var_list, fprop_args, optimizer, devices, x_batch_preprocessor, use_ema, ema_decay, run_canary, loss_threshold, dataset_train, dataset_size]]:
constant[
Run (optionally multi-replica, synchronous) training to minimize `loss`
:param sess: TF session to use when training the graph
:param loss: tensor, the loss to minimize
:param x_train: numpy array with training inputs or tf Dataset
:param y_train: numpy array with training outputs or tf Dataset
:param init_all: (boolean) If set to true, all TF variables in the session
are (re)initialized, otherwise only previously
uninitialized variables are initialized before training.
:param evaluate: function that is run after each training iteration
(typically to display the test/validation accuracy).
:param feed: An optional dictionary that is appended to the feeding
dictionary before the session runs. Can be used to feed
the learning phase of a Keras model for instance.
:param args: dict or argparse `Namespace` object.
Should contain `nb_epochs`, `learning_rate`,
`batch_size`
:param rng: Instance of numpy.random.RandomState
:param var_list: Optional list of parameters to train.
:param fprop_args: dict, extra arguments to pass to fprop (loss and model).
:param optimizer: Optimizer to be used for training
:param devices: list of device names to use for training
If None, defaults to: all GPUs, if GPUs are available
all devices, if no GPUs are available
:param x_batch_preprocessor: callable
Takes a single tensor containing an x_train batch as input
Returns a single tensor containing an x_train batch as output
Called to preprocess the data before passing the data to the Loss
:param use_ema: bool
If true, uses an exponential moving average of the model parameters
:param ema_decay: float or callable
The decay parameter for EMA, if EMA is used
If a callable rather than a float, this is a callable that takes
the epoch and batch as arguments and returns the ema_decay for
the current batch.
:param loss_threshold: float
Raise an exception if the loss exceeds this value.
This is intended to rapidly detect numerical problems.
Sometimes the loss may legitimately be higher than this value. In
such cases, raise the value. If needed it can be np.inf.
:param dataset_train: tf Dataset instance.
Used as a replacement for x_train, y_train for faster performance.
:param dataset_size: integer, the size of the dataset_train.
:return: True if model trained
]
call[name[canary].run_canary, parameter[]]
if compare[name[run_canary] is_not constant[None]] begin[:]
call[name[warnings].warn, parameter[constant[The `run_canary` argument is deprecated. The canary is now much cheaper and thus runs all the time. The canary now uses its own loss function so it is not necessary to turn off the canary when training with a stochastic loss. Simply quit passing `run_canary`.Passing `run_canary` may become an error on or after 2019-10-16.]]]
variable[args] assign[=] call[name[_ArgsWrapper], parameter[<ast.BoolOp object at 0x7da1b1fc9ff0>]]
variable[fprop_args] assign[=] <ast.BoolOp object at 0x7da1b1fc97e0>
if compare[name[args].nb_epochs is constant[None]] begin[:]
<ast.Raise object at 0x7da1b1fc9240>
if compare[name[optimizer] is constant[None]] begin[:]
if compare[name[args].learning_rate is constant[None]] begin[:]
<ast.Raise object at 0x7da1b1fcace0>
assert[name[args].batch_size]
if compare[name[rng] is constant[None]] begin[:]
variable[rng] assign[=] call[name[np].random.RandomState, parameter[]]
if compare[name[optimizer] is constant[None]] begin[:]
variable[optimizer] assign[=] call[name[tf].train.AdamOptimizer, parameter[]]
variable[grads] assign[=] list[[]]
variable[xs] assign[=] list[[]]
variable[preprocessed_xs] assign[=] list[[]]
variable[ys] assign[=] list[[]]
if compare[name[dataset_train] is_not constant[None]] begin[:]
assert[<ast.BoolOp object at 0x7da1b1fca260>]
if compare[name[dataset_size] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b1fc8eb0>
variable[data_iterator] assign[=] call[call[name[dataset_train].make_one_shot_iterator, parameter[]].get_next, parameter[]]
<ast.Tuple object at 0x7da1b1fca6e0> assign[=] call[name[sess].run, parameter[name[data_iterator]]]
variable[devices] assign[=] call[name[infer_devices], parameter[name[devices]]]
for taget[name[device]] in starred[name[devices]] begin[:]
with call[name[tf].device, parameter[name[device]]] begin[:]
variable[x] assign[=] call[name[tf].placeholder, parameter[name[x_train].dtype, binary_operation[tuple[[<ast.Constant object at 0x7da1b1fc98d0>]] + call[name[x_train].shape][<ast.Slice object at 0x7da1b1fcbc10>]]]]
variable[y] assign[=] call[name[tf].placeholder, parameter[name[y_train].dtype, binary_operation[tuple[[<ast.Constant object at 0x7da1b1fcba60>]] + call[name[y_train].shape][<ast.Slice object at 0x7da1b1fcb7c0>]]]]
call[name[xs].append, parameter[name[x]]]
call[name[ys].append, parameter[name[y]]]
if compare[name[x_batch_preprocessor] is_not constant[None]] begin[:]
variable[x] assign[=] call[name[x_batch_preprocessor], parameter[name[x]]]
call[name[preprocessed_xs].append, parameter[name[x]]]
variable[loss_value] assign[=] call[name[loss].fprop, parameter[name[x], name[y]]]
call[name[grads].append, parameter[call[name[optimizer].compute_gradients, parameter[name[loss_value]]]]]
variable[num_devices] assign[=] call[name[len], parameter[name[devices]]]
call[name[print], parameter[constant[num_devices: ], name[num_devices]]]
variable[grad] assign[=] call[name[avg_grads], parameter[name[grads]]]
with call[name[tf].control_dependencies, parameter[call[name[tf].get_collection, parameter[name[tf].GraphKeys.UPDATE_OPS]]]] begin[:]
variable[train_step] assign[=] call[name[optimizer].apply_gradients, parameter[name[grad]]]
variable[epoch_tf] assign[=] call[name[tf].placeholder, parameter[name[tf].int32, list[[]]]]
variable[batch_tf] assign[=] call[name[tf].placeholder, parameter[name[tf].int32, list[[]]]]
if name[use_ema] begin[:]
if call[name[callable], parameter[name[ema_decay]]] begin[:]
variable[ema_decay] assign[=] call[name[ema_decay], parameter[name[epoch_tf], name[batch_tf]]]
variable[ema] assign[=] call[name[tf].train.ExponentialMovingAverage, parameter[]]
with call[name[tf].control_dependencies, parameter[list[[<ast.Name object at 0x7da1b1fd74f0>]]]] begin[:]
variable[train_step] assign[=] call[name[ema].apply, parameter[name[var_list]]]
variable[avg_params] assign[=] <ast.ListComp object at 0x7da1b1fd6bf0>
variable[tmp_params] assign[=] <ast.ListComp object at 0x7da1b1fd7040>
variable[param_to_tmp] assign[=] <ast.ListComp object at 0x7da1b1fd61d0>
with call[name[tf].control_dependencies, parameter[name[param_to_tmp]]] begin[:]
variable[avg_to_param] assign[=] <ast.ListComp object at 0x7da1b1fd6e60>
with call[name[tf].control_dependencies, parameter[name[avg_to_param]]] begin[:]
variable[tmp_to_avg] assign[=] <ast.ListComp object at 0x7da1b1fd4040>
variable[swap] assign[=] name[tmp_to_avg]
variable[batch_size] assign[=] name[args].batch_size
assert[compare[binary_operation[name[batch_size] <ast.Mod object at 0x7da2590d6920> name[num_devices]] equal[==] constant[0]]]
variable[device_batch_size] assign[=] binary_operation[name[batch_size] <ast.FloorDiv object at 0x7da2590d6bc0> name[num_devices]]
if name[init_all] begin[:]
call[name[sess].run, parameter[call[name[tf].global_variables_initializer, parameter[]]]]
for taget[name[epoch]] in starred[call[name[xrange], parameter[name[args].nb_epochs]]] begin[:]
if compare[name[dataset_train] is_not constant[None]] begin[:]
variable[nb_batches] assign[=] call[name[int], parameter[call[name[math].ceil, parameter[binary_operation[call[name[float], parameter[name[dataset_size]]] / name[batch_size]]]]]]
variable[prev] assign[=] call[name[time].time, parameter[]]
for taget[name[batch]] in starred[call[name[range], parameter[name[nb_batches]]]] begin[:]
if compare[name[dataset_train] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da1b1fd7af0> assign[=] call[name[sess].run, parameter[name[data_iterator]]]
<ast.Tuple object at 0x7da1b1fd51e0> assign[=] tuple[[<ast.Constant object at 0x7da1b1fd74c0>, <ast.Name object at 0x7da1b1fd5c60>]]
variable[feed_dict] assign[=] dictionary[[<ast.Name object at 0x7da1b1fd4a30>, <ast.Name object at 0x7da1b1fd67a0>], [<ast.Name object at 0x7da1b1fd7a30>, <ast.Name object at 0x7da1b1fd6950>]]
for taget[name[dev_idx]] in starred[call[name[xrange], parameter[name[num_devices]]]] begin[:]
variable[cur_start] assign[=] binary_operation[name[start] + binary_operation[name[dev_idx] * name[device_batch_size]]]
variable[cur_end] assign[=] binary_operation[name[start] + binary_operation[binary_operation[name[dev_idx] + constant[1]] * name[device_batch_size]]]
call[name[feed_dict]][call[name[xs]][name[dev_idx]]] assign[=] call[name[x_train_shuffled]][<ast.Slice object at 0x7da1b1fd5900>]
call[name[feed_dict]][call[name[ys]][name[dev_idx]]] assign[=] call[name[y_train_shuffled]][<ast.Slice object at 0x7da1b1f75660>]
if <ast.BoolOp object at 0x7da1b1f77ac0> begin[:]
variable[msg] assign[=] constant[batch_size (%d) must be a multiple of num_devices (%d).
CUDA_VISIBLE_DEVICES: %s
devices: %s]
variable[args] assign[=] tuple[[<ast.Name object at 0x7da1b1f75780>, <ast.Name object at 0x7da1b1f773d0>, <ast.Subscript object at 0x7da1b1f77b80>, <ast.Call object at 0x7da1b1f77ca0>]]
<ast.Raise object at 0x7da1b1f74160>
if compare[name[feed] is_not constant[None]] begin[:]
call[name[feed_dict].update, parameter[name[feed]]]
<ast.Tuple object at 0x7da1b1f77040> assign[=] call[name[sess].run, parameter[list[[<ast.Name object at 0x7da1b1f754e0>, <ast.Name object at 0x7da1b1f76c20>]]]]
if compare[call[name[np].abs, parameter[name[loss_numpy]]] greater[>] name[loss_threshold]] begin[:]
<ast.Raise object at 0x7da1b1f74430>
if <ast.BoolOp object at 0x7da1b1f77640> begin[:]
<ast.Raise object at 0x7da1b1f75570>
assert[<ast.BoolOp object at 0x7da1b1f75810>]
variable[cur] assign[=] call[name[time].time, parameter[]]
call[name[_logger].info, parameter[binary_operation[binary_operation[binary_operation[binary_operation[constant[Epoch ] + call[name[str], parameter[name[epoch]]]] + constant[ took ]] + call[name[str], parameter[binary_operation[name[cur] - name[prev]]]]] + constant[ seconds]]]]
if compare[name[evaluate] is_not constant[None]] begin[:]
if name[use_ema] begin[:]
call[name[sess].run, parameter[name[swap]]]
call[name[evaluate], parameter[]]
if name[use_ema] begin[:]
call[name[sess].run, parameter[name[swap]]]
if name[use_ema] begin[:]
call[name[sess].run, parameter[name[swap]]]
return[constant[True]] | keyword[def] identifier[train] ( identifier[sess] , identifier[loss] , identifier[x_train] , identifier[y_train] ,
identifier[init_all] = keyword[False] , identifier[evaluate] = keyword[None] , identifier[feed] = keyword[None] , identifier[args] = keyword[None] ,
identifier[rng] = keyword[None] , identifier[var_list] = keyword[None] , identifier[fprop_args] = keyword[None] , identifier[optimizer] = keyword[None] ,
identifier[devices] = keyword[None] , identifier[x_batch_preprocessor] = keyword[None] , identifier[use_ema] = keyword[False] ,
identifier[ema_decay] = literal[int] , identifier[run_canary] = keyword[None] ,
identifier[loss_threshold] = literal[int] , identifier[dataset_train] = keyword[None] , identifier[dataset_size] = keyword[None] ):
literal[string]
identifier[canary] . identifier[run_canary] ()
keyword[if] identifier[run_canary] keyword[is] keyword[not] keyword[None] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] )
identifier[args] = identifier[_ArgsWrapper] ( identifier[args] keyword[or] {})
identifier[fprop_args] = identifier[fprop_args] keyword[or] {}
keyword[if] identifier[args] . identifier[nb_epochs] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[optimizer] keyword[is] keyword[None] :
keyword[if] identifier[args] . identifier[learning_rate] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[assert] identifier[args] . identifier[batch_size] , literal[string]
keyword[if] identifier[rng] keyword[is] keyword[None] :
identifier[rng] = identifier[np] . identifier[random] . identifier[RandomState] ()
keyword[if] identifier[optimizer] keyword[is] keyword[None] :
identifier[optimizer] = identifier[tf] . identifier[train] . identifier[AdamOptimizer] ( identifier[learning_rate] = identifier[args] . identifier[learning_rate] )
keyword[else] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[optimizer] , identifier[tf] . identifier[train] . identifier[Optimizer] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[grads] =[]
identifier[xs] =[]
identifier[preprocessed_xs] =[]
identifier[ys] =[]
keyword[if] identifier[dataset_train] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[x_train] keyword[is] keyword[None] keyword[and] identifier[y_train] keyword[is] keyword[None] keyword[and] identifier[x_batch_preprocessor] keyword[is] keyword[None]
keyword[if] identifier[dataset_size] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[data_iterator] = identifier[dataset_train] . identifier[make_one_shot_iterator] (). identifier[get_next] ()
identifier[x_train] , identifier[y_train] = identifier[sess] . identifier[run] ( identifier[data_iterator] )
identifier[devices] = identifier[infer_devices] ( identifier[devices] )
keyword[for] identifier[device] keyword[in] identifier[devices] :
keyword[with] identifier[tf] . identifier[device] ( identifier[device] ):
identifier[x] = identifier[tf] . identifier[placeholder] ( identifier[x_train] . identifier[dtype] ,( keyword[None] ,)+ identifier[x_train] . identifier[shape] [ literal[int] :])
identifier[y] = identifier[tf] . identifier[placeholder] ( identifier[y_train] . identifier[dtype] ,( keyword[None] ,)+ identifier[y_train] . identifier[shape] [ literal[int] :])
identifier[xs] . identifier[append] ( identifier[x] )
identifier[ys] . identifier[append] ( identifier[y] )
keyword[if] identifier[x_batch_preprocessor] keyword[is] keyword[not] keyword[None] :
identifier[x] = identifier[x_batch_preprocessor] ( identifier[x] )
identifier[preprocessed_xs] . identifier[append] ( identifier[x] )
identifier[loss_value] = identifier[loss] . identifier[fprop] ( identifier[x] , identifier[y] ,** identifier[fprop_args] )
identifier[grads] . identifier[append] ( identifier[optimizer] . identifier[compute_gradients] (
identifier[loss_value] , identifier[var_list] = identifier[var_list] ))
identifier[num_devices] = identifier[len] ( identifier[devices] )
identifier[print] ( literal[string] , identifier[num_devices] )
identifier[grad] = identifier[avg_grads] ( identifier[grads] )
keyword[with] identifier[tf] . identifier[control_dependencies] ( identifier[tf] . identifier[get_collection] ( identifier[tf] . identifier[GraphKeys] . identifier[UPDATE_OPS] )):
identifier[train_step] = identifier[optimizer] . identifier[apply_gradients] ( identifier[grad] )
identifier[epoch_tf] = identifier[tf] . identifier[placeholder] ( identifier[tf] . identifier[int32] ,[])
identifier[batch_tf] = identifier[tf] . identifier[placeholder] ( identifier[tf] . identifier[int32] ,[])
keyword[if] identifier[use_ema] :
keyword[if] identifier[callable] ( identifier[ema_decay] ):
identifier[ema_decay] = identifier[ema_decay] ( identifier[epoch_tf] , identifier[batch_tf] )
identifier[ema] = identifier[tf] . identifier[train] . identifier[ExponentialMovingAverage] ( identifier[decay] = identifier[ema_decay] )
keyword[with] identifier[tf] . identifier[control_dependencies] ([ identifier[train_step] ]):
identifier[train_step] = identifier[ema] . identifier[apply] ( identifier[var_list] )
identifier[avg_params] =[ identifier[ema] . identifier[average] ( identifier[param] ) keyword[for] identifier[param] keyword[in] identifier[var_list] ]
identifier[tmp_params] =[ identifier[tf] . identifier[Variable] ( identifier[param] , identifier[trainable] = keyword[False] )
keyword[for] identifier[param] keyword[in] identifier[var_list] ]
identifier[param_to_tmp] =[ identifier[tf] . identifier[assign] ( identifier[tmp] , identifier[param] )
keyword[for] identifier[tmp] , identifier[param] keyword[in] identifier[safe_zip] ( identifier[tmp_params] , identifier[var_list] )]
keyword[with] identifier[tf] . identifier[control_dependencies] ( identifier[param_to_tmp] ):
identifier[avg_to_param] =[ identifier[tf] . identifier[assign] ( identifier[param] , identifier[avg] )
keyword[for] identifier[param] , identifier[avg] keyword[in] identifier[safe_zip] ( identifier[var_list] , identifier[avg_params] )]
keyword[with] identifier[tf] . identifier[control_dependencies] ( identifier[avg_to_param] ):
identifier[tmp_to_avg] =[ identifier[tf] . identifier[assign] ( identifier[avg] , identifier[tmp] )
keyword[for] identifier[avg] , identifier[tmp] keyword[in] identifier[safe_zip] ( identifier[avg_params] , identifier[tmp_params] )]
identifier[swap] = identifier[tmp_to_avg]
identifier[batch_size] = identifier[args] . identifier[batch_size]
keyword[assert] identifier[batch_size] % identifier[num_devices] == literal[int]
identifier[device_batch_size] = identifier[batch_size] // identifier[num_devices]
keyword[if] identifier[init_all] :
identifier[sess] . identifier[run] ( identifier[tf] . identifier[global_variables_initializer] ())
keyword[else] :
identifier[initialize_uninitialized_global_variables] ( identifier[sess] )
keyword[for] identifier[epoch] keyword[in] identifier[xrange] ( identifier[args] . identifier[nb_epochs] ):
keyword[if] identifier[dataset_train] keyword[is] keyword[not] keyword[None] :
identifier[nb_batches] = identifier[int] ( identifier[math] . identifier[ceil] ( identifier[float] ( identifier[dataset_size] )/ identifier[batch_size] ))
keyword[else] :
identifier[index_shuf] = identifier[list] ( identifier[range] ( identifier[len] ( identifier[x_train] )))
keyword[while] identifier[len] ( identifier[index_shuf] )% identifier[batch_size] != literal[int] :
identifier[index_shuf] . identifier[append] ( identifier[rng] . identifier[randint] ( identifier[len] ( identifier[x_train] )))
identifier[nb_batches] = identifier[len] ( identifier[index_shuf] )// identifier[batch_size]
identifier[rng] . identifier[shuffle] ( identifier[index_shuf] )
identifier[x_train_shuffled] = identifier[x_train] [ identifier[index_shuf] ]
identifier[y_train_shuffled] = identifier[y_train] [ identifier[index_shuf] ]
identifier[prev] = identifier[time] . identifier[time] ()
keyword[for] identifier[batch] keyword[in] identifier[range] ( identifier[nb_batches] ):
keyword[if] identifier[dataset_train] keyword[is] keyword[not] keyword[None] :
identifier[x_train_shuffled] , identifier[y_train_shuffled] = identifier[sess] . identifier[run] ( identifier[data_iterator] )
identifier[start] , identifier[end] = literal[int] , identifier[batch_size]
keyword[else] :
identifier[start] = identifier[batch] * identifier[batch_size]
identifier[end] =( identifier[batch] + literal[int] )* identifier[batch_size]
identifier[diff] = identifier[end] - identifier[start]
keyword[assert] identifier[diff] == identifier[batch_size]
identifier[feed_dict] ={ identifier[epoch_tf] : identifier[epoch] , identifier[batch_tf] : identifier[batch] }
keyword[for] identifier[dev_idx] keyword[in] identifier[xrange] ( identifier[num_devices] ):
identifier[cur_start] = identifier[start] + identifier[dev_idx] * identifier[device_batch_size]
identifier[cur_end] = identifier[start] +( identifier[dev_idx] + literal[int] )* identifier[device_batch_size]
identifier[feed_dict] [ identifier[xs] [ identifier[dev_idx] ]]= identifier[x_train_shuffled] [ identifier[cur_start] : identifier[cur_end] ]
identifier[feed_dict] [ identifier[ys] [ identifier[dev_idx] ]]= identifier[y_train_shuffled] [ identifier[cur_start] : identifier[cur_end] ]
keyword[if] identifier[cur_end] != identifier[end] keyword[and] identifier[dataset_train] keyword[is] keyword[None] :
identifier[msg] =( literal[string]
literal[string]
literal[string] )
identifier[args] =( identifier[batch_size] , identifier[num_devices] ,
identifier[os] . identifier[environ] [ literal[string] ],
identifier[str] ( identifier[devices] ))
keyword[raise] identifier[ValueError] ( identifier[msg] % identifier[args] )
keyword[if] identifier[feed] keyword[is] keyword[not] keyword[None] :
identifier[feed_dict] . identifier[update] ( identifier[feed] )
identifier[_] , identifier[loss_numpy] = identifier[sess] . identifier[run] (
[ identifier[train_step] , identifier[loss_value] ], identifier[feed_dict] = identifier[feed_dict] )
keyword[if] identifier[np] . identifier[abs] ( identifier[loss_numpy] )> identifier[loss_threshold] :
keyword[raise] identifier[ValueError] ( literal[string] , identifier[loss_numpy] )
keyword[if] identifier[np] . identifier[isnan] ( identifier[loss_numpy] ) keyword[or] identifier[np] . identifier[isinf] ( identifier[loss_numpy] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[assert] ( identifier[dataset_train] keyword[is] keyword[not] keyword[None] keyword[or]
identifier[end] == identifier[len] ( identifier[index_shuf] ))
identifier[cur] = identifier[time] . identifier[time] ()
identifier[_logger] . identifier[info] ( literal[string] + identifier[str] ( identifier[epoch] )+ literal[string] +
identifier[str] ( identifier[cur] - identifier[prev] )+ literal[string] )
keyword[if] identifier[evaluate] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[use_ema] :
identifier[sess] . identifier[run] ( identifier[swap] )
identifier[evaluate] ()
keyword[if] identifier[use_ema] :
identifier[sess] . identifier[run] ( identifier[swap] )
keyword[if] identifier[use_ema] :
identifier[sess] . identifier[run] ( identifier[swap] )
keyword[return] keyword[True] | def train(sess, loss, x_train, y_train, init_all=False, evaluate=None, feed=None, args=None, rng=None, var_list=None, fprop_args=None, optimizer=None, devices=None, x_batch_preprocessor=None, use_ema=False, ema_decay=0.998, run_canary=None, loss_threshold=100000.0, dataset_train=None, dataset_size=None):
"""
Run (optionally multi-replica, synchronous) training to minimize `loss`
:param sess: TF session to use when training the graph
:param loss: tensor, the loss to minimize
:param x_train: numpy array with training inputs or tf Dataset
:param y_train: numpy array with training outputs or tf Dataset
:param init_all: (boolean) If set to true, all TF variables in the session
are (re)initialized, otherwise only previously
uninitialized variables are initialized before training.
:param evaluate: function that is run after each training iteration
(typically to display the test/validation accuracy).
:param feed: An optional dictionary that is appended to the feeding
dictionary before the session runs. Can be used to feed
the learning phase of a Keras model for instance.
:param args: dict or argparse `Namespace` object.
Should contain `nb_epochs`, `learning_rate`,
`batch_size`
:param rng: Instance of numpy.random.RandomState
:param var_list: Optional list of parameters to train.
:param fprop_args: dict, extra arguments to pass to fprop (loss and model).
:param optimizer: Optimizer to be used for training
:param devices: list of device names to use for training
If None, defaults to: all GPUs, if GPUs are available
all devices, if no GPUs are available
:param x_batch_preprocessor: callable
Takes a single tensor containing an x_train batch as input
Returns a single tensor containing an x_train batch as output
Called to preprocess the data before passing the data to the Loss
:param use_ema: bool
If true, uses an exponential moving average of the model parameters
:param ema_decay: float or callable
The decay parameter for EMA, if EMA is used
If a callable rather than a float, this is a callable that takes
the epoch and batch as arguments and returns the ema_decay for
the current batch.
:param loss_threshold: float
Raise an exception if the loss exceeds this value.
This is intended to rapidly detect numerical problems.
Sometimes the loss may legitimately be higher than this value. In
such cases, raise the value. If needed it can be np.inf.
:param dataset_train: tf Dataset instance.
Used as a replacement for x_train, y_train for faster performance.
:param dataset_size: integer, the size of the dataset_train.
:return: True if model trained
"""
# Check whether the hardware is working correctly
canary.run_canary()
if run_canary is not None:
warnings.warn('The `run_canary` argument is deprecated. The canary is now much cheaper and thus runs all the time. The canary now uses its own loss function so it is not necessary to turn off the canary when training with a stochastic loss. Simply quit passing `run_canary`.Passing `run_canary` may become an error on or after 2019-10-16.') # depends on [control=['if'], data=[]]
args = _ArgsWrapper(args or {})
fprop_args = fprop_args or {}
# Check that necessary arguments were given (see doc above)
# Be sure to support 0 epochs for debugging purposes
if args.nb_epochs is None:
raise ValueError('`args` must specify number of epochs') # depends on [control=['if'], data=[]]
if optimizer is None:
if args.learning_rate is None:
raise ValueError('Learning rate was not given in args dict') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
assert args.batch_size, 'Batch size was not given in args dict'
if rng is None:
rng = np.random.RandomState() # depends on [control=['if'], data=['rng']]
if optimizer is None:
optimizer = tf.train.AdamOptimizer(learning_rate=args.learning_rate) # depends on [control=['if'], data=['optimizer']]
elif not isinstance(optimizer, tf.train.Optimizer):
raise ValueError('optimizer object must be from a child class of tf.train.Optimizer') # depends on [control=['if'], data=[]]
grads = []
xs = []
preprocessed_xs = []
ys = []
if dataset_train is not None:
assert x_train is None and y_train is None and (x_batch_preprocessor is None)
if dataset_size is None:
raise ValueError('You must provide a dataset size') # depends on [control=['if'], data=[]]
data_iterator = dataset_train.make_one_shot_iterator().get_next()
(x_train, y_train) = sess.run(data_iterator) # depends on [control=['if'], data=['dataset_train']]
devices = infer_devices(devices)
for device in devices:
with tf.device(device):
x = tf.placeholder(x_train.dtype, (None,) + x_train.shape[1:])
y = tf.placeholder(y_train.dtype, (None,) + y_train.shape[1:])
xs.append(x)
ys.append(y)
if x_batch_preprocessor is not None:
x = x_batch_preprocessor(x) # depends on [control=['if'], data=['x_batch_preprocessor']]
# We need to keep track of these so that the canary can feed
# preprocessed values. If the canary had to feed raw values,
# stochastic preprocessing could make the canary fail.
preprocessed_xs.append(x)
loss_value = loss.fprop(x, y, **fprop_args)
grads.append(optimizer.compute_gradients(loss_value, var_list=var_list)) # depends on [control=['with'], data=[]] # depends on [control=['for'], data=['device']]
num_devices = len(devices)
print('num_devices: ', num_devices)
grad = avg_grads(grads)
# Trigger update operations within the default graph (such as batch_norm).
with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):
train_step = optimizer.apply_gradients(grad) # depends on [control=['with'], data=[]]
epoch_tf = tf.placeholder(tf.int32, [])
batch_tf = tf.placeholder(tf.int32, [])
if use_ema:
if callable(ema_decay):
ema_decay = ema_decay(epoch_tf, batch_tf) # depends on [control=['if'], data=[]]
ema = tf.train.ExponentialMovingAverage(decay=ema_decay)
with tf.control_dependencies([train_step]):
train_step = ema.apply(var_list) # depends on [control=['with'], data=[]]
# Get pointers to the EMA's running average variables
avg_params = [ema.average(param) for param in var_list]
# Make temporary buffers used for swapping the live and running average
# parameters
tmp_params = [tf.Variable(param, trainable=False) for param in var_list]
# Define the swapping operation
param_to_tmp = [tf.assign(tmp, param) for (tmp, param) in safe_zip(tmp_params, var_list)]
with tf.control_dependencies(param_to_tmp):
avg_to_param = [tf.assign(param, avg) for (param, avg) in safe_zip(var_list, avg_params)] # depends on [control=['with'], data=[]]
with tf.control_dependencies(avg_to_param):
tmp_to_avg = [tf.assign(avg, tmp) for (avg, tmp) in safe_zip(avg_params, tmp_params)] # depends on [control=['with'], data=[]]
swap = tmp_to_avg # depends on [control=['if'], data=[]]
batch_size = args.batch_size
assert batch_size % num_devices == 0
device_batch_size = batch_size // num_devices
if init_all:
sess.run(tf.global_variables_initializer()) # depends on [control=['if'], data=[]]
else:
initialize_uninitialized_global_variables(sess)
for epoch in xrange(args.nb_epochs):
if dataset_train is not None:
nb_batches = int(math.ceil(float(dataset_size) / batch_size)) # depends on [control=['if'], data=[]]
else:
# Indices to shuffle training set
index_shuf = list(range(len(x_train)))
# Randomly repeat a few training examples each epoch to avoid
# having a too-small batch
while len(index_shuf) % batch_size != 0:
index_shuf.append(rng.randint(len(x_train))) # depends on [control=['while'], data=[]]
nb_batches = len(index_shuf) // batch_size
rng.shuffle(index_shuf)
# Shuffling here versus inside the loop doesn't seem to affect
# timing very much, but shuffling here makes the code slightly
# easier to read
x_train_shuffled = x_train[index_shuf]
y_train_shuffled = y_train[index_shuf]
prev = time.time()
for batch in range(nb_batches):
if dataset_train is not None:
(x_train_shuffled, y_train_shuffled) = sess.run(data_iterator)
(start, end) = (0, batch_size) # depends on [control=['if'], data=[]]
else:
# Compute batch start and end indices
start = batch * batch_size
end = (batch + 1) * batch_size
# Perform one training step
diff = end - start
assert diff == batch_size
feed_dict = {epoch_tf: epoch, batch_tf: batch}
for dev_idx in xrange(num_devices):
cur_start = start + dev_idx * device_batch_size
cur_end = start + (dev_idx + 1) * device_batch_size
feed_dict[xs[dev_idx]] = x_train_shuffled[cur_start:cur_end]
feed_dict[ys[dev_idx]] = y_train_shuffled[cur_start:cur_end] # depends on [control=['for'], data=['dev_idx']]
if cur_end != end and dataset_train is None:
msg = 'batch_size (%d) must be a multiple of num_devices (%d).\nCUDA_VISIBLE_DEVICES: %s\ndevices: %s'
args = (batch_size, num_devices, os.environ['CUDA_VISIBLE_DEVICES'], str(devices))
raise ValueError(msg % args) # depends on [control=['if'], data=[]]
if feed is not None:
feed_dict.update(feed) # depends on [control=['if'], data=['feed']]
(_, loss_numpy) = sess.run([train_step, loss_value], feed_dict=feed_dict)
if np.abs(loss_numpy) > loss_threshold:
raise ValueError('Extreme loss during training: ', loss_numpy) # depends on [control=['if'], data=[]]
if np.isnan(loss_numpy) or np.isinf(loss_numpy):
raise ValueError('NaN/Inf loss during training') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['batch']]
assert dataset_train is not None or end == len(index_shuf) # Check that all examples were used
cur = time.time()
_logger.info('Epoch ' + str(epoch) + ' took ' + str(cur - prev) + ' seconds')
if evaluate is not None:
if use_ema:
# Before running evaluation, load the running average
# parameters into the live slot, so we can see how well
# the EMA parameters are performing
sess.run(swap) # depends on [control=['if'], data=[]]
evaluate()
if use_ema:
# Swap the parameters back, so that we continue training
# on the live parameters
sess.run(swap) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['evaluate']] # depends on [control=['for'], data=['epoch']]
if use_ema:
# When training is done, swap the running average parameters into
# the live slot, so that we use them when we deploy the model
sess.run(swap) # depends on [control=['if'], data=[]]
return True |
def hotstart(self):
"""
Prepare simulation hotstart info
"""
if self.write_hotstart:
hotstart_time_str = self.event_manager.simulation_end.strftime("%Y%m%d_%H%M")
try:
os.mkdir('hotstart')
except OSError:
pass
ov_hotstart_path = os.path.join('..', 'hotstart',
'{0}_ov_hotstart_{1}.ovh'.format(self.project_manager.name,
hotstart_time_str))
self._update_card("WRITE_OV_HOTSTART", ov_hotstart_path, True)
chan_hotstart_path = os.path.join('..', 'hotstart',
'{0}_chan_hotstart_{1}'.format(self.project_manager.name,
hotstart_time_str))
self._update_card("WRITE_CHAN_HOTSTART", chan_hotstart_path, True)
sm_hotstart_path = os.path.join('..', 'hotstart',
'{0}_sm_hotstart_{1}.smh'.format(self.project_manager.name,
hotstart_time_str))
self._update_card("WRITE_SM_HOTSTART", sm_hotstart_path, True)
else:
self._delete_card("WRITE_OV_HOTSTART")
self._delete_card("WRITE_CHAN_HOTSTART")
self._delete_card("WRITE_SM_HOTSTART")
if self.read_hotstart:
hotstart_time_str = self.event_manager.simulation_start.strftime("%Y%m%d_%H%M")
# OVERLAND
expected_ov_hotstart = os.path.join('hotstart',
'{0}_ov_hotstart_{1}.ovh'.format(self.project_manager.name,
hotstart_time_str))
if os.path.exists(expected_ov_hotstart):
self._update_card("READ_OV_HOTSTART", os.path.join("..", expected_ov_hotstart), True)
else:
self._delete_card("READ_OV_HOTSTART")
log.warning("READ_OV_HOTSTART not included as "
"{0} does not exist ...".format(expected_ov_hotstart))
# CHANNEL
expected_chan_hotstart = os.path.join('hotstart',
'{0}_chan_hotstart_{1}'.format(self.project_manager.name,
hotstart_time_str))
if os.path.exists("{0}.qht".format(expected_chan_hotstart)) \
and os.path.exists("{0}.dht".format(expected_chan_hotstart)):
self._update_card("READ_CHAN_HOTSTART", os.path.join("..", expected_chan_hotstart), True)
else:
self._delete_card("READ_CHAN_HOTSTART")
log.warning("READ_CHAN_HOTSTART not included as "
"{0}.qht and/or {0}.dht does not exist ...".format(expected_chan_hotstart))
# INFILTRATION
expected_sm_hotstart = os.path.join('hotstart',
'{0}_sm_hotstart_{1}.smh'.format(self.project_manager.name,
hotstart_time_str))
if os.path.exists(expected_sm_hotstart):
self._update_card("READ_SM_HOTSTART", os.path.join("..", expected_sm_hotstart), True)
else:
self._delete_card("READ_SM_HOTSTART")
log.warning("READ_SM_HOTSTART not included as"
" {0} does not exist ...".format(expected_sm_hotstart)) | def function[hotstart, parameter[self]]:
constant[
Prepare simulation hotstart info
]
if name[self].write_hotstart begin[:]
variable[hotstart_time_str] assign[=] call[name[self].event_manager.simulation_end.strftime, parameter[constant[%Y%m%d_%H%M]]]
<ast.Try object at 0x7da18f09ebc0>
variable[ov_hotstart_path] assign[=] call[name[os].path.join, parameter[constant[..], constant[hotstart], call[constant[{0}_ov_hotstart_{1}.ovh].format, parameter[name[self].project_manager.name, name[hotstart_time_str]]]]]
call[name[self]._update_card, parameter[constant[WRITE_OV_HOTSTART], name[ov_hotstart_path], constant[True]]]
variable[chan_hotstart_path] assign[=] call[name[os].path.join, parameter[constant[..], constant[hotstart], call[constant[{0}_chan_hotstart_{1}].format, parameter[name[self].project_manager.name, name[hotstart_time_str]]]]]
call[name[self]._update_card, parameter[constant[WRITE_CHAN_HOTSTART], name[chan_hotstart_path], constant[True]]]
variable[sm_hotstart_path] assign[=] call[name[os].path.join, parameter[constant[..], constant[hotstart], call[constant[{0}_sm_hotstart_{1}.smh].format, parameter[name[self].project_manager.name, name[hotstart_time_str]]]]]
call[name[self]._update_card, parameter[constant[WRITE_SM_HOTSTART], name[sm_hotstart_path], constant[True]]]
if name[self].read_hotstart begin[:]
variable[hotstart_time_str] assign[=] call[name[self].event_manager.simulation_start.strftime, parameter[constant[%Y%m%d_%H%M]]]
variable[expected_ov_hotstart] assign[=] call[name[os].path.join, parameter[constant[hotstart], call[constant[{0}_ov_hotstart_{1}.ovh].format, parameter[name[self].project_manager.name, name[hotstart_time_str]]]]]
if call[name[os].path.exists, parameter[name[expected_ov_hotstart]]] begin[:]
call[name[self]._update_card, parameter[constant[READ_OV_HOTSTART], call[name[os].path.join, parameter[constant[..], name[expected_ov_hotstart]]], constant[True]]]
variable[expected_chan_hotstart] assign[=] call[name[os].path.join, parameter[constant[hotstart], call[constant[{0}_chan_hotstart_{1}].format, parameter[name[self].project_manager.name, name[hotstart_time_str]]]]]
if <ast.BoolOp object at 0x7da18f09df60> begin[:]
call[name[self]._update_card, parameter[constant[READ_CHAN_HOTSTART], call[name[os].path.join, parameter[constant[..], name[expected_chan_hotstart]]], constant[True]]]
variable[expected_sm_hotstart] assign[=] call[name[os].path.join, parameter[constant[hotstart], call[constant[{0}_sm_hotstart_{1}.smh].format, parameter[name[self].project_manager.name, name[hotstart_time_str]]]]]
if call[name[os].path.exists, parameter[name[expected_sm_hotstart]]] begin[:]
call[name[self]._update_card, parameter[constant[READ_SM_HOTSTART], call[name[os].path.join, parameter[constant[..], name[expected_sm_hotstart]]], constant[True]]] | keyword[def] identifier[hotstart] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[write_hotstart] :
identifier[hotstart_time_str] = identifier[self] . identifier[event_manager] . identifier[simulation_end] . identifier[strftime] ( literal[string] )
keyword[try] :
identifier[os] . identifier[mkdir] ( literal[string] )
keyword[except] identifier[OSError] :
keyword[pass]
identifier[ov_hotstart_path] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] ,
literal[string] . identifier[format] ( identifier[self] . identifier[project_manager] . identifier[name] ,
identifier[hotstart_time_str] ))
identifier[self] . identifier[_update_card] ( literal[string] , identifier[ov_hotstart_path] , keyword[True] )
identifier[chan_hotstart_path] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] ,
literal[string] . identifier[format] ( identifier[self] . identifier[project_manager] . identifier[name] ,
identifier[hotstart_time_str] ))
identifier[self] . identifier[_update_card] ( literal[string] , identifier[chan_hotstart_path] , keyword[True] )
identifier[sm_hotstart_path] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] ,
literal[string] . identifier[format] ( identifier[self] . identifier[project_manager] . identifier[name] ,
identifier[hotstart_time_str] ))
identifier[self] . identifier[_update_card] ( literal[string] , identifier[sm_hotstart_path] , keyword[True] )
keyword[else] :
identifier[self] . identifier[_delete_card] ( literal[string] )
identifier[self] . identifier[_delete_card] ( literal[string] )
identifier[self] . identifier[_delete_card] ( literal[string] )
keyword[if] identifier[self] . identifier[read_hotstart] :
identifier[hotstart_time_str] = identifier[self] . identifier[event_manager] . identifier[simulation_start] . identifier[strftime] ( literal[string] )
identifier[expected_ov_hotstart] = identifier[os] . identifier[path] . identifier[join] ( literal[string] ,
literal[string] . identifier[format] ( identifier[self] . identifier[project_manager] . identifier[name] ,
identifier[hotstart_time_str] ))
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[expected_ov_hotstart] ):
identifier[self] . identifier[_update_card] ( literal[string] , identifier[os] . identifier[path] . identifier[join] ( literal[string] , identifier[expected_ov_hotstart] ), keyword[True] )
keyword[else] :
identifier[self] . identifier[_delete_card] ( literal[string] )
identifier[log] . identifier[warning] ( literal[string]
literal[string] . identifier[format] ( identifier[expected_ov_hotstart] ))
identifier[expected_chan_hotstart] = identifier[os] . identifier[path] . identifier[join] ( literal[string] ,
literal[string] . identifier[format] ( identifier[self] . identifier[project_manager] . identifier[name] ,
identifier[hotstart_time_str] ))
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( literal[string] . identifier[format] ( identifier[expected_chan_hotstart] )) keyword[and] identifier[os] . identifier[path] . identifier[exists] ( literal[string] . identifier[format] ( identifier[expected_chan_hotstart] )):
identifier[self] . identifier[_update_card] ( literal[string] , identifier[os] . identifier[path] . identifier[join] ( literal[string] , identifier[expected_chan_hotstart] ), keyword[True] )
keyword[else] :
identifier[self] . identifier[_delete_card] ( literal[string] )
identifier[log] . identifier[warning] ( literal[string]
literal[string] . identifier[format] ( identifier[expected_chan_hotstart] ))
identifier[expected_sm_hotstart] = identifier[os] . identifier[path] . identifier[join] ( literal[string] ,
literal[string] . identifier[format] ( identifier[self] . identifier[project_manager] . identifier[name] ,
identifier[hotstart_time_str] ))
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[expected_sm_hotstart] ):
identifier[self] . identifier[_update_card] ( literal[string] , identifier[os] . identifier[path] . identifier[join] ( literal[string] , identifier[expected_sm_hotstart] ), keyword[True] )
keyword[else] :
identifier[self] . identifier[_delete_card] ( literal[string] )
identifier[log] . identifier[warning] ( literal[string]
literal[string] . identifier[format] ( identifier[expected_sm_hotstart] )) | def hotstart(self):
"""
Prepare simulation hotstart info
"""
if self.write_hotstart:
hotstart_time_str = self.event_manager.simulation_end.strftime('%Y%m%d_%H%M')
try:
os.mkdir('hotstart') # depends on [control=['try'], data=[]]
except OSError:
pass # depends on [control=['except'], data=[]]
ov_hotstart_path = os.path.join('..', 'hotstart', '{0}_ov_hotstart_{1}.ovh'.format(self.project_manager.name, hotstart_time_str))
self._update_card('WRITE_OV_HOTSTART', ov_hotstart_path, True)
chan_hotstart_path = os.path.join('..', 'hotstart', '{0}_chan_hotstart_{1}'.format(self.project_manager.name, hotstart_time_str))
self._update_card('WRITE_CHAN_HOTSTART', chan_hotstart_path, True)
sm_hotstart_path = os.path.join('..', 'hotstart', '{0}_sm_hotstart_{1}.smh'.format(self.project_manager.name, hotstart_time_str))
self._update_card('WRITE_SM_HOTSTART', sm_hotstart_path, True) # depends on [control=['if'], data=[]]
else:
self._delete_card('WRITE_OV_HOTSTART')
self._delete_card('WRITE_CHAN_HOTSTART')
self._delete_card('WRITE_SM_HOTSTART')
if self.read_hotstart:
hotstart_time_str = self.event_manager.simulation_start.strftime('%Y%m%d_%H%M')
# OVERLAND
expected_ov_hotstart = os.path.join('hotstart', '{0}_ov_hotstart_{1}.ovh'.format(self.project_manager.name, hotstart_time_str))
if os.path.exists(expected_ov_hotstart):
self._update_card('READ_OV_HOTSTART', os.path.join('..', expected_ov_hotstart), True) # depends on [control=['if'], data=[]]
else:
self._delete_card('READ_OV_HOTSTART')
log.warning('READ_OV_HOTSTART not included as {0} does not exist ...'.format(expected_ov_hotstart))
# CHANNEL
expected_chan_hotstart = os.path.join('hotstart', '{0}_chan_hotstart_{1}'.format(self.project_manager.name, hotstart_time_str))
if os.path.exists('{0}.qht'.format(expected_chan_hotstart)) and os.path.exists('{0}.dht'.format(expected_chan_hotstart)):
self._update_card('READ_CHAN_HOTSTART', os.path.join('..', expected_chan_hotstart), True) # depends on [control=['if'], data=[]]
else:
self._delete_card('READ_CHAN_HOTSTART')
log.warning('READ_CHAN_HOTSTART not included as {0}.qht and/or {0}.dht does not exist ...'.format(expected_chan_hotstart))
# INFILTRATION
expected_sm_hotstart = os.path.join('hotstart', '{0}_sm_hotstart_{1}.smh'.format(self.project_manager.name, hotstart_time_str))
if os.path.exists(expected_sm_hotstart):
self._update_card('READ_SM_HOTSTART', os.path.join('..', expected_sm_hotstart), True) # depends on [control=['if'], data=[]]
else:
self._delete_card('READ_SM_HOTSTART')
log.warning('READ_SM_HOTSTART not included as {0} does not exist ...'.format(expected_sm_hotstart)) # depends on [control=['if'], data=[]] |
def _set_ip_anycast_address(self, v, load=False):
"""
Setter method for ip_anycast_address, mapped from YANG variable /routing_system/interface/ve/ip/ip_anycast_address (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ip_anycast_address is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ip_anycast_address() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("ip_address",ip_anycast_address.ip_anycast_address, yang_name="ip-anycast-address", rest_name="anycast-address", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip-address', extensions={u'tailf-common': {u'callpoint': u'IntfVeAnycastIpAddrCallpoint', u'cli-suppress-mode': None, u'cli-compact-syntax': None, u'alt-name': u'anycast-address', u'info': u'Set the IP address of an interface'}}), is_container='list', yang_name="ip-anycast-address", rest_name="anycast-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'IntfVeAnycastIpAddrCallpoint', u'cli-suppress-mode': None, u'cli-compact-syntax': None, u'alt-name': u'anycast-address', u'info': u'Set the IP address of an interface'}}, namespace='urn:brocade.com:mgmt:brocade-vrrp', defining_module='brocade-vrrp', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ip_anycast_address must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("ip_address",ip_anycast_address.ip_anycast_address, yang_name="ip-anycast-address", rest_name="anycast-address", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip-address', extensions={u'tailf-common': {u'callpoint': u'IntfVeAnycastIpAddrCallpoint', u'cli-suppress-mode': None, u'cli-compact-syntax': None, u'alt-name': u'anycast-address', u'info': u'Set the IP address of an interface'}}), is_container='list', yang_name="ip-anycast-address", rest_name="anycast-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'IntfVeAnycastIpAddrCallpoint', u'cli-suppress-mode': None, u'cli-compact-syntax': None, u'alt-name': u'anycast-address', u'info': u'Set the IP address of an interface'}}, namespace='urn:brocade.com:mgmt:brocade-vrrp', defining_module='brocade-vrrp', yang_type='list', is_config=True)""",
})
self.__ip_anycast_address = t
if hasattr(self, '_set'):
self._set() | def function[_set_ip_anycast_address, parameter[self, v, load]]:
constant[
Setter method for ip_anycast_address, mapped from YANG variable /routing_system/interface/ve/ip/ip_anycast_address (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ip_anycast_address is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ip_anycast_address() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18f00fac0>
name[self].__ip_anycast_address assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_ip_anycast_address] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[ip_anycast_address] . identifier[ip_anycast_address] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__ip_anycast_address] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_ip_anycast_address(self, v, load=False):
"""
Setter method for ip_anycast_address, mapped from YANG variable /routing_system/interface/ve/ip/ip_anycast_address (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ip_anycast_address is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ip_anycast_address() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=YANGListType('ip_address', ip_anycast_address.ip_anycast_address, yang_name='ip-anycast-address', rest_name='anycast-address', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip-address', extensions={u'tailf-common': {u'callpoint': u'IntfVeAnycastIpAddrCallpoint', u'cli-suppress-mode': None, u'cli-compact-syntax': None, u'alt-name': u'anycast-address', u'info': u'Set the IP address of an interface'}}), is_container='list', yang_name='ip-anycast-address', rest_name='anycast-address', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'IntfVeAnycastIpAddrCallpoint', u'cli-suppress-mode': None, u'cli-compact-syntax': None, u'alt-name': u'anycast-address', u'info': u'Set the IP address of an interface'}}, namespace='urn:brocade.com:mgmt:brocade-vrrp', defining_module='brocade-vrrp', yang_type='list', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'ip_anycast_address must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("ip_address",ip_anycast_address.ip_anycast_address, yang_name="ip-anycast-address", rest_name="anycast-address", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'ip-address\', extensions={u\'tailf-common\': {u\'callpoint\': u\'IntfVeAnycastIpAddrCallpoint\', u\'cli-suppress-mode\': None, u\'cli-compact-syntax\': None, u\'alt-name\': u\'anycast-address\', u\'info\': u\'Set the IP address of an interface\'}}), is_container=\'list\', yang_name="ip-anycast-address", rest_name="anycast-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'callpoint\': u\'IntfVeAnycastIpAddrCallpoint\', u\'cli-suppress-mode\': None, u\'cli-compact-syntax\': None, u\'alt-name\': u\'anycast-address\', u\'info\': u\'Set the IP address of an interface\'}}, namespace=\'urn:brocade.com:mgmt:brocade-vrrp\', defining_module=\'brocade-vrrp\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__ip_anycast_address = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def delete(name):
'''
Delete the namespace from the register
USAGE:
.. code-block:: yaml
deletens:
reg.delete:
- name: myregister
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if name in __reg__:
del __reg__[name]
return ret | def function[delete, parameter[name]]:
constant[
Delete the namespace from the register
USAGE:
.. code-block:: yaml
deletens:
reg.delete:
- name: myregister
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f3e9e0>, <ast.Constant object at 0x7da1b1f3e380>, <ast.Constant object at 0x7da1b1f3e980>, <ast.Constant object at 0x7da1b1f3e7a0>], [<ast.Name object at 0x7da1b1f3dd20>, <ast.Dict object at 0x7da1b1f3c760>, <ast.Constant object at 0x7da1b1f3c940>, <ast.Constant object at 0x7da1b1f3cd00>]]
if compare[name[name] in name[__reg__]] begin[:]
<ast.Delete object at 0x7da1b1f3db70>
return[name[ret]] | keyword[def] identifier[delete] ( identifier[name] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] ,
literal[string] :{},
literal[string] : literal[string] ,
literal[string] : keyword[True] }
keyword[if] identifier[name] keyword[in] identifier[__reg__] :
keyword[del] identifier[__reg__] [ identifier[name] ]
keyword[return] identifier[ret] | def delete(name):
"""
Delete the namespace from the register
USAGE:
.. code-block:: yaml
deletens:
reg.delete:
- name: myregister
"""
ret = {'name': name, 'changes': {}, 'comment': '', 'result': True}
if name in __reg__:
del __reg__[name] # depends on [control=['if'], data=['name', '__reg__']]
return ret |
def _bytes_lines(self):
"""Map byte offsets to line numbers in `code`.
Uses co_lnotab described in Python/compile.c to map byte offsets to
line numbers. Produces a sequence: (b0, l0), (b1, l1), ...
Only byte offsets that correspond to line numbers are included in the
results.
"""
# Adapted from dis.py in the standard library.
byte_increments = bytes_to_ints(self.code.co_lnotab[0::2])
line_increments = bytes_to_ints(self.code.co_lnotab[1::2])
last_line_num = None
line_num = self.code.co_firstlineno
byte_num = 0
for byte_incr, line_incr in zip(byte_increments, line_increments):
if byte_incr:
if line_num != last_line_num:
yield (byte_num, line_num)
last_line_num = line_num
byte_num += byte_incr
line_num += line_incr
if line_num != last_line_num:
yield (byte_num, line_num) | def function[_bytes_lines, parameter[self]]:
constant[Map byte offsets to line numbers in `code`.
Uses co_lnotab described in Python/compile.c to map byte offsets to
line numbers. Produces a sequence: (b0, l0), (b1, l1), ...
Only byte offsets that correspond to line numbers are included in the
results.
]
variable[byte_increments] assign[=] call[name[bytes_to_ints], parameter[call[name[self].code.co_lnotab][<ast.Slice object at 0x7da20e9b1a20>]]]
variable[line_increments] assign[=] call[name[bytes_to_ints], parameter[call[name[self].code.co_lnotab][<ast.Slice object at 0x7da20e9b2b90>]]]
variable[last_line_num] assign[=] constant[None]
variable[line_num] assign[=] name[self].code.co_firstlineno
variable[byte_num] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da18ede7730>, <ast.Name object at 0x7da18ede4970>]]] in starred[call[name[zip], parameter[name[byte_increments], name[line_increments]]]] begin[:]
if name[byte_incr] begin[:]
if compare[name[line_num] not_equal[!=] name[last_line_num]] begin[:]
<ast.Yield object at 0x7da18ede4730>
variable[last_line_num] assign[=] name[line_num]
<ast.AugAssign object at 0x7da18ede7e20>
<ast.AugAssign object at 0x7da18ede6800>
if compare[name[line_num] not_equal[!=] name[last_line_num]] begin[:]
<ast.Yield object at 0x7da18ede5630> | keyword[def] identifier[_bytes_lines] ( identifier[self] ):
literal[string]
identifier[byte_increments] = identifier[bytes_to_ints] ( identifier[self] . identifier[code] . identifier[co_lnotab] [ literal[int] :: literal[int] ])
identifier[line_increments] = identifier[bytes_to_ints] ( identifier[self] . identifier[code] . identifier[co_lnotab] [ literal[int] :: literal[int] ])
identifier[last_line_num] = keyword[None]
identifier[line_num] = identifier[self] . identifier[code] . identifier[co_firstlineno]
identifier[byte_num] = literal[int]
keyword[for] identifier[byte_incr] , identifier[line_incr] keyword[in] identifier[zip] ( identifier[byte_increments] , identifier[line_increments] ):
keyword[if] identifier[byte_incr] :
keyword[if] identifier[line_num] != identifier[last_line_num] :
keyword[yield] ( identifier[byte_num] , identifier[line_num] )
identifier[last_line_num] = identifier[line_num]
identifier[byte_num] += identifier[byte_incr]
identifier[line_num] += identifier[line_incr]
keyword[if] identifier[line_num] != identifier[last_line_num] :
keyword[yield] ( identifier[byte_num] , identifier[line_num] ) | def _bytes_lines(self):
"""Map byte offsets to line numbers in `code`.
Uses co_lnotab described in Python/compile.c to map byte offsets to
line numbers. Produces a sequence: (b0, l0), (b1, l1), ...
Only byte offsets that correspond to line numbers are included in the
results.
"""
# Adapted from dis.py in the standard library.
byte_increments = bytes_to_ints(self.code.co_lnotab[0::2])
line_increments = bytes_to_ints(self.code.co_lnotab[1::2])
last_line_num = None
line_num = self.code.co_firstlineno
byte_num = 0
for (byte_incr, line_incr) in zip(byte_increments, line_increments):
if byte_incr:
if line_num != last_line_num:
yield (byte_num, line_num)
last_line_num = line_num # depends on [control=['if'], data=['line_num', 'last_line_num']]
byte_num += byte_incr # depends on [control=['if'], data=[]]
line_num += line_incr # depends on [control=['for'], data=[]]
if line_num != last_line_num:
yield (byte_num, line_num) # depends on [control=['if'], data=['line_num']] |
def can_create_objective_bank_with_record_types(self, objective_bank_record_types):
"""Tests if this user can create a single ``ObjectiveBank`` using the desired record types.
While ``LearningManager.getObjectiveBankRecordTypes()`` can be
used to examine which records are supported, this method tests
which record(s) are required for creating a specific
``ObjectiveBank``. Providing an empty array tests if an
``ObjectiveBank`` can be created with no records.
arg: objective_bank_record_types (osid.type.Type[]): array of
objective bank record types
return: (boolean) - ``true`` if ``ObjectiveBank`` creation using
the specified ``Types`` is supported, ``false``
otherwise
raise: NullArgument - ``objective_bank_record_types`` is
``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.can_create_bin_with_record_types
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if self._catalog_session is not None:
return self._catalog_session.can_create_catalog_with_record_types(catalog_record_types=objective_bank_record_types)
return True | def function[can_create_objective_bank_with_record_types, parameter[self, objective_bank_record_types]]:
constant[Tests if this user can create a single ``ObjectiveBank`` using the desired record types.
While ``LearningManager.getObjectiveBankRecordTypes()`` can be
used to examine which records are supported, this method tests
which record(s) are required for creating a specific
``ObjectiveBank``. Providing an empty array tests if an
``ObjectiveBank`` can be created with no records.
arg: objective_bank_record_types (osid.type.Type[]): array of
objective bank record types
return: (boolean) - ``true`` if ``ObjectiveBank`` creation using
the specified ``Types`` is supported, ``false``
otherwise
raise: NullArgument - ``objective_bank_record_types`` is
``null``
*compliance: mandatory -- This method must be implemented.*
]
if compare[name[self]._catalog_session is_not constant[None]] begin[:]
return[call[name[self]._catalog_session.can_create_catalog_with_record_types, parameter[]]]
return[constant[True]] | keyword[def] identifier[can_create_objective_bank_with_record_types] ( identifier[self] , identifier[objective_bank_record_types] ):
literal[string]
keyword[if] identifier[self] . identifier[_catalog_session] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_catalog_session] . identifier[can_create_catalog_with_record_types] ( identifier[catalog_record_types] = identifier[objective_bank_record_types] )
keyword[return] keyword[True] | def can_create_objective_bank_with_record_types(self, objective_bank_record_types):
"""Tests if this user can create a single ``ObjectiveBank`` using the desired record types.
While ``LearningManager.getObjectiveBankRecordTypes()`` can be
used to examine which records are supported, this method tests
which record(s) are required for creating a specific
``ObjectiveBank``. Providing an empty array tests if an
``ObjectiveBank`` can be created with no records.
arg: objective_bank_record_types (osid.type.Type[]): array of
objective bank record types
return: (boolean) - ``true`` if ``ObjectiveBank`` creation using
the specified ``Types`` is supported, ``false``
otherwise
raise: NullArgument - ``objective_bank_record_types`` is
``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.can_create_bin_with_record_types
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if self._catalog_session is not None:
return self._catalog_session.can_create_catalog_with_record_types(catalog_record_types=objective_bank_record_types) # depends on [control=['if'], data=[]]
return True |
def save_pytables(fpath, data, verbose=False):
"""
sudo pip install numexpr
sudo pip install tables
References:
https://pytables.github.io/cookbook/py2exe_howto.html
https://gist.github.com/andrewgiessel/7515520
http://stackoverflow.com/questions/8843062/python-how-to-store-a-numpy-multidimensional-array-in-pytables
http://pytables.github.io/usersguide/tutorials.html#creating-new-array-objects
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_io import * # NOQA
>>> import numpy as np
>>> import utool as ut
>>> # build test data
>>> verbose = True
>>> fpath = 'myfile.pytables.hdf5'
>>> np.random.seed(0)
>>> compression = 'gzip'
>>> data = (np.random.rand(100000, 128) * 255).astype(np.uint8).copy()
>>> # execute function
>>> ut.delete(fpath)
>>> save_pytables(fpath, data, verbose)
>>> data2 = load_pytables(fpath, verbose)
>>> assert data is not data2
>>> assert np.all(data == data2)
>>> assert ut.delete(fpath)
"""
import tables
#from os.path import basename
#fname = basename(fpath)
#shape = data.shape
#dtype = data.dtype
#file_ = tables.open_file(fpath)
verbose = _rectify_verb_write(verbose)
if verbose:
print('[util_io] * save_pytables(%r, data)' % (util_path.tail(fpath),))
with tables.open_file(fpath, 'w') as file_:
atom = tables.Atom.from_dtype(data.dtype)
filters = tables.Filters(complib='blosc', complevel=5)
dset = file_.createCArray(file_.root, 'data', atom, data.shape, filters=filters)
# save w/o compressive filter
#dset = file_.createCArray(file_.root, 'all_data', atom, all_data.shape)
dset[:] = data | def function[save_pytables, parameter[fpath, data, verbose]]:
constant[
sudo pip install numexpr
sudo pip install tables
References:
https://pytables.github.io/cookbook/py2exe_howto.html
https://gist.github.com/andrewgiessel/7515520
http://stackoverflow.com/questions/8843062/python-how-to-store-a-numpy-multidimensional-array-in-pytables
http://pytables.github.io/usersguide/tutorials.html#creating-new-array-objects
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_io import * # NOQA
>>> import numpy as np
>>> import utool as ut
>>> # build test data
>>> verbose = True
>>> fpath = 'myfile.pytables.hdf5'
>>> np.random.seed(0)
>>> compression = 'gzip'
>>> data = (np.random.rand(100000, 128) * 255).astype(np.uint8).copy()
>>> # execute function
>>> ut.delete(fpath)
>>> save_pytables(fpath, data, verbose)
>>> data2 = load_pytables(fpath, verbose)
>>> assert data is not data2
>>> assert np.all(data == data2)
>>> assert ut.delete(fpath)
]
import module[tables]
variable[verbose] assign[=] call[name[_rectify_verb_write], parameter[name[verbose]]]
if name[verbose] begin[:]
call[name[print], parameter[binary_operation[constant[[util_io] * save_pytables(%r, data)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b24b3b80>]]]]]
with call[name[tables].open_file, parameter[name[fpath], constant[w]]] begin[:]
variable[atom] assign[=] call[name[tables].Atom.from_dtype, parameter[name[data].dtype]]
variable[filters] assign[=] call[name[tables].Filters, parameter[]]
variable[dset] assign[=] call[name[file_].createCArray, parameter[name[file_].root, constant[data], name[atom], name[data].shape]]
call[name[dset]][<ast.Slice object at 0x7da1b2507dc0>] assign[=] name[data] | keyword[def] identifier[save_pytables] ( identifier[fpath] , identifier[data] , identifier[verbose] = keyword[False] ):
literal[string]
keyword[import] identifier[tables]
identifier[verbose] = identifier[_rectify_verb_write] ( identifier[verbose] )
keyword[if] identifier[verbose] :
identifier[print] ( literal[string] %( identifier[util_path] . identifier[tail] ( identifier[fpath] ),))
keyword[with] identifier[tables] . identifier[open_file] ( identifier[fpath] , literal[string] ) keyword[as] identifier[file_] :
identifier[atom] = identifier[tables] . identifier[Atom] . identifier[from_dtype] ( identifier[data] . identifier[dtype] )
identifier[filters] = identifier[tables] . identifier[Filters] ( identifier[complib] = literal[string] , identifier[complevel] = literal[int] )
identifier[dset] = identifier[file_] . identifier[createCArray] ( identifier[file_] . identifier[root] , literal[string] , identifier[atom] , identifier[data] . identifier[shape] , identifier[filters] = identifier[filters] )
identifier[dset] [:]= identifier[data] | def save_pytables(fpath, data, verbose=False):
"""
sudo pip install numexpr
sudo pip install tables
References:
https://pytables.github.io/cookbook/py2exe_howto.html
https://gist.github.com/andrewgiessel/7515520
http://stackoverflow.com/questions/8843062/python-how-to-store-a-numpy-multidimensional-array-in-pytables
http://pytables.github.io/usersguide/tutorials.html#creating-new-array-objects
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_io import * # NOQA
>>> import numpy as np
>>> import utool as ut
>>> # build test data
>>> verbose = True
>>> fpath = 'myfile.pytables.hdf5'
>>> np.random.seed(0)
>>> compression = 'gzip'
>>> data = (np.random.rand(100000, 128) * 255).astype(np.uint8).copy()
>>> # execute function
>>> ut.delete(fpath)
>>> save_pytables(fpath, data, verbose)
>>> data2 = load_pytables(fpath, verbose)
>>> assert data is not data2
>>> assert np.all(data == data2)
>>> assert ut.delete(fpath)
"""
import tables
#from os.path import basename
#fname = basename(fpath)
#shape = data.shape
#dtype = data.dtype
#file_ = tables.open_file(fpath)
verbose = _rectify_verb_write(verbose)
if verbose:
print('[util_io] * save_pytables(%r, data)' % (util_path.tail(fpath),)) # depends on [control=['if'], data=[]]
with tables.open_file(fpath, 'w') as file_:
atom = tables.Atom.from_dtype(data.dtype)
filters = tables.Filters(complib='blosc', complevel=5)
dset = file_.createCArray(file_.root, 'data', atom, data.shape, filters=filters)
# save w/o compressive filter
#dset = file_.createCArray(file_.root, 'all_data', atom, all_data.shape)
dset[:] = data # depends on [control=['with'], data=['file_']] |
def plugin(tree, file_tokens):
"""Walk the tree and detect invalid escape sequences."""
for token in file_tokens:
if token[0] != STRING: # token[0] == token.type
continue
# token[1] == token.string # python 3
invalid_sequence_match = invalid_escape_sequence_match(token[1])
if invalid_sequence_match:
yield (
token[2][0], # line_number
token[2][1], # offset
'IES: invalid escape sequence %s' %
invalid_sequence_match.group(1), # text
None # check # unused
) | def function[plugin, parameter[tree, file_tokens]]:
constant[Walk the tree and detect invalid escape sequences.]
for taget[name[token]] in starred[name[file_tokens]] begin[:]
if compare[call[name[token]][constant[0]] not_equal[!=] name[STRING]] begin[:]
continue
variable[invalid_sequence_match] assign[=] call[name[invalid_escape_sequence_match], parameter[call[name[token]][constant[1]]]]
if name[invalid_sequence_match] begin[:]
<ast.Yield object at 0x7da18fe90340> | keyword[def] identifier[plugin] ( identifier[tree] , identifier[file_tokens] ):
literal[string]
keyword[for] identifier[token] keyword[in] identifier[file_tokens] :
keyword[if] identifier[token] [ literal[int] ]!= identifier[STRING] :
keyword[continue]
identifier[invalid_sequence_match] = identifier[invalid_escape_sequence_match] ( identifier[token] [ literal[int] ])
keyword[if] identifier[invalid_sequence_match] :
keyword[yield] (
identifier[token] [ literal[int] ][ literal[int] ],
identifier[token] [ literal[int] ][ literal[int] ],
literal[string] %
identifier[invalid_sequence_match] . identifier[group] ( literal[int] ),
keyword[None]
) | def plugin(tree, file_tokens):
"""Walk the tree and detect invalid escape sequences."""
for token in file_tokens:
if token[0] != STRING: # token[0] == token.type
continue # depends on [control=['if'], data=[]]
# token[1] == token.string # python 3
invalid_sequence_match = invalid_escape_sequence_match(token[1])
if invalid_sequence_match: # line_number
# offset
# text
# check # unused
yield (token[2][0], token[2][1], 'IES: invalid escape sequence %s' % invalid_sequence_match.group(1), None) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['token']] |
def sparsity_pattern(self, reordered = True, symmetric = True):
"""
Returns a sparse matrix with the filled pattern. By default,
the routine uses the reordered pattern, and the inverse
permutation is applied if `reordered` is `False`.
:param reordered: boolean (default: `True`)
:param symmetric: boolean (default: `True`)
"""
return cspmatrix(self, 1.0).spmatrix(reordered = reordered, symmetric = symmetric) | def function[sparsity_pattern, parameter[self, reordered, symmetric]]:
constant[
Returns a sparse matrix with the filled pattern. By default,
the routine uses the reordered pattern, and the inverse
permutation is applied if `reordered` is `False`.
:param reordered: boolean (default: `True`)
:param symmetric: boolean (default: `True`)
]
return[call[call[name[cspmatrix], parameter[name[self], constant[1.0]]].spmatrix, parameter[]]] | keyword[def] identifier[sparsity_pattern] ( identifier[self] , identifier[reordered] = keyword[True] , identifier[symmetric] = keyword[True] ):
literal[string]
keyword[return] identifier[cspmatrix] ( identifier[self] , literal[int] ). identifier[spmatrix] ( identifier[reordered] = identifier[reordered] , identifier[symmetric] = identifier[symmetric] ) | def sparsity_pattern(self, reordered=True, symmetric=True):
"""
Returns a sparse matrix with the filled pattern. By default,
the routine uses the reordered pattern, and the inverse
permutation is applied if `reordered` is `False`.
:param reordered: boolean (default: `True`)
:param symmetric: boolean (default: `True`)
"""
return cspmatrix(self, 1.0).spmatrix(reordered=reordered, symmetric=symmetric) |
def select(self, select, table_name, where=None, extra=None):
"""
Send a SELECT query to the database.
:param str select: Attribute for the ``SELECT`` query.
:param str table_name: |arg_select_table_name|
:param where: |arg_select_where|
:type where: |arg_where_type|
:param str extra: |arg_select_extra|
:return: Result of the query execution.
:rtype: sqlite3.Cursor
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
:raises simplesqlite.OperationalError: |raises_operational_error|
"""
self.verify_table_existence(table_name)
return self.execute_query(
six.text_type(Select(select, table_name, where, extra)),
logging.getLogger().findCaller(),
) | def function[select, parameter[self, select, table_name, where, extra]]:
constant[
Send a SELECT query to the database.
:param str select: Attribute for the ``SELECT`` query.
:param str table_name: |arg_select_table_name|
:param where: |arg_select_where|
:type where: |arg_where_type|
:param str extra: |arg_select_extra|
:return: Result of the query execution.
:rtype: sqlite3.Cursor
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
:raises simplesqlite.OperationalError: |raises_operational_error|
]
call[name[self].verify_table_existence, parameter[name[table_name]]]
return[call[name[self].execute_query, parameter[call[name[six].text_type, parameter[call[name[Select], parameter[name[select], name[table_name], name[where], name[extra]]]]], call[call[name[logging].getLogger, parameter[]].findCaller, parameter[]]]]] | keyword[def] identifier[select] ( identifier[self] , identifier[select] , identifier[table_name] , identifier[where] = keyword[None] , identifier[extra] = keyword[None] ):
literal[string]
identifier[self] . identifier[verify_table_existence] ( identifier[table_name] )
keyword[return] identifier[self] . identifier[execute_query] (
identifier[six] . identifier[text_type] ( identifier[Select] ( identifier[select] , identifier[table_name] , identifier[where] , identifier[extra] )),
identifier[logging] . identifier[getLogger] (). identifier[findCaller] (),
) | def select(self, select, table_name, where=None, extra=None):
"""
Send a SELECT query to the database.
:param str select: Attribute for the ``SELECT`` query.
:param str table_name: |arg_select_table_name|
:param where: |arg_select_where|
:type where: |arg_where_type|
:param str extra: |arg_select_extra|
:return: Result of the query execution.
:rtype: sqlite3.Cursor
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
:raises simplesqlite.OperationalError: |raises_operational_error|
"""
self.verify_table_existence(table_name)
return self.execute_query(six.text_type(Select(select, table_name, where, extra)), logging.getLogger().findCaller()) |
def code(self):
"""
The HTTP response code associated with this ResponseObject.
If instantiated directly without overriding the code, returns
200 even if the default for the method is some other value.
Can be set or deleted; in the latter case, the default will be
restored.
"""
if self._code is not None:
return self._code
elif self._defcode is not None:
return self._defcode
return 200 | def function[code, parameter[self]]:
constant[
The HTTP response code associated with this ResponseObject.
If instantiated directly without overriding the code, returns
200 even if the default for the method is some other value.
Can be set or deleted; in the latter case, the default will be
restored.
]
if compare[name[self]._code is_not constant[None]] begin[:]
return[name[self]._code]
return[constant[200]] | keyword[def] identifier[code] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_code] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_code]
keyword[elif] identifier[self] . identifier[_defcode] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_defcode]
keyword[return] literal[int] | def code(self):
"""
The HTTP response code associated with this ResponseObject.
If instantiated directly without overriding the code, returns
200 even if the default for the method is some other value.
Can be set or deleted; in the latter case, the default will be
restored.
"""
if self._code is not None:
return self._code # depends on [control=['if'], data=[]]
elif self._defcode is not None:
return self._defcode # depends on [control=['if'], data=[]]
return 200 |
def start(self):
""" Starts the agent. Among other things, this means connecting
to the master agent, if configured that way. """
if self._status != netsnmpAgentStatus.CONNECTED \
and self._status != netsnmpAgentStatus.RECONNECTING:
self._status = netsnmpAgentStatus.FIRSTCONNECT
libnsa.init_snmp(b(self.AgentName))
if self._status == netsnmpAgentStatus.CONNECTFAILED:
msg = "Error connecting to snmpd instance at \"{0}\" -- " \
"incorrect \"MasterSocket\" or snmpd not running?"
msg = msg.format(self.MasterSocket)
raise netsnmpAgentException(msg) | def function[start, parameter[self]]:
constant[ Starts the agent. Among other things, this means connecting
to the master agent, if configured that way. ]
if <ast.BoolOp object at 0x7da18f58d2d0> begin[:]
name[self]._status assign[=] name[netsnmpAgentStatus].FIRSTCONNECT
call[name[libnsa].init_snmp, parameter[call[name[b], parameter[name[self].AgentName]]]]
if compare[name[self]._status equal[==] name[netsnmpAgentStatus].CONNECTFAILED] begin[:]
variable[msg] assign[=] constant[Error connecting to snmpd instance at "{0}" -- incorrect "MasterSocket" or snmpd not running?]
variable[msg] assign[=] call[name[msg].format, parameter[name[self].MasterSocket]]
<ast.Raise object at 0x7da18f58c580> | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_status] != identifier[netsnmpAgentStatus] . identifier[CONNECTED] keyword[and] identifier[self] . identifier[_status] != identifier[netsnmpAgentStatus] . identifier[RECONNECTING] :
identifier[self] . identifier[_status] = identifier[netsnmpAgentStatus] . identifier[FIRSTCONNECT]
identifier[libnsa] . identifier[init_snmp] ( identifier[b] ( identifier[self] . identifier[AgentName] ))
keyword[if] identifier[self] . identifier[_status] == identifier[netsnmpAgentStatus] . identifier[CONNECTFAILED] :
identifier[msg] = literal[string] literal[string]
identifier[msg] = identifier[msg] . identifier[format] ( identifier[self] . identifier[MasterSocket] )
keyword[raise] identifier[netsnmpAgentException] ( identifier[msg] ) | def start(self):
""" Starts the agent. Among other things, this means connecting
to the master agent, if configured that way. """
if self._status != netsnmpAgentStatus.CONNECTED and self._status != netsnmpAgentStatus.RECONNECTING:
self._status = netsnmpAgentStatus.FIRSTCONNECT
libnsa.init_snmp(b(self.AgentName))
if self._status == netsnmpAgentStatus.CONNECTFAILED:
msg = 'Error connecting to snmpd instance at "{0}" -- incorrect "MasterSocket" or snmpd not running?'
msg = msg.format(self.MasterSocket)
raise netsnmpAgentException(msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.