code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def create_services(self, compose_str: str) -> list:
    """Create new docker services.

    Args:
        compose_str (str): Docker compose 'file' string.

    Returns:
        list: short ids of the services that were created.

    Raises:
        RuntimeError: if this node is not a swarm manager.
    """
    # Services can only be created from a swarm manager node.
    if not self._manager:
        raise RuntimeError('Services can only be run on '
                           'swarm manager nodes')
    # Initialise empty list of created service ids
    services_ids = []
    try:
        # safe_load avoids constructing arbitrary Python objects from
        # YAML tags; a compose string only needs plain YAML anyway.
        service_config = yaml.safe_load(compose_str)
        # Keep a full copy with the per-service definitions; the
        # original dict is stripped down to the shared options below.
        service_list = copy.deepcopy(service_config)
        # Removing version and services from the shared config dict
        service_config.pop('version')
        service_config.pop('services')
        for service_name in service_list['services']:
            service_exist = self._client.services.list(
                filters={'name': service_name})
            if not service_exist:
                service_config['name'] = service_name
                service_spec = self._parse_services(
                    service_config, service_name, service_list)
                created_service = self._client.services.create(
                    **service_spec)
                service_id = created_service.short_id
                LOG.debug('Service created: %s', service_id)
                services_ids.append(service_id)
            else:
                LOG.debug('Service already exists: %s', service_name)
    except yaml.YAMLError as exc:
        # Log through the module logger instead of printing so parse
        # failures show up alongside the rest of the service logs.
        LOG.error('Failed to parse compose string: %s', exc)
    # Returning list of services created
    return services_ids
constant[Create new docker services.
Args:
compose_str (string): Docker compose 'file' string
Return:
service_names, list
]
if <ast.UnaryOp object at 0x7da1b02d97e0> begin[:]
<ast.Raise object at 0x7da1b02d92d0>
variable[services_ids] assign[=] list[[]]
<ast.Try object at 0x7da1b02d8f70>
return[name[services_ids]] | keyword[def] identifier[create_services] ( identifier[self] , identifier[compose_str] : identifier[str] )-> identifier[list] :
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_manager] :
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string] )
identifier[services_ids] =[]
keyword[try] :
identifier[service_config] = identifier[yaml] . identifier[load] ( identifier[compose_str] )
identifier[service_list] = identifier[copy] . identifier[deepcopy] ( identifier[service_config] )
identifier[service_config] . identifier[pop] ( literal[string] )
identifier[service_config] . identifier[pop] ( literal[string] )
keyword[for] identifier[service_name] keyword[in] identifier[service_list] [ literal[string] ]:
identifier[service_exist] = identifier[self] . identifier[_client] . identifier[services] . identifier[list] (
identifier[filters] ={ literal[string] : identifier[service_name] })
keyword[if] keyword[not] identifier[service_exist] :
identifier[service_config] [ literal[string] ]= identifier[service_name]
identifier[service_spec] = identifier[self] . identifier[_parse_services] (
identifier[service_config] , identifier[service_name] , identifier[service_list] )
identifier[created_service] = identifier[self] . identifier[_client] . identifier[services] . identifier[create] (
** identifier[service_spec] )
identifier[service_id] = identifier[created_service] . identifier[short_id]
identifier[LOG] . identifier[debug] ( literal[string] , identifier[service_id] )
identifier[services_ids] . identifier[append] ( identifier[service_id] )
keyword[else] :
identifier[LOG] . identifier[debug] ( literal[string] )
keyword[except] identifier[yaml] . identifier[YAMLError] keyword[as] identifier[exc] :
identifier[print] ( identifier[exc] )
keyword[return] identifier[services_ids] | def create_services(self, compose_str: str) -> list:
"""Create new docker services.
Args:
compose_str (string): Docker compose 'file' string
Return:
service_names, list
"""
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Services can only be run on swarm manager nodes') # depends on [control=['if'], data=[]]
# Initialise empty list
services_ids = []
try:
service_config = yaml.load(compose_str)
# Deepcopy the service config
service_list = copy.deepcopy(service_config)
# Removing version and service from the dict
service_config.pop('version')
service_config.pop('services')
for service_name in service_list['services']:
service_exist = self._client.services.list(filters={'name': service_name})
if not service_exist:
service_config['name'] = service_name
service_spec = self._parse_services(service_config, service_name, service_list)
created_service = self._client.services.create(**service_spec)
service_id = created_service.short_id
LOG.debug('Service created: %s', service_id)
services_ids.append(service_id) # depends on [control=['if'], data=[]]
else:
LOG.debug('Services already exists') # depends on [control=['for'], data=['service_name']] # depends on [control=['try'], data=[]]
except yaml.YAMLError as exc:
print(exc) # depends on [control=['except'], data=['exc']]
# Returning list of services created
return services_ids |
def get_monitor_physical_size(monitor):
    """
    Returns the physical size of the monitor.
    Wrapper for:
        void glfwGetMonitorPhysicalSize(GLFWmonitor* monitor, int* width, int* height);
    """
    # Allocate C ints that GLFW fills in through the out-pointers below.
    width_out = ctypes.c_int(0)
    height_out = ctypes.c_int(0)
    _glfw.glfwGetMonitorPhysicalSize(monitor,
                                     ctypes.pointer(width_out),
                                     ctypes.pointer(height_out))
    return width_out.value, height_out.value
constant[
Returns the physical size of the monitor.
Wrapper for:
void glfwGetMonitorPhysicalSize(GLFWmonitor* monitor, int* width, int* height);
]
variable[width_value] assign[=] call[name[ctypes].c_int, parameter[constant[0]]]
variable[width] assign[=] call[name[ctypes].pointer, parameter[name[width_value]]]
variable[height_value] assign[=] call[name[ctypes].c_int, parameter[constant[0]]]
variable[height] assign[=] call[name[ctypes].pointer, parameter[name[height_value]]]
call[name[_glfw].glfwGetMonitorPhysicalSize, parameter[name[monitor], name[width], name[height]]]
return[tuple[[<ast.Attribute object at 0x7da18bc70700>, <ast.Attribute object at 0x7da18bc70130>]]] | keyword[def] identifier[get_monitor_physical_size] ( identifier[monitor] ):
literal[string]
identifier[width_value] = identifier[ctypes] . identifier[c_int] ( literal[int] )
identifier[width] = identifier[ctypes] . identifier[pointer] ( identifier[width_value] )
identifier[height_value] = identifier[ctypes] . identifier[c_int] ( literal[int] )
identifier[height] = identifier[ctypes] . identifier[pointer] ( identifier[height_value] )
identifier[_glfw] . identifier[glfwGetMonitorPhysicalSize] ( identifier[monitor] , identifier[width] , identifier[height] )
keyword[return] identifier[width_value] . identifier[value] , identifier[height_value] . identifier[value] | def get_monitor_physical_size(monitor):
"""
Returns the physical size of the monitor.
Wrapper for:
void glfwGetMonitorPhysicalSize(GLFWmonitor* monitor, int* width, int* height);
"""
width_value = ctypes.c_int(0)
width = ctypes.pointer(width_value)
height_value = ctypes.c_int(0)
height = ctypes.pointer(height_value)
_glfw.glfwGetMonitorPhysicalSize(monitor, width, height)
return (width_value.value, height_value.value) |
def remove_mea(mea_name):
    '''Removes the MEA design defined by a yaml file in the install folder.

    Parameters
    ----------
    mea_name : str
        Name of the MEA design (yaml/yml file stem) to remove.

    Returns
    -------
    None
    '''
    this_dir, this_filename = os.path.split(__file__)
    electrode_dir = os.path.join(this_dir, "electrodes")
    electrodes = os.listdir(electrode_dir)
    for e in electrodes:
        if mea_name in e:
            # Accept both .yaml and .yml extensions, checked in this
            # order to match the original lookup behavior.
            for ext in ('.yaml', '.yml'):
                mea_file = os.path.join(electrode_dir, mea_name + ext)
                if os.path.isfile(mea_file):
                    os.remove(mea_file)
                    print("Removed: ", mea_file)
                    break
    # Strip the real extension instead of a fixed 5 characters so that
    # '.yml' files (4-char suffix) are listed correctly too.
    electrodes = [os.path.splitext(f)[0]
                  for f in os.listdir(electrode_dir)]
    print('Available MEA: \n', electrodes)
    return
constant[Adds the mea design defined by the yaml file in the install folder
Parameters
----------
mea_yaml_file
Returns
-------
]
<ast.Tuple object at 0x7da20c7c8520> assign[=] call[name[os].path.split, parameter[name[__file__]]]
variable[electrodes] assign[=] <ast.ListComp object at 0x7da20c7cbaf0>
for taget[name[e]] in starred[name[electrodes]] begin[:]
if compare[name[mea_name] in name[e]] begin[:]
if call[name[os].path.isfile, parameter[call[name[os].path.join, parameter[name[this_dir], constant[electrodes], binary_operation[name[mea_name] + constant[.yaml]]]]]] begin[:]
call[name[os].remove, parameter[call[name[os].path.join, parameter[name[this_dir], constant[electrodes], binary_operation[name[mea_name] + constant[.yaml]]]]]]
call[name[print], parameter[constant[Removed: ], call[name[os].path.join, parameter[name[this_dir], constant[electrodes], binary_operation[name[mea_name] + constant[.yaml]]]]]]
variable[electrodes] assign[=] <ast.ListComp object at 0x7da18ede4a60>
call[name[print], parameter[constant[Available MEA:
], name[electrodes]]]
return[None] | keyword[def] identifier[remove_mea] ( identifier[mea_name] ):
literal[string]
identifier[this_dir] , identifier[this_filename] = identifier[os] . identifier[path] . identifier[split] ( identifier[__file__] )
identifier[electrodes] =[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[os] . identifier[listdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[this_dir] , literal[string] ))]
keyword[for] identifier[e] keyword[in] identifier[electrodes] :
keyword[if] identifier[mea_name] keyword[in] identifier[e] :
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[os] . identifier[path] . identifier[join] ( identifier[this_dir] , literal[string] , identifier[mea_name] + literal[string] )):
identifier[os] . identifier[remove] ( identifier[os] . identifier[path] . identifier[join] ( identifier[this_dir] , literal[string] , identifier[mea_name] + literal[string] ))
identifier[print] ( literal[string] , identifier[os] . identifier[path] . identifier[join] ( identifier[this_dir] , literal[string] , identifier[mea_name] + literal[string] ))
keyword[elif] identifier[os] . identifier[path] . identifier[isfile] ( identifier[os] . identifier[path] . identifier[join] ( identifier[this_dir] , literal[string] , identifier[mea_name] + literal[string] )):
identifier[os] . identifier[remove] ( identifier[os] . identifier[path] . identifier[join] ( identifier[this_dir] , literal[string] , identifier[mea_name] + literal[string] ))
identifier[print] ( literal[string] , identifier[os] . identifier[path] . identifier[join] ( identifier[this_dir] , literal[string] , identifier[mea_name] + literal[string] ))
identifier[electrodes] =[ identifier[f] [:- literal[int] ] keyword[for] identifier[f] keyword[in] identifier[os] . identifier[listdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[this_dir] , literal[string] ))]
identifier[print] ( literal[string] , identifier[electrodes] )
keyword[return] | def remove_mea(mea_name):
"""Adds the mea design defined by the yaml file in the install folder
Parameters
----------
mea_yaml_file
Returns
-------
"""
(this_dir, this_filename) = os.path.split(__file__)
electrodes = [f for f in os.listdir(os.path.join(this_dir, 'electrodes'))]
for e in electrodes:
if mea_name in e:
if os.path.isfile(os.path.join(this_dir, 'electrodes', mea_name + '.yaml')):
os.remove(os.path.join(this_dir, 'electrodes', mea_name + '.yaml'))
print('Removed: ', os.path.join(this_dir, 'electrodes', mea_name + '.yaml')) # depends on [control=['if'], data=[]]
elif os.path.isfile(os.path.join(this_dir, 'electrodes', mea_name + '.yml')):
os.remove(os.path.join(this_dir, 'electrodes', mea_name + '.yml'))
print('Removed: ', os.path.join(this_dir, 'electrodes', mea_name + '.yml')) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['mea_name']] # depends on [control=['for'], data=['e']]
electrodes = [f[:-5] for f in os.listdir(os.path.join(this_dir, 'electrodes'))]
print('Available MEA: \n', electrodes)
return |
def ls(self, what):
    """List actuators, programs or sensors (what is string)"""
    # 'what' names a collection attribute on the system object.
    for item in getattr(self.system, what):
        self.logger.info('%s: %s: %s', item.__class__.__name__, item, item.status)
    return True
constant[List actuators, programs or sensors (what is string)]
for taget[name[i]] in starred[call[name[getattr], parameter[name[self].system, name[what]]]] begin[:]
call[name[self].logger.info, parameter[constant[%s: %s: %s], name[i].__class__.__name__, name[i], name[i].status]]
return[constant[True]] | keyword[def] identifier[ls] ( identifier[self] , identifier[what] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[getattr] ( identifier[self] . identifier[system] , identifier[what] ):
identifier[self] . identifier[logger] . identifier[info] ( literal[string] , identifier[i] . identifier[__class__] . identifier[__name__] , identifier[i] , identifier[i] . identifier[status] )
keyword[return] keyword[True] | def ls(self, what):
"""List actuators, programs or sensors (what is string)"""
for i in getattr(self.system, what):
self.logger.info('%s: %s: %s', i.__class__.__name__, i, i.status) # depends on [control=['for'], data=['i']]
return True |
def update_range(self, share_name, directory_name, file_name, data,
                     start_range, end_range, validate_content=False, timeout=None):
        '''
        Writes the bytes specified by the request body into the specified range.
        :param str share_name:
            Name of existing share.
        :param str directory_name:
            The path to the directory.
        :param str file_name:
            Name of existing file.
        :param bytes data:
            Content of the range.
        :param int start_range:
            Start of byte range to use for updating a section of the file.
            The range can be up to 4 MB in size.
            The start_range and end_range params are inclusive.
            Ex: start_range=0, end_range=511 will update the first 512 bytes of the file.
        :param int end_range:
            End of byte range to use for updating a section of the file.
            The range can be up to 4 MB in size.
            The start_range and end_range params are inclusive.
            Ex: start_range=0, end_range=511 will update the first 512 bytes of the file.
        :param bool validate_content:
            If true, calculates an MD5 hash of the range content. The storage
            service checks the hash of the content that has arrived
            with the hash that was sent. This is primarily valuable for detecting
            bitflips on the wire if using http instead of https as https (the default)
            will already validate. Note that this MD5 hash is not stored with the
            file.
        :param int timeout:
            The timeout parameter is expressed in seconds.
        '''
        # Required-argument validation; directory_name is not validated
        # here, so files in the share root are allowed.
        _validate_not_none('share_name', share_name)
        _validate_not_none('file_name', file_name)
        _validate_not_none('data', data)
        # Build the REST request: PUT <share>/<dir>/<file>?comp=range
        request = HTTPRequest()
        request.method = 'PUT'
        request.host_locations = self._get_host_locations()
        request.path = _get_path(share_name, directory_name, file_name)
        request.query = {
            'comp': 'range',
            'timeout': _int_to_str(timeout),
        }
        # 'update' write mode writes the given bytes into the range.
        request.headers = {
            'x-ms-write': 'update',
        }
        # Adds the x-ms-range header after validating the inclusive range.
        _validate_and_format_range_headers(
            request, start_range, end_range)
        request.body = _get_data_bytes_only('data', data)
        if validate_content:
            # Optionally send an MD5 of the payload so the service can
            # verify transport integrity (see docstring note above).
            computed_md5 = _get_content_md5(request.body)
            request.headers['Content-MD5'] = _to_str(computed_md5)
self._perform_request(request) | def function[update_range, parameter[self, share_name, directory_name, file_name, data, start_range, end_range, validate_content, timeout]]:
constant[
Writes the bytes specified by the request body into the specified range.
:param str share_name:
Name of existing share.
:param str directory_name:
The path to the directory.
:param str file_name:
Name of existing file.
:param bytes data:
Content of the range.
:param int start_range:
Start of byte range to use for updating a section of the file.
The range can be up to 4 MB in size.
The start_range and end_range params are inclusive.
Ex: start_range=0, end_range=511 will download first 512 bytes of file.
:param int end_range:
End of byte range to use for updating a section of the file.
The range can be up to 4 MB in size.
The start_range and end_range params are inclusive.
Ex: start_range=0, end_range=511 will download first 512 bytes of file.
:param bool validate_content:
If true, calculates an MD5 hash of the page content. The storage
service checks the hash of the content that has arrived
with the hash that was sent. This is primarily valuable for detecting
bitflips on the wire if using http instead of https as https (the default)
will already validate. Note that this MD5 hash is not stored with the
file.
:param int timeout:
The timeout parameter is expressed in seconds.
]
call[name[_validate_not_none], parameter[constant[share_name], name[share_name]]]
call[name[_validate_not_none], parameter[constant[file_name], name[file_name]]]
call[name[_validate_not_none], parameter[constant[data], name[data]]]
variable[request] assign[=] call[name[HTTPRequest], parameter[]]
name[request].method assign[=] constant[PUT]
name[request].host_locations assign[=] call[name[self]._get_host_locations, parameter[]]
name[request].path assign[=] call[name[_get_path], parameter[name[share_name], name[directory_name], name[file_name]]]
name[request].query assign[=] dictionary[[<ast.Constant object at 0x7da1b1d0d720>, <ast.Constant object at 0x7da1b1d0f580>], [<ast.Constant object at 0x7da1b1d0e470>, <ast.Call object at 0x7da1b1d0c940>]]
name[request].headers assign[=] dictionary[[<ast.Constant object at 0x7da1b1d0c880>], [<ast.Constant object at 0x7da1b1d0eb30>]]
call[name[_validate_and_format_range_headers], parameter[name[request], name[start_range], name[end_range]]]
name[request].body assign[=] call[name[_get_data_bytes_only], parameter[constant[data], name[data]]]
if name[validate_content] begin[:]
variable[computed_md5] assign[=] call[name[_get_content_md5], parameter[name[request].body]]
call[name[request].headers][constant[Content-MD5]] assign[=] call[name[_to_str], parameter[name[computed_md5]]]
call[name[self]._perform_request, parameter[name[request]]] | keyword[def] identifier[update_range] ( identifier[self] , identifier[share_name] , identifier[directory_name] , identifier[file_name] , identifier[data] ,
identifier[start_range] , identifier[end_range] , identifier[validate_content] = keyword[False] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[_validate_not_none] ( literal[string] , identifier[share_name] )
identifier[_validate_not_none] ( literal[string] , identifier[file_name] )
identifier[_validate_not_none] ( literal[string] , identifier[data] )
identifier[request] = identifier[HTTPRequest] ()
identifier[request] . identifier[method] = literal[string]
identifier[request] . identifier[host_locations] = identifier[self] . identifier[_get_host_locations] ()
identifier[request] . identifier[path] = identifier[_get_path] ( identifier[share_name] , identifier[directory_name] , identifier[file_name] )
identifier[request] . identifier[query] ={
literal[string] : literal[string] ,
literal[string] : identifier[_int_to_str] ( identifier[timeout] ),
}
identifier[request] . identifier[headers] ={
literal[string] : literal[string] ,
}
identifier[_validate_and_format_range_headers] (
identifier[request] , identifier[start_range] , identifier[end_range] )
identifier[request] . identifier[body] = identifier[_get_data_bytes_only] ( literal[string] , identifier[data] )
keyword[if] identifier[validate_content] :
identifier[computed_md5] = identifier[_get_content_md5] ( identifier[request] . identifier[body] )
identifier[request] . identifier[headers] [ literal[string] ]= identifier[_to_str] ( identifier[computed_md5] )
identifier[self] . identifier[_perform_request] ( identifier[request] ) | def update_range(self, share_name, directory_name, file_name, data, start_range, end_range, validate_content=False, timeout=None):
"""
Writes the bytes specified by the request body into the specified range.
:param str share_name:
Name of existing share.
:param str directory_name:
The path to the directory.
:param str file_name:
Name of existing file.
:param bytes data:
Content of the range.
:param int start_range:
Start of byte range to use for updating a section of the file.
The range can be up to 4 MB in size.
The start_range and end_range params are inclusive.
Ex: start_range=0, end_range=511 will download first 512 bytes of file.
:param int end_range:
End of byte range to use for updating a section of the file.
The range can be up to 4 MB in size.
The start_range and end_range params are inclusive.
Ex: start_range=0, end_range=511 will download first 512 bytes of file.
:param bool validate_content:
If true, calculates an MD5 hash of the page content. The storage
service checks the hash of the content that has arrived
with the hash that was sent. This is primarily valuable for detecting
bitflips on the wire if using http instead of https as https (the default)
will already validate. Note that this MD5 hash is not stored with the
file.
:param int timeout:
The timeout parameter is expressed in seconds.
"""
_validate_not_none('share_name', share_name)
_validate_not_none('file_name', file_name)
_validate_not_none('data', data)
request = HTTPRequest()
request.method = 'PUT'
request.host_locations = self._get_host_locations()
request.path = _get_path(share_name, directory_name, file_name)
request.query = {'comp': 'range', 'timeout': _int_to_str(timeout)}
request.headers = {'x-ms-write': 'update'}
_validate_and_format_range_headers(request, start_range, end_range)
request.body = _get_data_bytes_only('data', data)
if validate_content:
computed_md5 = _get_content_md5(request.body)
request.headers['Content-MD5'] = _to_str(computed_md5) # depends on [control=['if'], data=[]]
self._perform_request(request) |
def set_foreground(self, fg, isRGBA=None):
"""
Set the foreground color. fg can be a matlab format string, a
html hex color string, an rgb unit tuple, or a float between 0
and 1. In the latter case, grayscale is used.
"""
# Implementation note: wxPython has a separate concept of pen and
# brush - the brush fills any outline trace left by the pen.
# Here we set both to the same colour - if a figure is not to be
# filled, the renderer will set the brush to be transparent
# Same goes for text foreground...
DEBUG_MSG("set_foreground()", 1, self)
self.select()
GraphicsContextBase.set_foreground(self, fg, isRGBA)
self._pen.SetColour(self.get_wxcolour(self.get_rgb()))
self.gfx_ctx.SetPen(self._pen)
self.unselect() | def function[set_foreground, parameter[self, fg, isRGBA]]:
constant[
Set the foreground color. fg can be a matlab format string, a
html hex color string, an rgb unit tuple, or a float between 0
and 1. In the latter case, grayscale is used.
]
call[name[DEBUG_MSG], parameter[constant[set_foreground()], constant[1], name[self]]]
call[name[self].select, parameter[]]
call[name[GraphicsContextBase].set_foreground, parameter[name[self], name[fg], name[isRGBA]]]
call[name[self]._pen.SetColour, parameter[call[name[self].get_wxcolour, parameter[call[name[self].get_rgb, parameter[]]]]]]
call[name[self].gfx_ctx.SetPen, parameter[name[self]._pen]]
call[name[self].unselect, parameter[]] | keyword[def] identifier[set_foreground] ( identifier[self] , identifier[fg] , identifier[isRGBA] = keyword[None] ):
literal[string]
identifier[DEBUG_MSG] ( literal[string] , literal[int] , identifier[self] )
identifier[self] . identifier[select] ()
identifier[GraphicsContextBase] . identifier[set_foreground] ( identifier[self] , identifier[fg] , identifier[isRGBA] )
identifier[self] . identifier[_pen] . identifier[SetColour] ( identifier[self] . identifier[get_wxcolour] ( identifier[self] . identifier[get_rgb] ()))
identifier[self] . identifier[gfx_ctx] . identifier[SetPen] ( identifier[self] . identifier[_pen] )
identifier[self] . identifier[unselect] () | def set_foreground(self, fg, isRGBA=None):
"""
Set the foreground color. fg can be a matlab format string, a
html hex color string, an rgb unit tuple, or a float between 0
and 1. In the latter case, grayscale is used.
"""
# Implementation note: wxPython has a separate concept of pen and
# brush - the brush fills any outline trace left by the pen.
# Here we set both to the same colour - if a figure is not to be
# filled, the renderer will set the brush to be transparent
# Same goes for text foreground...
DEBUG_MSG('set_foreground()', 1, self)
self.select()
GraphicsContextBase.set_foreground(self, fg, isRGBA)
self._pen.SetColour(self.get_wxcolour(self.get_rgb()))
self.gfx_ctx.SetPen(self._pen)
self.unselect() |
def BulkLabel(label, hostnames, owner=None, token=None, client_index=None):
  """Assign a label to a group of clients based on hostname.
  Sets a label as an identifier to a group of clients. Removes the label from
  other clients.
  This can be used to automate labeling clients based on externally derived
  attributes, for example machines assigned to particular users, or machines
  fulfilling particular roles.
  Args:
    label: The label to apply.
    hostnames: The collection of hostnames that should have the label.
    owner: The owner for the newly created labels. Defaults to token.username.
    token: The authentication token.
    client_index: An optional client index to use. If not provided, use the
      default client index.
  """
  if client_index is None:
    client_index = CreateClientIndex(token=token)
  # Hostname comparison below is case-insensitive, so normalise once here.
  fqdns = set()
  for hostname in hostnames:
    fqdns.add(hostname.lower())
  labelled_urns = client_index.LookupClients(["+label:%s" % label])
  # If a labelled client fqdn isn't in the set of target fqdns remove the label.
  # Labelled clients with a target fqdn need no action and are removed from the
  # set of target fqdns.
  for client in aff4.FACTORY.MultiOpen(
      labelled_urns, token=token, aff4_type=aff4_grr.VFSGRRClient, mode="rw"):
    fqdn = utils.SmartStr(client.Get("FQDN")).lower()
    if fqdn not in fqdns:
      client_index.RemoveClientLabels(client)
      client.RemoveLabel(label, owner=owner)
      # Flush persists the label change before the index is updated.
      client.Flush()
      client_index.AddClient(client)
    else:
      fqdns.discard(fqdn)
  # The residual set of fqdns needs labelling.
  # Get the latest URN for these clients and open them to add the label.
  urns = []
  keywords = ["+host:%s" % fqdn for fqdn in fqdns]
  # NOTE(review): .itervalues() is Python 2 only; change to .values() if
  # this module is ever ported to Python 3.
  for client_list in client_index.ReadClientPostingLists(keywords).itervalues():
    for client_id in client_list:
      urns.append(rdfvalue.RDFURN(client_id))
  for client in aff4.FACTORY.MultiOpen(
      urns, token=token, aff4_type=aff4_grr.VFSGRRClient, mode="rw"):
    client.AddLabel(label, owner=owner)
    client.Flush()
client_index.AddClient(client) | def function[BulkLabel, parameter[label, hostnames, owner, token, client_index]]:
constant[Assign a label to a group of clients based on hostname.
Sets a label as an identifier to a group of clients. Removes the label from
other clients.
This can be used to automate labeling clients based on externally derived
attributes, for example machines assigned to particular users, or machines
fulfilling particular roles.
Args:
label: The label to apply.
hostnames: The collection of hostnames that should have the label.
owner: The owner for the newly created labels. Defaults to token.username.
token: The authentication token.
client_index: An optional client index to use. If not provided, use the
default client index.
]
if compare[name[client_index] is constant[None]] begin[:]
variable[client_index] assign[=] call[name[CreateClientIndex], parameter[]]
variable[fqdns] assign[=] call[name[set], parameter[]]
for taget[name[hostname]] in starred[name[hostnames]] begin[:]
call[name[fqdns].add, parameter[call[name[hostname].lower, parameter[]]]]
variable[labelled_urns] assign[=] call[name[client_index].LookupClients, parameter[list[[<ast.BinOp object at 0x7da1b1d92f20>]]]]
for taget[name[client]] in starred[call[name[aff4].FACTORY.MultiOpen, parameter[name[labelled_urns]]]] begin[:]
variable[fqdn] assign[=] call[call[name[utils].SmartStr, parameter[call[name[client].Get, parameter[constant[FQDN]]]]].lower, parameter[]]
if compare[name[fqdn] <ast.NotIn object at 0x7da2590d7190> name[fqdns]] begin[:]
call[name[client_index].RemoveClientLabels, parameter[name[client]]]
call[name[client].RemoveLabel, parameter[name[label]]]
call[name[client].Flush, parameter[]]
call[name[client_index].AddClient, parameter[name[client]]]
variable[urns] assign[=] list[[]]
variable[keywords] assign[=] <ast.ListComp object at 0x7da1b1d92110>
for taget[name[client_list]] in starred[call[call[name[client_index].ReadClientPostingLists, parameter[name[keywords]]].itervalues, parameter[]]] begin[:]
for taget[name[client_id]] in starred[name[client_list]] begin[:]
call[name[urns].append, parameter[call[name[rdfvalue].RDFURN, parameter[name[client_id]]]]]
for taget[name[client]] in starred[call[name[aff4].FACTORY.MultiOpen, parameter[name[urns]]]] begin[:]
call[name[client].AddLabel, parameter[name[label]]]
call[name[client].Flush, parameter[]]
call[name[client_index].AddClient, parameter[name[client]]] | keyword[def] identifier[BulkLabel] ( identifier[label] , identifier[hostnames] , identifier[owner] = keyword[None] , identifier[token] = keyword[None] , identifier[client_index] = keyword[None] ):
literal[string]
keyword[if] identifier[client_index] keyword[is] keyword[None] :
identifier[client_index] = identifier[CreateClientIndex] ( identifier[token] = identifier[token] )
identifier[fqdns] = identifier[set] ()
keyword[for] identifier[hostname] keyword[in] identifier[hostnames] :
identifier[fqdns] . identifier[add] ( identifier[hostname] . identifier[lower] ())
identifier[labelled_urns] = identifier[client_index] . identifier[LookupClients] ([ literal[string] % identifier[label] ])
keyword[for] identifier[client] keyword[in] identifier[aff4] . identifier[FACTORY] . identifier[MultiOpen] (
identifier[labelled_urns] , identifier[token] = identifier[token] , identifier[aff4_type] = identifier[aff4_grr] . identifier[VFSGRRClient] , identifier[mode] = literal[string] ):
identifier[fqdn] = identifier[utils] . identifier[SmartStr] ( identifier[client] . identifier[Get] ( literal[string] )). identifier[lower] ()
keyword[if] identifier[fqdn] keyword[not] keyword[in] identifier[fqdns] :
identifier[client_index] . identifier[RemoveClientLabels] ( identifier[client] )
identifier[client] . identifier[RemoveLabel] ( identifier[label] , identifier[owner] = identifier[owner] )
identifier[client] . identifier[Flush] ()
identifier[client_index] . identifier[AddClient] ( identifier[client] )
keyword[else] :
identifier[fqdns] . identifier[discard] ( identifier[fqdn] )
identifier[urns] =[]
identifier[keywords] =[ literal[string] % identifier[fqdn] keyword[for] identifier[fqdn] keyword[in] identifier[fqdns] ]
keyword[for] identifier[client_list] keyword[in] identifier[client_index] . identifier[ReadClientPostingLists] ( identifier[keywords] ). identifier[itervalues] ():
keyword[for] identifier[client_id] keyword[in] identifier[client_list] :
identifier[urns] . identifier[append] ( identifier[rdfvalue] . identifier[RDFURN] ( identifier[client_id] ))
keyword[for] identifier[client] keyword[in] identifier[aff4] . identifier[FACTORY] . identifier[MultiOpen] (
identifier[urns] , identifier[token] = identifier[token] , identifier[aff4_type] = identifier[aff4_grr] . identifier[VFSGRRClient] , identifier[mode] = literal[string] ):
identifier[client] . identifier[AddLabel] ( identifier[label] , identifier[owner] = identifier[owner] )
identifier[client] . identifier[Flush] ()
identifier[client_index] . identifier[AddClient] ( identifier[client] ) | def BulkLabel(label, hostnames, owner=None, token=None, client_index=None):
"""Assign a label to a group of clients based on hostname.
Sets a label as an identifier to a group of clients. Removes the label from
other clients.
This can be used to automate labeling clients based on externally derived
attributes, for example machines assigned to particular users, or machines
fulfilling particular roles.
Args:
label: The label to apply.
hostnames: The collection of hostnames that should have the label.
owner: The owner for the newly created labels. Defaults to token.username.
token: The authentication token.
client_index: An optional client index to use. If not provided, use the
default client index.
"""
if client_index is None:
client_index = CreateClientIndex(token=token) # depends on [control=['if'], data=['client_index']]
fqdns = set()
for hostname in hostnames:
fqdns.add(hostname.lower()) # depends on [control=['for'], data=['hostname']]
labelled_urns = client_index.LookupClients(['+label:%s' % label])
# If a labelled client fqdn isn't in the set of target fqdns remove the label.
# Labelled clients with a target fqdn need no action and are removed from the
# set of target fqdns.
for client in aff4.FACTORY.MultiOpen(labelled_urns, token=token, aff4_type=aff4_grr.VFSGRRClient, mode='rw'):
fqdn = utils.SmartStr(client.Get('FQDN')).lower()
if fqdn not in fqdns:
client_index.RemoveClientLabels(client)
client.RemoveLabel(label, owner=owner)
client.Flush()
client_index.AddClient(client) # depends on [control=['if'], data=[]]
else:
fqdns.discard(fqdn) # depends on [control=['for'], data=['client']]
# The residual set of fqdns needs labelling.
# Get the latest URN for these clients and open them to add the label.
urns = []
keywords = ['+host:%s' % fqdn for fqdn in fqdns]
for client_list in client_index.ReadClientPostingLists(keywords).itervalues():
for client_id in client_list:
urns.append(rdfvalue.RDFURN(client_id)) # depends on [control=['for'], data=['client_id']] # depends on [control=['for'], data=['client_list']]
for client in aff4.FACTORY.MultiOpen(urns, token=token, aff4_type=aff4_grr.VFSGRRClient, mode='rw'):
client.AddLabel(label, owner=owner)
client.Flush()
client_index.AddClient(client) # depends on [control=['for'], data=['client']] |
def convert_to_wav(files):
    '''Converts files to a format that pocketsphinx can deal with (16khz mono 16bit wav).

    Each input file is converted with ffmpeg to ``<name>.temp.wav``. A file is
    skipped when a transcription (``<name>.transcription.txt``) already exists
    or when the converted copy is already present on disk.

    :param files: iterable of input media file paths
    :return: list of the ``.temp.wav`` paths created by this call
    '''
    converted = []
    for f in files:
        new_name = f + '.temp.wav'
        print(new_name)
        # Only convert when no transcription exists yet and the target wav
        # has not been produced by a previous run.
        if not os.path.exists(f + '.transcription.txt') and not os.path.exists(new_name):
            subprocess.call(['ffmpeg', '-y', '-i', f, '-acodec', 'pcm_s16le',
                             '-ac', '1', '-ar', '16000', new_name])
            converted.append(new_name)
    return converted
constant[Converts files to a format that pocketsphinx can deal wtih (16khz mono 16bit wav)]
variable[converted] assign[=] list[[]]
for taget[name[f]] in starred[name[files]] begin[:]
variable[new_name] assign[=] binary_operation[name[f] + constant[.temp.wav]]
call[name[print], parameter[name[new_name]]]
if <ast.BoolOp object at 0x7da18dc9aec0> begin[:]
call[name[subprocess].call, parameter[list[[<ast.Constant object at 0x7da18eb54280>, <ast.Constant object at 0x7da18eb56d10>, <ast.Constant object at 0x7da18eb57850>, <ast.Name object at 0x7da18eb55900>, <ast.Constant object at 0x7da1b04d6410>, <ast.Constant object at 0x7da1b04d7160>, <ast.Constant object at 0x7da1b04d5ab0>, <ast.Constant object at 0x7da1b04d5870>, <ast.Constant object at 0x7da1b04d55a0>, <ast.Constant object at 0x7da1b04d5750>, <ast.Name object at 0x7da1b04d61d0>]]]]
call[name[converted].append, parameter[name[new_name]]]
return[name[converted]] | keyword[def] identifier[convert_to_wav] ( identifier[files] ):
literal[string]
identifier[converted] =[]
keyword[for] identifier[f] keyword[in] identifier[files] :
identifier[new_name] = identifier[f] + literal[string]
identifier[print] ( identifier[new_name] )
keyword[if] ( identifier[os] . identifier[path] . identifier[exists] ( identifier[f] + literal[string] ) keyword[is] keyword[False] ) keyword[and] ( identifier[os] . identifier[path] . identifier[exists] ( identifier[new_name] ) keyword[is] keyword[False] ):
identifier[subprocess] . identifier[call] ([ literal[string] , literal[string] , literal[string] , identifier[f] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , identifier[new_name] ])
identifier[converted] . identifier[append] ( identifier[new_name] )
keyword[return] identifier[converted] | def convert_to_wav(files):
"""Converts files to a format that pocketsphinx can deal wtih (16khz mono 16bit wav)"""
converted = []
for f in files:
new_name = f + '.temp.wav'
print(new_name)
if os.path.exists(f + '.transcription.txt') is False and os.path.exists(new_name) is False:
subprocess.call(['ffmpeg', '-y', '-i', f, '-acodec', 'pcm_s16le', '-ac', '1', '-ar', '16000', new_name]) # depends on [control=['if'], data=[]]
converted.append(new_name) # depends on [control=['for'], data=['f']]
return converted |
def _handle_tag_schedule_return(self, tag, data):
    '''
    Handle a _schedule_return event
    '''
    # Jobs named with the "alive" master-event prefix report the current
    # connection to a master; log which master we are connected to.
    alive_prefix = master_event(type='alive', master='')
    schedule_name = data['schedule']
    if schedule_name.startswith(alive_prefix):
        if data['return']:
            master_id = schedule_name.split(alive_prefix)[1]
            log.debug('Connected to master %s', master_id)
    # Always publish the schedule result back, without waiting for an ack.
    self._return_pub(data, ret_cmd='_return', sync=False)
constant[
Handle a _schedule_return event
]
if call[call[name[data]][constant[schedule]].startswith, parameter[call[name[master_event], parameter[]]]] begin[:]
if call[name[data]][constant[return]] begin[:]
call[name[log].debug, parameter[constant[Connected to master %s], call[call[call[name[data]][constant[schedule]].split, parameter[call[name[master_event], parameter[]]]]][constant[1]]]]
call[name[self]._return_pub, parameter[name[data]]] | keyword[def] identifier[_handle_tag_schedule_return] ( identifier[self] , identifier[tag] , identifier[data] ):
literal[string]
keyword[if] identifier[data] [ literal[string] ]. identifier[startswith] ( identifier[master_event] ( identifier[type] = literal[string] , identifier[master] = literal[string] )):
keyword[if] identifier[data] [ literal[string] ]:
identifier[log] . identifier[debug] (
literal[string] ,
identifier[data] [ literal[string] ]. identifier[split] ( identifier[master_event] ( identifier[type] = literal[string] , identifier[master] = literal[string] ))[ literal[int] ]
)
identifier[self] . identifier[_return_pub] ( identifier[data] , identifier[ret_cmd] = literal[string] , identifier[sync] = keyword[False] ) | def _handle_tag_schedule_return(self, tag, data):
"""
Handle a _schedule_return event
"""
# reporting current connection with master
if data['schedule'].startswith(master_event(type='alive', master='')):
if data['return']:
log.debug('Connected to master %s', data['schedule'].split(master_event(type='alive', master=''))[1]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
self._return_pub(data, ret_cmd='_return', sync=False) |
def eventize(self, granularity):
    """ This splits the JSON information found at self.events into the
    several events. For this there are three different levels of time
    consuming actions: 1-soft, 2-medium and 3-hard.
    Level 1 provides events about commits
    Level 2 provides events about files
    Level 3 provides other events (not used so far)
    :param granularity: Levels of time consuming actions to calculate events
    :type granularity: integer
    :returns: Pandas dataframe with splitted events.
    :rtype: pandas.DataFrame
    """
    df_columns = {}
    # Init common columns
    self._init_common_fields(df_columns)
    # First level granularity: one row per commit
    df_columns[Git.COMMIT_ID] = []
    df_columns[Git.COMMIT_EVENT] = []
    df_columns[Git.COMMIT_DATE] = []
    df_columns[Git.COMMIT_OWNER] = []
    df_columns[Git.COMMIT_COMMITTER] = []
    df_columns[Git.COMMIT_COMMITTER_DATE] = []
    df_columns[Git.COMMIT_REPOSITORY] = []
    df_columns[Git.COMMIT_MESSAGE] = []
    df_columns[Git.COMMIT_NUM_FILES] = []
    df_columns[Git.COMMIT_ADDED_LINES] = []
    df_columns[Git.COMMIT_REMOVED_LINES] = []
    df_columns[Git.COMMIT_HASH] = []
    df_columns[Git.AUTHOR_DOMAIN] = []
    # Second level of granularity: one row per file touched by a commit
    df_columns[Git.FILE_FILES] = []
    df_columns[Git.FILE_EVENT] = []
    df_columns[Git.FILE_PATH] = []
    df_columns[Git.FILE_ADDED_LINES] = []
    df_columns[Git.FILE_REMOVED_LINES] = []
    events = pandas.DataFrame()
    for item in self.items:
        commit_data = item["data"]
        if granularity == 1:
            self._add_common_fields(df_columns, item)
            self.__add_commit_info(df_columns, item)
            added_lines = 0
            removed_lines = 0
            # A commit without a "files" entry (e.g. a merge) contributes
            # zero files and zero changed lines instead of raising KeyError.
            files = commit_data.get("files", [])
            # Append (not assign): these columns must stay row-aligned with
            # the per-commit columns filled by __add_commit_info. Assigning
            # a scalar here would overwrite the column on every iteration
            # and leave every row carrying the *last* commit's values.
            df_columns[Git.COMMIT_NUM_FILES].append(len(files))
            for f in files:
                # "added"/"removed" may be absent or "-" (binary files).
                if "added" in f.keys() and f["added"] != "-":
                    added_lines = added_lines + int(f["added"])
                if "removed" in f.keys() and f["removed"] != "-":
                    removed_lines = removed_lines + int(f["removed"])
            df_columns[Git.COMMIT_ADDED_LINES].append(added_lines)
            df_columns[Git.COMMIT_REMOVED_LINES].append(removed_lines)
        if granularity == 2:
            # Add extra info about files actions, if there were any
            if "files" in commit_data.keys():
                files = commit_data["files"]
                # Count files that carry an "action" (add/modify/delete...)
                nfiles = 0
                for f in files:
                    if "action" in f.keys():
                        nfiles += 1
                for f in files:
                    self._add_common_fields(df_columns, item)
                    self.__add_commit_info(df_columns, item)
                    df_columns[Git.FILE_FILES].append(nfiles)
                    if "action" in f.keys():
                        df_columns[Git.FILE_EVENT].append(Git.EVENT_FILE + f["action"])
                    else:
                        df_columns[Git.FILE_EVENT].append("-")
                    if "file" in f.keys():
                        df_columns[Git.FILE_PATH].append(f["file"])
                    else:
                        df_columns[Git.FILE_PATH].append("-")
                    if "added" in f.keys():
                        if f["added"] == "-":
                            df_columns[Git.FILE_ADDED_LINES].append(0)
                        else:
                            df_columns[Git.FILE_ADDED_LINES].append(int(f["added"]))
                    else:
                        df_columns[Git.FILE_ADDED_LINES].append(0)
                    if "removed" in f.keys():
                        if f["removed"] == "-":
                            df_columns[Git.FILE_REMOVED_LINES].append(0)
                        else:
                            df_columns[Git.FILE_REMOVED_LINES].append(int(f["removed"]))
                    else:
                        df_columns[Git.FILE_REMOVED_LINES].append(0)
            else:
                print("Merge found, doing nothing...")
        if granularity == 3:
            # TDB
            pass
    # Done in this way to have an order (and not a direct cast)
    self._add_common_events(events, df_columns)
    events[Git.COMMIT_ID] = df_columns[Git.COMMIT_ID]
    events[Git.COMMIT_EVENT] = df_columns[Git.COMMIT_EVENT]
    events[Git.COMMIT_DATE] = df_columns[Git.COMMIT_DATE]
    events[Git.COMMIT_OWNER] = df_columns[Git.COMMIT_OWNER]
    events[Git.COMMIT_COMMITTER] = df_columns[Git.COMMIT_COMMITTER]
    events[Git.COMMIT_COMMITTER_DATE] = df_columns[Git.COMMIT_COMMITTER_DATE]
    events[Git.COMMIT_REPOSITORY] = df_columns[Git.COMMIT_REPOSITORY]
    events[Git.COMMIT_MESSAGE] = df_columns[Git.COMMIT_MESSAGE]
    events[Git.COMMIT_HASH] = df_columns[Git.COMMIT_HASH]
    events[Git.AUTHOR_DOMAIN] = df_columns[Git.AUTHOR_DOMAIN]
    if granularity == 1:
        events[Git.COMMIT_NUM_FILES] = df_columns[Git.COMMIT_NUM_FILES]
        events[Git.COMMIT_ADDED_LINES] = df_columns[Git.COMMIT_ADDED_LINES]
        events[Git.COMMIT_REMOVED_LINES] = df_columns[Git.COMMIT_REMOVED_LINES]
    if granularity == 2:
        events[Git.FILE_FILES] = df_columns[Git.FILE_FILES]
        events[Git.FILE_EVENT] = df_columns[Git.FILE_EVENT]
        events[Git.FILE_PATH] = df_columns[Git.FILE_PATH]
        events[Git.FILE_ADDED_LINES] = df_columns[Git.FILE_ADDED_LINES]
        events[Git.FILE_REMOVED_LINES] = df_columns[Git.FILE_REMOVED_LINES]
    return events
constant[ This splits the JSON information found at self.events into the
several events. For this there are three different levels of time
consuming actions: 1-soft, 2-medium and 3-hard.
Level 1 provides events about commits
Level 2 provides events about files
Level 3 provides other events (not used so far)
:param granularity: Levels of time consuming actions to calculate events
:type granularity: integer
:returns: Pandas dataframe with splitted events.
:rtype: pandas.DataFrame
]
variable[df_columns] assign[=] dictionary[[], []]
call[name[self]._init_common_fields, parameter[name[df_columns]]]
call[name[df_columns]][name[Git].COMMIT_ID] assign[=] list[[]]
call[name[df_columns]][name[Git].COMMIT_EVENT] assign[=] list[[]]
call[name[df_columns]][name[Git].COMMIT_DATE] assign[=] list[[]]
call[name[df_columns]][name[Git].COMMIT_OWNER] assign[=] list[[]]
call[name[df_columns]][name[Git].COMMIT_COMMITTER] assign[=] list[[]]
call[name[df_columns]][name[Git].COMMIT_COMMITTER_DATE] assign[=] list[[]]
call[name[df_columns]][name[Git].COMMIT_REPOSITORY] assign[=] list[[]]
call[name[df_columns]][name[Git].COMMIT_MESSAGE] assign[=] list[[]]
call[name[df_columns]][name[Git].COMMIT_NUM_FILES] assign[=] list[[]]
call[name[df_columns]][name[Git].COMMIT_ADDED_LINES] assign[=] list[[]]
call[name[df_columns]][name[Git].COMMIT_REMOVED_LINES] assign[=] list[[]]
call[name[df_columns]][name[Git].COMMIT_HASH] assign[=] list[[]]
call[name[df_columns]][name[Git].AUTHOR_DOMAIN] assign[=] list[[]]
call[name[df_columns]][name[Git].FILE_FILES] assign[=] list[[]]
call[name[df_columns]][name[Git].FILE_EVENT] assign[=] list[[]]
call[name[df_columns]][name[Git].FILE_PATH] assign[=] list[[]]
call[name[df_columns]][name[Git].FILE_ADDED_LINES] assign[=] list[[]]
call[name[df_columns]][name[Git].FILE_REMOVED_LINES] assign[=] list[[]]
variable[events] assign[=] call[name[pandas].DataFrame, parameter[]]
for taget[name[item]] in starred[name[self].items] begin[:]
variable[commit_data] assign[=] call[name[item]][constant[data]]
if compare[name[granularity] equal[==] constant[1]] begin[:]
call[name[self]._add_common_fields, parameter[name[df_columns], name[item]]]
call[name[self].__add_commit_info, parameter[name[df_columns], name[item]]]
variable[added_lines] assign[=] constant[0]
variable[removed_lines] assign[=] constant[0]
variable[files] assign[=] call[name[commit_data]][constant[files]]
call[name[df_columns]][name[Git].COMMIT_NUM_FILES] assign[=] call[name[int], parameter[call[name[len], parameter[name[files]]]]]
for taget[name[f]] in starred[name[files]] begin[:]
if <ast.BoolOp object at 0x7da18f09d720> begin[:]
variable[added_lines] assign[=] binary_operation[name[added_lines] + call[name[int], parameter[call[name[f]][constant[added]]]]]
if <ast.BoolOp object at 0x7da18f09df30> begin[:]
variable[removed_lines] assign[=] binary_operation[name[removed_lines] + call[name[int], parameter[call[name[f]][constant[removed]]]]]
call[name[df_columns]][name[Git].COMMIT_ADDED_LINES] assign[=] name[added_lines]
call[name[df_columns]][name[Git].COMMIT_REMOVED_LINES] assign[=] name[removed_lines]
if compare[name[granularity] equal[==] constant[2]] begin[:]
if compare[constant[files] in call[name[commit_data].keys, parameter[]]] begin[:]
variable[files] assign[=] call[name[commit_data]][constant[files]]
variable[nfiles] assign[=] constant[0]
for taget[name[f]] in starred[name[files]] begin[:]
if compare[constant[action] in call[name[f].keys, parameter[]]] begin[:]
<ast.AugAssign object at 0x7da18f09df60>
for taget[name[f]] in starred[name[files]] begin[:]
call[name[self]._add_common_fields, parameter[name[df_columns], name[item]]]
call[name[self].__add_commit_info, parameter[name[df_columns], name[item]]]
call[call[name[df_columns]][name[Git].FILE_FILES].append, parameter[name[nfiles]]]
if compare[constant[action] in call[name[f].keys, parameter[]]] begin[:]
call[call[name[df_columns]][name[Git].FILE_EVENT].append, parameter[binary_operation[name[Git].EVENT_FILE + call[name[f]][constant[action]]]]]
if compare[constant[file] in call[name[f].keys, parameter[]]] begin[:]
call[call[name[df_columns]][name[Git].FILE_PATH].append, parameter[call[name[f]][constant[file]]]]
if compare[constant[added] in call[name[f].keys, parameter[]]] begin[:]
if compare[call[name[f]][constant[added]] equal[==] constant[-]] begin[:]
call[call[name[df_columns]][name[Git].FILE_ADDED_LINES].append, parameter[constant[0]]]
if compare[constant[removed] in call[name[f].keys, parameter[]]] begin[:]
if compare[call[name[f]][constant[removed]] equal[==] constant[-]] begin[:]
call[call[name[df_columns]][name[Git].FILE_REMOVED_LINES].append, parameter[constant[0]]]
if compare[name[granularity] equal[==] constant[3]] begin[:]
pass
call[name[self]._add_common_events, parameter[name[events], name[df_columns]]]
call[name[events]][name[Git].COMMIT_ID] assign[=] call[name[df_columns]][name[Git].COMMIT_ID]
call[name[events]][name[Git].COMMIT_EVENT] assign[=] call[name[df_columns]][name[Git].COMMIT_EVENT]
call[name[events]][name[Git].COMMIT_DATE] assign[=] call[name[df_columns]][name[Git].COMMIT_DATE]
call[name[events]][name[Git].COMMIT_OWNER] assign[=] call[name[df_columns]][name[Git].COMMIT_OWNER]
call[name[events]][name[Git].COMMIT_COMMITTER] assign[=] call[name[df_columns]][name[Git].COMMIT_COMMITTER]
call[name[events]][name[Git].COMMIT_COMMITTER_DATE] assign[=] call[name[df_columns]][name[Git].COMMIT_COMMITTER_DATE]
call[name[events]][name[Git].COMMIT_REPOSITORY] assign[=] call[name[df_columns]][name[Git].COMMIT_REPOSITORY]
call[name[events]][name[Git].COMMIT_MESSAGE] assign[=] call[name[df_columns]][name[Git].COMMIT_MESSAGE]
call[name[events]][name[Git].COMMIT_HASH] assign[=] call[name[df_columns]][name[Git].COMMIT_HASH]
call[name[events]][name[Git].AUTHOR_DOMAIN] assign[=] call[name[df_columns]][name[Git].AUTHOR_DOMAIN]
if compare[name[granularity] equal[==] constant[1]] begin[:]
call[name[events]][name[Git].COMMIT_NUM_FILES] assign[=] call[name[df_columns]][name[Git].COMMIT_NUM_FILES]
call[name[events]][name[Git].COMMIT_ADDED_LINES] assign[=] call[name[df_columns]][name[Git].COMMIT_ADDED_LINES]
call[name[events]][name[Git].COMMIT_REMOVED_LINES] assign[=] call[name[df_columns]][name[Git].COMMIT_REMOVED_LINES]
if compare[name[granularity] equal[==] constant[2]] begin[:]
call[name[events]][name[Git].FILE_FILES] assign[=] call[name[df_columns]][name[Git].FILE_FILES]
call[name[events]][name[Git].FILE_EVENT] assign[=] call[name[df_columns]][name[Git].FILE_EVENT]
call[name[events]][name[Git].FILE_PATH] assign[=] call[name[df_columns]][name[Git].FILE_PATH]
call[name[events]][name[Git].FILE_ADDED_LINES] assign[=] call[name[df_columns]][name[Git].FILE_ADDED_LINES]
call[name[events]][name[Git].FILE_REMOVED_LINES] assign[=] call[name[df_columns]][name[Git].FILE_REMOVED_LINES]
return[name[events]] | keyword[def] identifier[eventize] ( identifier[self] , identifier[granularity] ):
literal[string]
identifier[df_columns] ={}
identifier[self] . identifier[_init_common_fields] ( identifier[df_columns] )
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_ID] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_EVENT] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_DATE] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_OWNER] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_COMMITTER] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_COMMITTER_DATE] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_REPOSITORY] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_MESSAGE] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_NUM_FILES] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_ADDED_LINES] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_REMOVED_LINES] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_HASH] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[AUTHOR_DOMAIN] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[FILE_FILES] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[FILE_EVENT] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[FILE_PATH] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[FILE_ADDED_LINES] ]=[]
identifier[df_columns] [ identifier[Git] . identifier[FILE_REMOVED_LINES] ]=[]
identifier[events] = identifier[pandas] . identifier[DataFrame] ()
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[items] :
identifier[commit_data] = identifier[item] [ literal[string] ]
keyword[if] identifier[granularity] == literal[int] :
identifier[self] . identifier[_add_common_fields] ( identifier[df_columns] , identifier[item] )
identifier[self] . identifier[__add_commit_info] ( identifier[df_columns] , identifier[item] )
identifier[added_lines] = literal[int]
identifier[removed_lines] = literal[int]
identifier[files] = identifier[commit_data] [ literal[string] ]
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_NUM_FILES] ]= identifier[int] ( identifier[len] ( identifier[files] ))
keyword[for] identifier[f] keyword[in] identifier[files] :
keyword[if] literal[string] keyword[in] identifier[f] . identifier[keys] () keyword[and] identifier[f] [ literal[string] ]!= literal[string] :
identifier[added_lines] = identifier[added_lines] + identifier[int] ( identifier[f] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[f] . identifier[keys] () keyword[and] identifier[f] [ literal[string] ]!= literal[string] :
identifier[removed_lines] = identifier[removed_lines] + identifier[int] ( identifier[f] [ literal[string] ])
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_ADDED_LINES] ]= identifier[added_lines]
identifier[df_columns] [ identifier[Git] . identifier[COMMIT_REMOVED_LINES] ]= identifier[removed_lines]
keyword[if] identifier[granularity] == literal[int] :
keyword[if] literal[string] keyword[in] identifier[commit_data] . identifier[keys] ():
identifier[files] = identifier[commit_data] [ literal[string] ]
identifier[nfiles] = literal[int]
keyword[for] identifier[f] keyword[in] identifier[files] :
keyword[if] literal[string] keyword[in] identifier[f] . identifier[keys] ():
identifier[nfiles] += literal[int]
keyword[for] identifier[f] keyword[in] identifier[files] :
identifier[self] . identifier[_add_common_fields] ( identifier[df_columns] , identifier[item] )
identifier[self] . identifier[__add_commit_info] ( identifier[df_columns] , identifier[item] )
identifier[df_columns] [ identifier[Git] . identifier[FILE_FILES] ]. identifier[append] ( identifier[nfiles] )
keyword[if] literal[string] keyword[in] identifier[f] . identifier[keys] ():
identifier[df_columns] [ identifier[Git] . identifier[FILE_EVENT] ]. identifier[append] ( identifier[Git] . identifier[EVENT_FILE] + identifier[f] [ literal[string] ])
keyword[else] :
identifier[df_columns] [ identifier[Git] . identifier[FILE_EVENT] ]. identifier[append] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[f] . identifier[keys] ():
identifier[df_columns] [ identifier[Git] . identifier[FILE_PATH] ]. identifier[append] ( identifier[f] [ literal[string] ])
keyword[else] :
identifier[df_columns] [ identifier[Git] . identifier[FILE_PATH] ]. identifier[append] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[f] . identifier[keys] ():
keyword[if] identifier[f] [ literal[string] ]== literal[string] :
identifier[df_columns] [ identifier[Git] . identifier[FILE_ADDED_LINES] ]. identifier[append] ( literal[int] )
keyword[else] :
identifier[df_columns] [ identifier[Git] . identifier[FILE_ADDED_LINES] ]. identifier[append] ( identifier[int] ( identifier[f] [ literal[string] ]))
keyword[else] :
identifier[df_columns] [ identifier[Git] . identifier[FILE_ADDED_LINES] ]. identifier[append] ( literal[int] )
keyword[if] literal[string] keyword[in] identifier[f] . identifier[keys] ():
keyword[if] identifier[f] [ literal[string] ]== literal[string] :
identifier[df_columns] [ identifier[Git] . identifier[FILE_REMOVED_LINES] ]. identifier[append] ( literal[int] )
keyword[else] :
identifier[df_columns] [ identifier[Git] . identifier[FILE_REMOVED_LINES] ]. identifier[append] ( identifier[int] ( identifier[f] [ literal[string] ]))
keyword[else] :
identifier[df_columns] [ identifier[Git] . identifier[FILE_REMOVED_LINES] ]. identifier[append] ( literal[int] )
keyword[else] :
identifier[print] ( literal[string] )
keyword[if] identifier[granularity] == literal[int] :
keyword[pass]
identifier[self] . identifier[_add_common_events] ( identifier[events] , identifier[df_columns] )
identifier[events] [ identifier[Git] . identifier[COMMIT_ID] ]= identifier[df_columns] [ identifier[Git] . identifier[COMMIT_ID] ]
identifier[events] [ identifier[Git] . identifier[COMMIT_EVENT] ]= identifier[df_columns] [ identifier[Git] . identifier[COMMIT_EVENT] ]
identifier[events] [ identifier[Git] . identifier[COMMIT_DATE] ]= identifier[df_columns] [ identifier[Git] . identifier[COMMIT_DATE] ]
identifier[events] [ identifier[Git] . identifier[COMMIT_OWNER] ]= identifier[df_columns] [ identifier[Git] . identifier[COMMIT_OWNER] ]
identifier[events] [ identifier[Git] . identifier[COMMIT_COMMITTER] ]= identifier[df_columns] [ identifier[Git] . identifier[COMMIT_COMMITTER] ]
identifier[events] [ identifier[Git] . identifier[COMMIT_COMMITTER_DATE] ]= identifier[df_columns] [ identifier[Git] . identifier[COMMIT_COMMITTER_DATE] ]
identifier[events] [ identifier[Git] . identifier[COMMIT_REPOSITORY] ]= identifier[df_columns] [ identifier[Git] . identifier[COMMIT_REPOSITORY] ]
identifier[events] [ identifier[Git] . identifier[COMMIT_MESSAGE] ]= identifier[df_columns] [ identifier[Git] . identifier[COMMIT_MESSAGE] ]
identifier[events] [ identifier[Git] . identifier[COMMIT_HASH] ]= identifier[df_columns] [ identifier[Git] . identifier[COMMIT_HASH] ]
identifier[events] [ identifier[Git] . identifier[AUTHOR_DOMAIN] ]= identifier[df_columns] [ identifier[Git] . identifier[AUTHOR_DOMAIN] ]
keyword[if] identifier[granularity] == literal[int] :
identifier[events] [ identifier[Git] . identifier[COMMIT_NUM_FILES] ]= identifier[df_columns] [ identifier[Git] . identifier[COMMIT_NUM_FILES] ]
identifier[events] [ identifier[Git] . identifier[COMMIT_ADDED_LINES] ]= identifier[df_columns] [ identifier[Git] . identifier[COMMIT_ADDED_LINES] ]
identifier[events] [ identifier[Git] . identifier[COMMIT_REMOVED_LINES] ]= identifier[df_columns] [ identifier[Git] . identifier[COMMIT_REMOVED_LINES] ]
keyword[if] identifier[granularity] == literal[int] :
identifier[events] [ identifier[Git] . identifier[FILE_FILES] ]= identifier[df_columns] [ identifier[Git] . identifier[FILE_FILES] ]
identifier[events] [ identifier[Git] . identifier[FILE_EVENT] ]= identifier[df_columns] [ identifier[Git] . identifier[FILE_EVENT] ]
identifier[events] [ identifier[Git] . identifier[FILE_PATH] ]= identifier[df_columns] [ identifier[Git] . identifier[FILE_PATH] ]
identifier[events] [ identifier[Git] . identifier[FILE_ADDED_LINES] ]= identifier[df_columns] [ identifier[Git] . identifier[FILE_ADDED_LINES] ]
identifier[events] [ identifier[Git] . identifier[FILE_REMOVED_LINES] ]= identifier[df_columns] [ identifier[Git] . identifier[FILE_REMOVED_LINES] ]
keyword[return] identifier[events] | def eventize(self, granularity):
""" This splits the JSON information found at self.events into the
several events. For this there are three different levels of time
consuming actions: 1-soft, 2-medium and 3-hard.
Level 1 provides events about commits
Level 2 provides events about files
Level 3 provides other events (not used so far)
:param granularity: Levels of time consuming actions to calculate events
:type granularity: integer
:returns: Pandas dataframe with splitted events.
:rtype: pandas.DataFrame
"""
df_columns = {}
# Init common columns
self._init_common_fields(df_columns)
# First level granularity
df_columns[Git.COMMIT_ID] = []
df_columns[Git.COMMIT_EVENT] = []
df_columns[Git.COMMIT_DATE] = []
df_columns[Git.COMMIT_OWNER] = []
df_columns[Git.COMMIT_COMMITTER] = []
df_columns[Git.COMMIT_COMMITTER_DATE] = []
df_columns[Git.COMMIT_REPOSITORY] = []
df_columns[Git.COMMIT_MESSAGE] = []
df_columns[Git.COMMIT_NUM_FILES] = []
df_columns[Git.COMMIT_ADDED_LINES] = []
df_columns[Git.COMMIT_REMOVED_LINES] = []
df_columns[Git.COMMIT_HASH] = []
df_columns[Git.AUTHOR_DOMAIN] = []
# Second level of granularity
df_columns[Git.FILE_FILES] = []
df_columns[Git.FILE_EVENT] = []
df_columns[Git.FILE_PATH] = []
df_columns[Git.FILE_ADDED_LINES] = []
df_columns[Git.FILE_REMOVED_LINES] = []
events = pandas.DataFrame()
for item in self.items:
commit_data = item['data']
if granularity == 1:
self._add_common_fields(df_columns, item)
self.__add_commit_info(df_columns, item)
added_lines = 0
removed_lines = 0
files = commit_data['files']
df_columns[Git.COMMIT_NUM_FILES] = int(len(files))
for f in files:
if 'added' in f.keys() and f['added'] != '-':
added_lines = added_lines + int(f['added']) # depends on [control=['if'], data=[]]
if 'removed' in f.keys() and f['removed'] != '-':
removed_lines = removed_lines + int(f['removed']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
df_columns[Git.COMMIT_ADDED_LINES] = added_lines
df_columns[Git.COMMIT_REMOVED_LINES] = removed_lines # depends on [control=['if'], data=[]]
# TODO: this will fail if no files are found in a commit (eg: merge)
if granularity == 2:
# Add extra info about files actions, if there were any
if 'files' in commit_data.keys():
files = commit_data['files']
nfiles = 0
for f in files:
if 'action' in f.keys():
nfiles += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
for f in files:
self._add_common_fields(df_columns, item)
self.__add_commit_info(df_columns, item)
df_columns[Git.FILE_FILES].append(nfiles)
if 'action' in f.keys():
df_columns[Git.FILE_EVENT].append(Git.EVENT_FILE + f['action']) # depends on [control=['if'], data=[]]
else:
df_columns[Git.FILE_EVENT].append('-')
if 'file' in f.keys():
df_columns[Git.FILE_PATH].append(f['file']) # depends on [control=['if'], data=[]]
else:
df_columns[Git.FILE_PATH].append('-')
if 'added' in f.keys():
if f['added'] == '-':
df_columns[Git.FILE_ADDED_LINES].append(0) # depends on [control=['if'], data=[]]
else:
df_columns[Git.FILE_ADDED_LINES].append(int(f['added'])) # depends on [control=['if'], data=[]]
else:
df_columns[Git.FILE_ADDED_LINES].append(0)
if 'removed' in f.keys():
if f['removed'] == '-':
df_columns[Git.FILE_REMOVED_LINES].append(0) # depends on [control=['if'], data=[]]
else:
df_columns[Git.FILE_REMOVED_LINES].append(int(f['removed'])) # depends on [control=['if'], data=[]]
else:
df_columns[Git.FILE_REMOVED_LINES].append(0) # depends on [control=['for'], data=['f']] # depends on [control=['if'], data=[]]
else:
print('Merge found, doing nothing...') # depends on [control=['if'], data=[]]
if granularity == 3:
# TDB
pass # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
# Done in this way to have an order (and not a direct cast)
self._add_common_events(events, df_columns)
events[Git.COMMIT_ID] = df_columns[Git.COMMIT_ID]
events[Git.COMMIT_EVENT] = df_columns[Git.COMMIT_EVENT]
events[Git.COMMIT_DATE] = df_columns[Git.COMMIT_DATE]
events[Git.COMMIT_OWNER] = df_columns[Git.COMMIT_OWNER]
events[Git.COMMIT_COMMITTER] = df_columns[Git.COMMIT_COMMITTER]
events[Git.COMMIT_COMMITTER_DATE] = df_columns[Git.COMMIT_COMMITTER_DATE]
events[Git.COMMIT_REPOSITORY] = df_columns[Git.COMMIT_REPOSITORY]
events[Git.COMMIT_MESSAGE] = df_columns[Git.COMMIT_MESSAGE]
events[Git.COMMIT_HASH] = df_columns[Git.COMMIT_HASH]
events[Git.AUTHOR_DOMAIN] = df_columns[Git.AUTHOR_DOMAIN]
if granularity == 1:
events[Git.COMMIT_NUM_FILES] = df_columns[Git.COMMIT_NUM_FILES]
events[Git.COMMIT_ADDED_LINES] = df_columns[Git.COMMIT_ADDED_LINES]
events[Git.COMMIT_REMOVED_LINES] = df_columns[Git.COMMIT_REMOVED_LINES] # depends on [control=['if'], data=[]]
if granularity == 2:
events[Git.FILE_FILES] = df_columns[Git.FILE_FILES]
events[Git.FILE_EVENT] = df_columns[Git.FILE_EVENT]
events[Git.FILE_PATH] = df_columns[Git.FILE_PATH]
events[Git.FILE_ADDED_LINES] = df_columns[Git.FILE_ADDED_LINES]
events[Git.FILE_REMOVED_LINES] = df_columns[Git.FILE_REMOVED_LINES] # depends on [control=['if'], data=[]]
return events |
def SendMessage(self, MessageText):
    """Sends a chat message.

    :Parameters:
      MessageText : unicode
        Message text

    :return: Message object
    :rtype: `ChatMessage`
    """
    # Build the Skype API command, send it, and wrap the reply's message
    # identifier (second token of the response) in a ChatMessage object.
    command = 'CHATMESSAGE %s %s' % (self.Name, tounicode(MessageText))
    reply = self._Owner._DoCommand(command)
    message_id = chop(reply, 2)[1]
    return ChatMessage(self._Owner, message_id)
constant[Sends a chat message.
:Parameters:
MessageText : unicode
Message text
:return: Message object
:rtype: `ChatMessage`
]
return[call[name[ChatMessage], parameter[name[self]._Owner, call[call[name[chop], parameter[call[name[self]._Owner._DoCommand, parameter[binary_operation[constant[CHATMESSAGE %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c76db70>, <ast.Call object at 0x7da20c76f0d0>]]]]], constant[2]]]][constant[1]]]]] | keyword[def] identifier[SendMessage] ( identifier[self] , identifier[MessageText] ):
literal[string]
keyword[return] identifier[ChatMessage] ( identifier[self] . identifier[_Owner] , identifier[chop] ( identifier[self] . identifier[_Owner] . identifier[_DoCommand] ( literal[string] %( identifier[self] . identifier[Name] ,
identifier[tounicode] ( identifier[MessageText] ))), literal[int] )[ literal[int] ]) | def SendMessage(self, MessageText):
"""Sends a chat message.
:Parameters:
MessageText : unicode
Message text
:return: Message object
:rtype: `ChatMessage`
"""
return ChatMessage(self._Owner, chop(self._Owner._DoCommand('CHATMESSAGE %s %s' % (self.Name, tounicode(MessageText))), 2)[1]) |
def get_cardinality(self, node=None):
    """
    Returns the cardinality of the node. Throws an error if the CPD for the
    queried node hasn't been added to the network.
    Parameters
    ----------
    node: Any hashable python object(optional).
        The node whose cardinality we want. If node is not specified returns a
        dictionary with the given variable as keys and their respective cardinality
        as values.
    Returns
    -------
    int or dict : If node is specified returns the cardinality of the node.
        If node is not specified returns a dictionary with the given
        variable as keys and their respective cardinality as values.
    Examples
    --------
    >>> from pgmpy.models import BayesianModel
    >>> from pgmpy.factors.discrete import TabularCPD
    >>> student = BayesianModel([('diff', 'grade'), ('intel', 'grade')])
    >>> cpd_diff = TabularCPD('diff',2,[[0.6,0.4]]);
    >>> cpd_intel = TabularCPD('intel',2,[[0.7,0.3]]);
    >>> cpd_grade = TabularCPD('grade', 2, [[0.1, 0.9, 0.2, 0.7],
    ...                                     [0.9, 0.1, 0.8, 0.3]],
    ...                                     ['intel', 'diff'], [2, 2])
    >>> student.add_cpds(cpd_diff,cpd_intel,cpd_grade)
    >>> student.get_cardinality()
    defaultdict(int, {'diff': 2, 'grade': 2, 'intel': 2})
    >>> student.get_cardinality('intel')
    2
    """
    # No node given: map every variable to its cardinality. The result is a
    # defaultdict(int) so lookups of unknown variables yield 0, matching the
    # documented return type.
    if not node:
        return defaultdict(
            int, {cpd.variable: cpd.cardinality[0] for cpd in self.cpds})
    # Specific node: delegate to get_cpds, which raises if no CPD was added.
    return self.get_cpds(node).cardinality[0]
constant[
Returns the cardinality of the node. Throws an error if the CPD for the
queried node hasn't been added to the network.
Parameters
----------
node: Any hashable python object(optional).
The node whose cardinality we want. If node is not specified returns a
dictionary with the given variable as keys and their respective cardinality
as values.
Returns
-------
int or dict : If node is specified returns the cardinality of the node.
If node is not specified returns a dictionary with the given
variable as keys and their respective cardinality as values.
Examples
--------
>>> from pgmpy.models import BayesianModel
>>> from pgmpy.factors.discrete import TabularCPD
>>> student = BayesianModel([('diff', 'grade'), ('intel', 'grade')])
>>> cpd_diff = TabularCPD('diff',2,[[0.6,0.4]]);
>>> cpd_intel = TabularCPD('intel',2,[[0.7,0.3]]);
>>> cpd_grade = TabularCPD('grade', 2, [[0.1, 0.9, 0.2, 0.7],
... [0.9, 0.1, 0.8, 0.3]],
... ['intel', 'diff'], [2, 2])
>>> student.add_cpds(cpd_diff,cpd_intel,cpd_grade)
>>> student.get_cardinality()
defaultdict(int, {'diff': 2, 'grade': 2, 'intel': 2})
>>> student.get_cardinality('intel')
2
]
if name[node] begin[:]
return[call[call[name[self].get_cpds, parameter[name[node]]].cardinality][constant[0]]] | keyword[def] identifier[get_cardinality] ( identifier[self] , identifier[node] = keyword[None] ):
literal[string]
keyword[if] identifier[node] :
keyword[return] identifier[self] . identifier[get_cpds] ( identifier[node] ). identifier[cardinality] [ literal[int] ]
keyword[else] :
identifier[cardinalities] = identifier[defaultdict] ( identifier[int] )
keyword[for] identifier[cpd] keyword[in] identifier[self] . identifier[cpds] :
identifier[cardinalities] [ identifier[cpd] . identifier[variable] ]= identifier[cpd] . identifier[cardinality] [ literal[int] ]
keyword[return] identifier[cardinalities] | def get_cardinality(self, node=None):
"""
Returns the cardinality of the node. Throws an error if the CPD for the
queried node hasn't been added to the network.
Parameters
----------
node: Any hashable python object(optional).
The node whose cardinality we want. If node is not specified returns a
dictionary with the given variable as keys and their respective cardinality
as values.
Returns
-------
int or dict : If node is specified returns the cardinality of the node.
If node is not specified returns a dictionary with the given
variable as keys and their respective cardinality as values.
Examples
--------
>>> from pgmpy.models import BayesianModel
>>> from pgmpy.factors.discrete import TabularCPD
>>> student = BayesianModel([('diff', 'grade'), ('intel', 'grade')])
>>> cpd_diff = TabularCPD('diff',2,[[0.6,0.4]]);
>>> cpd_intel = TabularCPD('intel',2,[[0.7,0.3]]);
>>> cpd_grade = TabularCPD('grade', 2, [[0.1, 0.9, 0.2, 0.7],
... [0.9, 0.1, 0.8, 0.3]],
... ['intel', 'diff'], [2, 2])
>>> student.add_cpds(cpd_diff,cpd_intel,cpd_grade)
>>> student.get_cardinality()
defaultdict(int, {'diff': 2, 'grade': 2, 'intel': 2})
>>> student.get_cardinality('intel')
2
"""
if node:
return self.get_cpds(node).cardinality[0] # depends on [control=['if'], data=[]]
else:
cardinalities = defaultdict(int)
for cpd in self.cpds:
cardinalities[cpd.variable] = cpd.cardinality[0] # depends on [control=['for'], data=['cpd']]
return cardinalities |
def encrypt(self, data):
    """
    Encrypt the data. Also, update the cipher iv. This is needed for SSLv3
    and TLS 1.0. For TLS 1.1/1.2, it is overwritten in TLS.post_build().
    """
    # Refuse to encrypt before every key-material component is ready.
    if False in six.itervalues(self.ready):
        raise CipherError(data)
    enc = self._cipher.encryptor()
    ciphertext = enc.update(data) + enc.finalize()
    # Keep the last cipher block as IV for the next record (CBC chaining).
    self.iv = ciphertext[-self.block_size:]
    return ciphertext
return tmp | def function[encrypt, parameter[self, data]]:
constant[
Encrypt the data. Also, update the cipher iv. This is needed for SSLv3
and TLS 1.0. For TLS 1.1/1.2, it is overwritten in TLS.post_build().
]
if compare[constant[False] in call[name[six].itervalues, parameter[name[self].ready]]] begin[:]
<ast.Raise object at 0x7da1b21a2140>
variable[encryptor] assign[=] call[name[self]._cipher.encryptor, parameter[]]
variable[tmp] assign[=] binary_operation[call[name[encryptor].update, parameter[name[data]]] + call[name[encryptor].finalize, parameter[]]]
name[self].iv assign[=] call[name[tmp]][<ast.Slice object at 0x7da1b21a3520>]
return[name[tmp]] | keyword[def] identifier[encrypt] ( identifier[self] , identifier[data] ):
literal[string]
keyword[if] keyword[False] keyword[in] identifier[six] . identifier[itervalues] ( identifier[self] . identifier[ready] ):
keyword[raise] identifier[CipherError] ( identifier[data] )
identifier[encryptor] = identifier[self] . identifier[_cipher] . identifier[encryptor] ()
identifier[tmp] = identifier[encryptor] . identifier[update] ( identifier[data] )+ identifier[encryptor] . identifier[finalize] ()
identifier[self] . identifier[iv] = identifier[tmp] [- identifier[self] . identifier[block_size] :]
keyword[return] identifier[tmp] | def encrypt(self, data):
"""
Encrypt the data. Also, update the cipher iv. This is needed for SSLv3
and TLS 1.0. For TLS 1.1/1.2, it is overwritten in TLS.post_build().
"""
if False in six.itervalues(self.ready):
raise CipherError(data) # depends on [control=['if'], data=[]]
encryptor = self._cipher.encryptor()
tmp = encryptor.update(data) + encryptor.finalize()
self.iv = tmp[-self.block_size:]
return tmp |
def _call(self, x, out=None):
    """Calculate the divergence of ``x``.

    Sums the finite differences of each vector-field component along its
    own axis. ``out`` is written in place when given; otherwise a new
    element of ``self.range`` is created and returned.
    """
    if out is None:
        out = self.range.element()
    ndim = self.range.ndim
    dx = self.range.cell_sides
    # Single scratch buffer reused for every axis to avoid ndim allocations.
    tmp = np.empty(out.shape, out.dtype, order=out.space.default_order)
    with writable_array(out) as out_arr:
        for axis in range(ndim):
            # Finite difference of the axis-th component along that axis.
            finite_diff(x[axis], axis=axis, dx=dx[axis],
                        method=self.method, pad_mode=self.pad_mode,
                        pad_const=self.pad_const,
                        out=tmp)
            # First term initializes the output; later terms accumulate.
            if axis == 0:
                out_arr[:] = tmp
            else:
                out_arr += tmp
    return out
return out | def function[_call, parameter[self, x, out]]:
constant[Calculate the divergence of ``x``.]
if compare[name[out] is constant[None]] begin[:]
variable[out] assign[=] call[name[self].range.element, parameter[]]
variable[ndim] assign[=] name[self].range.ndim
variable[dx] assign[=] name[self].range.cell_sides
variable[tmp] assign[=] call[name[np].empty, parameter[name[out].shape, name[out].dtype]]
with call[name[writable_array], parameter[name[out]]] begin[:]
for taget[name[axis]] in starred[call[name[range], parameter[name[ndim]]]] begin[:]
call[name[finite_diff], parameter[call[name[x]][name[axis]]]]
if compare[name[axis] equal[==] constant[0]] begin[:]
call[name[out_arr]][<ast.Slice object at 0x7da1b1ec64a0>] assign[=] name[tmp]
return[name[out]] | keyword[def] identifier[_call] ( identifier[self] , identifier[x] , identifier[out] = keyword[None] ):
literal[string]
keyword[if] identifier[out] keyword[is] keyword[None] :
identifier[out] = identifier[self] . identifier[range] . identifier[element] ()
identifier[ndim] = identifier[self] . identifier[range] . identifier[ndim]
identifier[dx] = identifier[self] . identifier[range] . identifier[cell_sides]
identifier[tmp] = identifier[np] . identifier[empty] ( identifier[out] . identifier[shape] , identifier[out] . identifier[dtype] , identifier[order] = identifier[out] . identifier[space] . identifier[default_order] )
keyword[with] identifier[writable_array] ( identifier[out] ) keyword[as] identifier[out_arr] :
keyword[for] identifier[axis] keyword[in] identifier[range] ( identifier[ndim] ):
identifier[finite_diff] ( identifier[x] [ identifier[axis] ], identifier[axis] = identifier[axis] , identifier[dx] = identifier[dx] [ identifier[axis] ],
identifier[method] = identifier[self] . identifier[method] , identifier[pad_mode] = identifier[self] . identifier[pad_mode] ,
identifier[pad_const] = identifier[self] . identifier[pad_const] ,
identifier[out] = identifier[tmp] )
keyword[if] identifier[axis] == literal[int] :
identifier[out_arr] [:]= identifier[tmp]
keyword[else] :
identifier[out_arr] += identifier[tmp]
keyword[return] identifier[out] | def _call(self, x, out=None):
"""Calculate the divergence of ``x``."""
if out is None:
out = self.range.element() # depends on [control=['if'], data=['out']]
ndim = self.range.ndim
dx = self.range.cell_sides
tmp = np.empty(out.shape, out.dtype, order=out.space.default_order)
with writable_array(out) as out_arr:
for axis in range(ndim):
finite_diff(x[axis], axis=axis, dx=dx[axis], method=self.method, pad_mode=self.pad_mode, pad_const=self.pad_const, out=tmp)
if axis == 0:
out_arr[:] = tmp # depends on [control=['if'], data=[]]
else:
out_arr += tmp # depends on [control=['for'], data=['axis']] # depends on [control=['with'], data=['out_arr']]
return out |
def without_edge(self, edge: Edge) -> 'BipartiteGraph[TLeft, TRight, TEdgeValue]':
    """Return a copy of this bipartite graph with the given edge removed.

    All other edges and their associated values are carried over unchanged.
    """
    remaining = ((other, value) for other, value in self._edges.items()
                 if edge != other)
    return BipartiteGraph(remaining)
constant[Returns a copy of this bipartite graph with the given edge removed.]
return[call[name[BipartiteGraph], parameter[<ast.GeneratorExp object at 0x7da18c4ceb30>]]] | keyword[def] identifier[without_edge] ( identifier[self] , identifier[edge] : identifier[Edge] )-> literal[string] :
literal[string]
keyword[return] identifier[BipartiteGraph] (( identifier[e2] , identifier[v] ) keyword[for] identifier[e2] , identifier[v] keyword[in] identifier[self] . identifier[_edges] . identifier[items] () keyword[if] identifier[edge] != identifier[e2] ) | def without_edge(self, edge: Edge) -> 'BipartiteGraph[TLeft, TRight, TEdgeValue]':
"""Returns a copy of this bipartite graph with the given edge removed."""
return BipartiteGraph(((e2, v) for (e2, v) in self._edges.items() if edge != e2)) |
def fill_document(doc):
    """Add a section, a subsection and some text to the document.

    :param doc: the document
    :type doc: :class:`pylatex.document.Document` instance
    """
    section_ctx = doc.create(Section('A section'))
    with section_ctx:
        doc.append('Some regular text and some ')
        doc.append(italic('italic text. '))
        subsection_ctx = doc.create(Subsection('A subsection'))
        with subsection_ctx:
            doc.append('Also some crazy characters: $&#{}')
doc.append('Also some crazy characters: $&#{}') | def function[fill_document, parameter[doc]]:
constant[Add a section, a subsection and some text to the document.
:param doc: the document
:type doc: :class:`pylatex.document.Document` instance
]
with call[name[doc].create, parameter[call[name[Section], parameter[constant[A section]]]]] begin[:]
call[name[doc].append, parameter[constant[Some regular text and some ]]]
call[name[doc].append, parameter[call[name[italic], parameter[constant[italic text. ]]]]]
with call[name[doc].create, parameter[call[name[Subsection], parameter[constant[A subsection]]]]] begin[:]
call[name[doc].append, parameter[constant[Also some crazy characters: $&#{}]]] | keyword[def] identifier[fill_document] ( identifier[doc] ):
literal[string]
keyword[with] identifier[doc] . identifier[create] ( identifier[Section] ( literal[string] )):
identifier[doc] . identifier[append] ( literal[string] )
identifier[doc] . identifier[append] ( identifier[italic] ( literal[string] ))
keyword[with] identifier[doc] . identifier[create] ( identifier[Subsection] ( literal[string] )):
identifier[doc] . identifier[append] ( literal[string] ) | def fill_document(doc):
"""Add a section, a subsection and some text to the document.
:param doc: the document
:type doc: :class:`pylatex.document.Document` instance
"""
with doc.create(Section('A section')):
doc.append('Some regular text and some ')
doc.append(italic('italic text. '))
with doc.create(Subsection('A subsection')):
doc.append('Also some crazy characters: $&#{}') # depends on [control=['with'], data=[]] # depends on [control=['with'], data=[]] |
def _collapse_leading_ws(header, txt):
"""
``Description`` header must preserve newlines; all others need not
"""
if header.lower() == 'description': # preserve newlines
return '\n'.join([x[8:] if x.startswith(' ' * 8) else x
for x in txt.strip().splitlines()])
else:
return ' '.join([x.strip() for x in txt.splitlines()]) | def function[_collapse_leading_ws, parameter[header, txt]]:
constant[
``Description`` header must preserve newlines; all others need not
]
if compare[call[name[header].lower, parameter[]] equal[==] constant[description]] begin[:]
return[call[constant[
].join, parameter[<ast.ListComp object at 0x7da18fe90ee0>]]] | keyword[def] identifier[_collapse_leading_ws] ( identifier[header] , identifier[txt] ):
literal[string]
keyword[if] identifier[header] . identifier[lower] ()== literal[string] :
keyword[return] literal[string] . identifier[join] ([ identifier[x] [ literal[int] :] keyword[if] identifier[x] . identifier[startswith] ( literal[string] * literal[int] ) keyword[else] identifier[x]
keyword[for] identifier[x] keyword[in] identifier[txt] . identifier[strip] (). identifier[splitlines] ()])
keyword[else] :
keyword[return] literal[string] . identifier[join] ([ identifier[x] . identifier[strip] () keyword[for] identifier[x] keyword[in] identifier[txt] . identifier[splitlines] ()]) | def _collapse_leading_ws(header, txt):
"""
``Description`` header must preserve newlines; all others need not
"""
if header.lower() == 'description': # preserve newlines
return '\n'.join([x[8:] if x.startswith(' ' * 8) else x for x in txt.strip().splitlines()]) # depends on [control=['if'], data=[]]
else:
return ' '.join([x.strip() for x in txt.splitlines()]) |
def _genKeysBins(self):
""" Generates keys from bins, sets self._allowedKeys normally set in _classVariables
"""
binlimits = self._binlimits
allowedKeys = []
midbinlimits = binlimits
if binlimits[0] == -float('inf'):
midbinlimits = binlimits[1:] # remove the bottom limit
allowedKeys.append('<{0}'.format(midbinlimits[0]))
if binlimits[-1] == float('inf'):
midbinlimits = midbinlimits[:-1]
lastbin = midbinlimits[0]
for binlimit in midbinlimits[1:]:
if lastbin == binlimit:
allowedKeys.append('{0}'.format(binlimit))
else:
allowedKeys.append('{0} to {1}'.format(lastbin, binlimit))
lastbin = binlimit
if binlimits[-1] == float('inf'):
allowedKeys.append('{0}+'.format(binlimits[-2]))
allowedKeys.append('Uncertain')
self._allowedKeys = allowedKeys | def function[_genKeysBins, parameter[self]]:
constant[ Generates keys from bins, sets self._allowedKeys normally set in _classVariables
]
variable[binlimits] assign[=] name[self]._binlimits
variable[allowedKeys] assign[=] list[[]]
variable[midbinlimits] assign[=] name[binlimits]
if compare[call[name[binlimits]][constant[0]] equal[==] <ast.UnaryOp object at 0x7da1b0b72020>] begin[:]
variable[midbinlimits] assign[=] call[name[binlimits]][<ast.Slice object at 0x7da1b0b71b10>]
call[name[allowedKeys].append, parameter[call[constant[<{0}].format, parameter[call[name[midbinlimits]][constant[0]]]]]]
if compare[call[name[binlimits]][<ast.UnaryOp object at 0x7da1b0b712a0>] equal[==] call[name[float], parameter[constant[inf]]]] begin[:]
variable[midbinlimits] assign[=] call[name[midbinlimits]][<ast.Slice object at 0x7da18dc059c0>]
variable[lastbin] assign[=] call[name[midbinlimits]][constant[0]]
for taget[name[binlimit]] in starred[call[name[midbinlimits]][<ast.Slice object at 0x7da18dc05db0>]] begin[:]
if compare[name[lastbin] equal[==] name[binlimit]] begin[:]
call[name[allowedKeys].append, parameter[call[constant[{0}].format, parameter[name[binlimit]]]]]
variable[lastbin] assign[=] name[binlimit]
if compare[call[name[binlimits]][<ast.UnaryOp object at 0x7da18dc05060>] equal[==] call[name[float], parameter[constant[inf]]]] begin[:]
call[name[allowedKeys].append, parameter[call[constant[{0}+].format, parameter[call[name[binlimits]][<ast.UnaryOp object at 0x7da18dc06290>]]]]]
call[name[allowedKeys].append, parameter[constant[Uncertain]]]
name[self]._allowedKeys assign[=] name[allowedKeys] | keyword[def] identifier[_genKeysBins] ( identifier[self] ):
literal[string]
identifier[binlimits] = identifier[self] . identifier[_binlimits]
identifier[allowedKeys] =[]
identifier[midbinlimits] = identifier[binlimits]
keyword[if] identifier[binlimits] [ literal[int] ]==- identifier[float] ( literal[string] ):
identifier[midbinlimits] = identifier[binlimits] [ literal[int] :]
identifier[allowedKeys] . identifier[append] ( literal[string] . identifier[format] ( identifier[midbinlimits] [ literal[int] ]))
keyword[if] identifier[binlimits] [- literal[int] ]== identifier[float] ( literal[string] ):
identifier[midbinlimits] = identifier[midbinlimits] [:- literal[int] ]
identifier[lastbin] = identifier[midbinlimits] [ literal[int] ]
keyword[for] identifier[binlimit] keyword[in] identifier[midbinlimits] [ literal[int] :]:
keyword[if] identifier[lastbin] == identifier[binlimit] :
identifier[allowedKeys] . identifier[append] ( literal[string] . identifier[format] ( identifier[binlimit] ))
keyword[else] :
identifier[allowedKeys] . identifier[append] ( literal[string] . identifier[format] ( identifier[lastbin] , identifier[binlimit] ))
identifier[lastbin] = identifier[binlimit]
keyword[if] identifier[binlimits] [- literal[int] ]== identifier[float] ( literal[string] ):
identifier[allowedKeys] . identifier[append] ( literal[string] . identifier[format] ( identifier[binlimits] [- literal[int] ]))
identifier[allowedKeys] . identifier[append] ( literal[string] )
identifier[self] . identifier[_allowedKeys] = identifier[allowedKeys] | def _genKeysBins(self):
""" Generates keys from bins, sets self._allowedKeys normally set in _classVariables
"""
binlimits = self._binlimits
allowedKeys = []
midbinlimits = binlimits
if binlimits[0] == -float('inf'):
midbinlimits = binlimits[1:] # remove the bottom limit
allowedKeys.append('<{0}'.format(midbinlimits[0])) # depends on [control=['if'], data=[]]
if binlimits[-1] == float('inf'):
midbinlimits = midbinlimits[:-1] # depends on [control=['if'], data=[]]
lastbin = midbinlimits[0]
for binlimit in midbinlimits[1:]:
if lastbin == binlimit:
allowedKeys.append('{0}'.format(binlimit)) # depends on [control=['if'], data=['binlimit']]
else:
allowedKeys.append('{0} to {1}'.format(lastbin, binlimit))
lastbin = binlimit # depends on [control=['for'], data=['binlimit']]
if binlimits[-1] == float('inf'):
allowedKeys.append('{0}+'.format(binlimits[-2])) # depends on [control=['if'], data=[]]
allowedKeys.append('Uncertain')
self._allowedKeys = allowedKeys |
def as_cep(numero):
    """Format a Brazilian postal code (CEP) as 'NNNNN-NNN'. If the argument
    is not a valid CEP, it is returned unchanged.
    """
    somente_digitos = digitos(numero)
    if not is_cep(somente_digitos):
        return numero
    return '{}-{}'.format(somente_digitos[:5], somente_digitos[5:])
return numero | def function[as_cep, parameter[numero]]:
constant[Formata um número de CEP. Se o argumento não for um CEP válido apenas
retorna o argumento sem qualquer modificação.
]
variable[_numero] assign[=] call[name[digitos], parameter[name[numero]]]
if call[name[is_cep], parameter[name[_numero]]] begin[:]
return[call[constant[{}-{}].format, parameter[call[name[_numero]][<ast.Slice object at 0x7da1b1bb2e00>], call[name[_numero]][<ast.Slice object at 0x7da1b1b0c1f0>]]]]
return[name[numero]] | keyword[def] identifier[as_cep] ( identifier[numero] ):
literal[string]
identifier[_numero] = identifier[digitos] ( identifier[numero] )
keyword[if] identifier[is_cep] ( identifier[_numero] ):
keyword[return] literal[string] . identifier[format] ( identifier[_numero] [: literal[int] ], identifier[_numero] [ literal[int] :])
keyword[return] identifier[numero] | def as_cep(numero):
"""Formata um número de CEP. Se o argumento não for um CEP válido apenas
retorna o argumento sem qualquer modificação.
"""
_numero = digitos(numero)
if is_cep(_numero):
return '{}-{}'.format(_numero[:5], _numero[5:]) # depends on [control=['if'], data=[]]
return numero |
def normalize_text(self, text):
    """
    Normalize text, when fixed format is ON, replace the first 6 chars by a space.
    """
    # Free-format mode keeps the text as-is; fixed format blanks the
    # first six columns (the Fortran fixed-form label/continuation area).
    if self.editor.free_format:
        return text.upper()
    return (' ' * 6 + text[6:]).upper()
return text.upper() | def function[normalize_text, parameter[self, text]]:
constant[
Normalize text, when fixed format is ON, replace the first 6 chars by a space.
]
if <ast.UnaryOp object at 0x7da2041db0d0> begin[:]
variable[text] assign[=] binary_operation[binary_operation[constant[ ] * constant[6]] + call[name[text]][<ast.Slice object at 0x7da2041dbf40>]]
return[call[name[text].upper, parameter[]]] | keyword[def] identifier[normalize_text] ( identifier[self] , identifier[text] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[editor] . identifier[free_format] :
identifier[text] = literal[string] * literal[int] + identifier[text] [ literal[int] :]
keyword[return] identifier[text] . identifier[upper] () | def normalize_text(self, text):
"""
Normalize text, when fixed format is ON, replace the first 6 chars by a space.
"""
if not self.editor.free_format:
text = ' ' * 6 + text[6:] # depends on [control=['if'], data=[]]
return text.upper() |
def image(images, name=None, ch_axis=1, row=0, mode=None, batched=True,
          out=None, subdir='', timeout=5, **kwargs):
    """Summary images to visualize.

    The given array of images is converted to an image file (PNG format by
    default), saved under the output directory, and reported to the
    ChainerUI server. A file is written on every call, and the saved images
    are shown vertically on the ``assets`` endpoint; use
    :func:`~chainerui.summary.reporter` to aggregate images in a row.

    Examples of how to set arguments::

        >>> from chainerui import summary
        >>> summary.set_out('/path/to/log')  # same as 'log' file directory
        >>>
        >>> x.shape  # = [Batchsize, Channel, Height, Width]
        (10, 3, 5, 5)
        >>> summary.image(x, name='test')         # images are tiled as 1x10
        >>> summary.image(x, name='test', row=2)  # images are tiled as 2x5
        >>>
        >>> x.shape  # = [C, H, W]
        (3, 5, 5)
        >>> # need to set as a non-batched image and channel axis explicitly
        >>> summary.image(x, name='test', ch_axis=0, batched=False)
        >>>
        >>> x.shape  # = [B, H, W, C]
        (10, 5, 5, 3)
        >>> # need to set channel axis explicitly
        >>> summary.image(x, name='test', ch_axis=-1, row=2)
        >>>
        >>> x.shape  # = [H, W, C]
        (5, 5, 3)
        >>> # need to set as a non-batched image
        >>> summary.image(x, name='test', ch_axis=-1, batched=False)
        >>>
        >>> x.shape  # = [B, H, W], grayscale images
        (10, 5, 5)
        >>> summary.image(x, name='test', row=2)  # image are tiled as 2x5
        >>>
        >>> x.shape  # = [H, W], a grayscale image
        (5, 5)
        >>> summary.image(x, name='test', batched=False)

    Add description about the image::

        >>> summary.image(x, name='test', epoch=1, iteration=100)
        >>> # 'epoch' and 'iteration' column will be shown.

    Args:
        images (:class:`numpy.ndarray` or :class:`cupy.ndarray` or \
            :class:`chainer.Variable`): batch of images. If the number of
            dimensions is 3 (or 2 when ``batched=False``), the pixels are
            assumed to be grayscale.
        name (str): name of the image, used as the column name; defaults
            to ``'image'`` when not set.
        ch_axis (int): index of the channel dimension, 1 by default;
            ignored when the images have no channel axis.
        row (int): row size used to tile batched images; 0 means
            unstacked. Ignored when there is only one image.
        mode (str): color space code (ChainerUI supports ``'HSV'``) for
            images that are not RGB or RGBA.
        batched (bool): set ``False`` when the image is not batched.
        out (str): directory path of output.
        subdir (str): sub-directory path of output.
        **kwargs (dict): key-value pairs shown as description columns; a
            timestamp of when the image was created is always added.
    """
    # Deferred import: bail out quietly when image support (Pillow) is
    # unavailable.
    from chainerui.report.image_report import check_available
    if not check_available():
        return
    from chainerui.report.image_report import report as _image

    out_root = _chainerui_asset_observer.get_outpath(out)
    out_path = os.path.join(out_root, subdir)
    if not os.path.isdir(out_path):
        os.makedirs(out_path)

    col_name = 'image' if name is None else name
    filename, created_at = _image(
        images, out_path, col_name, ch_axis, row, mode, batched)

    # Record the asset (description columns + relative image path) and
    # flush the observer so the server picks it up.
    asset = dict(kwargs)
    asset['timestamp'] = created_at.isoformat()
    asset['images'] = {col_name: os.path.join(subdir, filename)}
    _chainerui_asset_observer.add(asset)
    _chainerui_asset_observer.save(out_root, timeout)
_chainerui_asset_observer.save(out_root, timeout) | def function[image, parameter[images, name, ch_axis, row, mode, batched, out, subdir, timeout]]:
constant[Summary images to visualize.
Array of images are converted as image format (PNG format on default),
saved to output directory, and reported to the ChainerUI server.
The images are saved every called this function. The images will be shown
on `assets` endpoint vertically. If need to aggregate images in a row, use
:func:`~chainerui.summary.reporter`.
Examples of how to set arguments::
>>> from chainerui import summary
>>> summary.set_out('/path/to/log') # same as 'log' file directory
>>>
>>> x.shape # = [Batchsize, Channel, Height, Width]
(10, 3, 5, 5)
>>> summary.image(x, name='test') # images are tiled as 1x10
>>> summary.image(x, name='test', row=2) # images are tiled as 2x5
>>>
>>> x.shape # = [C, H, W]
(3, 5, 5)
>>> # need to set as a non-batched image and channel axis explicitly
>>> summary.image(x, name='test', ch_axis=0, batched=False)
>>>
>>> x.shape # = [B, H, W, C]
(10, 5, 5, 3)
>>> # need to set channel axis explicitly
>>> summary.image(x, name='test', ch_axis=-1, row=2)
>>>
>>> x.shape # = [H, W, C]
(5, 5, 3)
>>> # need to set as a non-batched image
>>> summary.image(x, name='test', ch_axis=-1, batched=False)
>>>
>>> x.shape # = [B, H, W], grayscale images
(10, 5, 5)
>>> summary.image(x, name='test') # image are tiled as 1x10
>>> summary.image(x, name='test', row=2) # image are tiled as 2x5
>>>
>>> x.shape # = [H, W], a grayscale image
(5, 5)
>>> # need to set as a non-bathed image
>>> summary.image(x, name='test', batched=False)
Add description about the image::
>>> summary.image(x, name='test', epoch=1, iteration=100)
>>> # 'epoch' and 'iteration' column will be shown.
Args:
images (:class:`numpy.ndarray` or :class:`cupy.ndarray` or :class:`chainer.Variable`): batch of images. If Number of dimension
is 3 (or 2 when set `batched=False`), the pixels assume as
black and white image.
name (str): name of image. set as column name. when not setting,
assigned ``'image'``.
ch_axis (int): index number of channel dimension. set 1 by default.
if the images don't have channel axis, this parameter is ignored.
row (int): row size to visualize batched images. when set 0,
show on unstuck. if images set only one image, the row size
will be ignored.
mode (str): if the images are not RGB or RGBA space, set their
color space code. ChainerUI supports 'HSV'.
batched (bool): if the image is not batched, set ``False``.
out (str): directory path of output.
subdir (str): sub-directory path of output.
**kwargs (dict): key-value pair to show as description. regardless of
empty or not, timestamp on created the image is added.
]
from relative_module[chainerui.report.image_report] import module[check_available]
if <ast.UnaryOp object at 0x7da1b0f5aec0> begin[:]
return[None]
from relative_module[chainerui.report.image_report] import module[report]
variable[out_root] assign[=] call[name[_chainerui_asset_observer].get_outpath, parameter[name[out]]]
variable[out_path] assign[=] call[name[os].path.join, parameter[name[out_root], name[subdir]]]
if <ast.UnaryOp object at 0x7da1b0f5b400> begin[:]
call[name[os].makedirs, parameter[name[out_path]]]
variable[col_name] assign[=] name[name]
if compare[name[col_name] is constant[None]] begin[:]
variable[col_name] assign[=] constant[image]
<ast.Tuple object at 0x7da1b0f58f10> assign[=] call[name[_image], parameter[name[images], name[out_path], name[col_name], name[ch_axis], name[row], name[mode], name[batched]]]
variable[value] assign[=] name[kwargs]
call[name[value]][constant[timestamp]] assign[=] call[name[created_at].isoformat, parameter[]]
call[name[value]][constant[images]] assign[=] dictionary[[<ast.Name object at 0x7da1b0de3040>], [<ast.Call object at 0x7da1b0de0820>]]
call[name[_chainerui_asset_observer].add, parameter[name[value]]]
call[name[_chainerui_asset_observer].save, parameter[name[out_root], name[timeout]]] | keyword[def] identifier[image] ( identifier[images] , identifier[name] = keyword[None] , identifier[ch_axis] = literal[int] , identifier[row] = literal[int] , identifier[mode] = keyword[None] , identifier[batched] = keyword[True] ,
identifier[out] = keyword[None] , identifier[subdir] = literal[string] , identifier[timeout] = literal[int] ,** identifier[kwargs] ):
literal[string]
keyword[from] identifier[chainerui] . identifier[report] . identifier[image_report] keyword[import] identifier[check_available]
keyword[if] keyword[not] identifier[check_available] ():
keyword[return]
keyword[from] identifier[chainerui] . identifier[report] . identifier[image_report] keyword[import] identifier[report] keyword[as] identifier[_image]
identifier[out_root] = identifier[_chainerui_asset_observer] . identifier[get_outpath] ( identifier[out] )
identifier[out_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[out_root] , identifier[subdir] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[out_path] ):
identifier[os] . identifier[makedirs] ( identifier[out_path] )
identifier[col_name] = identifier[name]
keyword[if] identifier[col_name] keyword[is] keyword[None] :
identifier[col_name] = literal[string]
identifier[filename] , identifier[created_at] = identifier[_image] (
identifier[images] , identifier[out_path] , identifier[col_name] , identifier[ch_axis] , identifier[row] , identifier[mode] , identifier[batched] )
identifier[value] = identifier[kwargs]
identifier[value] [ literal[string] ]= identifier[created_at] . identifier[isoformat] ()
identifier[value] [ literal[string] ]={ identifier[col_name] : identifier[os] . identifier[path] . identifier[join] ( identifier[subdir] , identifier[filename] )}
identifier[_chainerui_asset_observer] . identifier[add] ( identifier[value] )
identifier[_chainerui_asset_observer] . identifier[save] ( identifier[out_root] , identifier[timeout] ) | def image(images, name=None, ch_axis=1, row=0, mode=None, batched=True, out=None, subdir='', timeout=5, **kwargs):
"""Summary images to visualize.
Array of images are converted as image format (PNG format on default),
saved to output directory, and reported to the ChainerUI server.
The images are saved every called this function. The images will be shown
on `assets` endpoint vertically. If need to aggregate images in a row, use
:func:`~chainerui.summary.reporter`.
Examples of how to set arguments::
>>> from chainerui import summary
>>> summary.set_out('/path/to/log') # same as 'log' file directory
>>>
>>> x.shape # = [Batchsize, Channel, Height, Width]
(10, 3, 5, 5)
>>> summary.image(x, name='test') # images are tiled as 1x10
>>> summary.image(x, name='test', row=2) # images are tiled as 2x5
>>>
>>> x.shape # = [C, H, W]
(3, 5, 5)
>>> # need to set as a non-batched image and channel axis explicitly
>>> summary.image(x, name='test', ch_axis=0, batched=False)
>>>
>>> x.shape # = [B, H, W, C]
(10, 5, 5, 3)
>>> # need to set channel axis explicitly
>>> summary.image(x, name='test', ch_axis=-1, row=2)
>>>
>>> x.shape # = [H, W, C]
(5, 5, 3)
>>> # need to set as a non-batched image
>>> summary.image(x, name='test', ch_axis=-1, batched=False)
>>>
>>> x.shape # = [B, H, W], grayscale images
(10, 5, 5)
>>> summary.image(x, name='test') # image are tiled as 1x10
>>> summary.image(x, name='test', row=2) # image are tiled as 2x5
>>>
>>> x.shape # = [H, W], a grayscale image
(5, 5)
>>> # need to set as a non-bathed image
>>> summary.image(x, name='test', batched=False)
Add description about the image::
>>> summary.image(x, name='test', epoch=1, iteration=100)
>>> # 'epoch' and 'iteration' column will be shown.
Args:
images (:class:`numpy.ndarray` or :class:`cupy.ndarray` or :class:`chainer.Variable`): batch of images. If Number of dimension
is 3 (or 2 when set `batched=False`), the pixels assume as
black and white image.
name (str): name of image. set as column name. when not setting,
assigned ``'image'``.
ch_axis (int): index number of channel dimension. set 1 by default.
if the images don't have channel axis, this parameter is ignored.
row (int): row size to visualize batched images. when set 0,
show on unstuck. if images set only one image, the row size
will be ignored.
mode (str): if the images are not RGB or RGBA space, set their
color space code. ChainerUI supports 'HSV'.
batched (bool): if the image is not batched, set ``False``.
out (str): directory path of output.
subdir (str): sub-directory path of output.
**kwargs (dict): key-value pair to show as description. regardless of
empty or not, timestamp on created the image is added.
"""
from chainerui.report.image_report import check_available
if not check_available():
return # depends on [control=['if'], data=[]]
from chainerui.report.image_report import report as _image
out_root = _chainerui_asset_observer.get_outpath(out)
out_path = os.path.join(out_root, subdir)
if not os.path.isdir(out_path):
os.makedirs(out_path) # depends on [control=['if'], data=[]]
col_name = name
if col_name is None:
col_name = 'image' # depends on [control=['if'], data=['col_name']]
(filename, created_at) = _image(images, out_path, col_name, ch_axis, row, mode, batched)
value = kwargs
value['timestamp'] = created_at.isoformat()
value['images'] = {col_name: os.path.join(subdir, filename)}
_chainerui_asset_observer.add(value)
_chainerui_asset_observer.save(out_root, timeout) |
def btc_tx_sign_input(tx, idx, prevout_script, prevout_amount, private_key_info, hashcode=SIGHASH_ALL, hashcodes=None, segwit=None, scriptsig_type=None, redeem_script=None, witness_script=None, **blockchain_opts):
    """
    Sign a particular input in the given transaction.

    @tx: the serialized transaction to sign
    @idx: the index of the input to sign
    @prevout_script: scriptPubKey of the output being spent
    @prevout_amount: value of the output being spent (needed for segwit signing)
    @private_key_info: either a private key, or a dict with 'redeem_script' and 'private_keys' defined
    @hashcode/@hashcodes: sighash flag(s) to sign with
    @segwit: whether segwit signing is enabled; defaults to the node's feature set
    @scriptsig_type: the scriptsig classification; inferred from the key info when not given

    Returns the tx with the signed input.
    Raises ValueError if a segwit scriptsig type is requested while segwit is disabled.
    """
    if segwit is None:
        # Fall back to the node's configured feature set.
        segwit = get_features('segwit')

    if scriptsig_type is None:
        # Infer the scriptsig type from the shape of the private key info.
        scriptsig_type = btc_privkey_scriptsig_classify(private_key_info)

    if scriptsig_type in ['p2wpkh', 'p2wsh', 'p2sh-p2wpkh', 'p2sh-p2wsh'] and not segwit:
        # BUG FIX: the message describes the scriptsig type, so format
        # scriptsig_type (the original mistakenly formatted prevout_script).
        raise ValueError("Segwit is not enabled, but {} is a segwit scriptsig type".format(scriptsig_type))

    return btc_tx_sign(tx, idx, prevout_script, prevout_amount, private_key_info, scriptsig_type, hashcode=hashcode, hashcodes=hashcodes, redeem_script=redeem_script, witness_script=witness_script)
constant[
Sign a particular input in the given transaction.
@private_key_info can either be a private key, or it can be a dict with 'redeem_script' and 'private_keys' defined
Returns the tx with the signed input
]
if compare[name[segwit] is constant[None]] begin[:]
variable[segwit] assign[=] call[name[get_features], parameter[constant[segwit]]]
if compare[name[scriptsig_type] is constant[None]] begin[:]
variable[scriptsig_type] assign[=] call[name[btc_privkey_scriptsig_classify], parameter[name[private_key_info]]]
if <ast.BoolOp object at 0x7da18ede4ca0> begin[:]
<ast.Raise object at 0x7da18ede7ca0>
return[call[name[btc_tx_sign], parameter[name[tx], name[idx], name[prevout_script], name[prevout_amount], name[private_key_info], name[scriptsig_type]]]] | keyword[def] identifier[btc_tx_sign_input] ( identifier[tx] , identifier[idx] , identifier[prevout_script] , identifier[prevout_amount] , identifier[private_key_info] , identifier[hashcode] = identifier[SIGHASH_ALL] , identifier[hashcodes] = keyword[None] , identifier[segwit] = keyword[None] , identifier[scriptsig_type] = keyword[None] , identifier[redeem_script] = keyword[None] , identifier[witness_script] = keyword[None] ,** identifier[blockchain_opts] ):
literal[string]
keyword[if] identifier[segwit] keyword[is] keyword[None] :
identifier[segwit] = identifier[get_features] ( literal[string] )
keyword[if] identifier[scriptsig_type] keyword[is] keyword[None] :
identifier[scriptsig_type] = identifier[btc_privkey_scriptsig_classify] ( identifier[private_key_info] )
keyword[if] identifier[scriptsig_type] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ] keyword[and] keyword[not] identifier[segwit] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[prevout_script] ))
keyword[return] identifier[btc_tx_sign] ( identifier[tx] , identifier[idx] , identifier[prevout_script] , identifier[prevout_amount] , identifier[private_key_info] , identifier[scriptsig_type] , identifier[hashcode] = identifier[hashcode] , identifier[hashcodes] = identifier[hashcodes] , identifier[redeem_script] = identifier[redeem_script] , identifier[witness_script] = identifier[witness_script] ) | def btc_tx_sign_input(tx, idx, prevout_script, prevout_amount, private_key_info, hashcode=SIGHASH_ALL, hashcodes=None, segwit=None, scriptsig_type=None, redeem_script=None, witness_script=None, **blockchain_opts):
"""
Sign a particular input in the given transaction.
@private_key_info can either be a private key, or it can be a dict with 'redeem_script' and 'private_keys' defined
Returns the tx with the signed input
"""
if segwit is None:
segwit = get_features('segwit') # depends on [control=['if'], data=['segwit']]
if scriptsig_type is None:
scriptsig_type = btc_privkey_scriptsig_classify(private_key_info) # depends on [control=['if'], data=['scriptsig_type']]
if scriptsig_type in ['p2wpkh', 'p2wsh', 'p2sh-p2wpkh', 'p2sh-p2wsh'] and (not segwit):
raise ValueError('Segwit is not enabled, but {} is a segwit scriptsig type'.format(prevout_script)) # depends on [control=['if'], data=[]]
return btc_tx_sign(tx, idx, prevout_script, prevout_amount, private_key_info, scriptsig_type, hashcode=hashcode, hashcodes=hashcodes, redeem_script=redeem_script, witness_script=witness_script) |
def set_sync_info(self, name, mtime, size):
    """Record the mtime/size observed at the last remote synchronization.

    Local resources store the info in the current directory's metadata;
    non-local resources delegate to their peer.
    """
    target = self.cur_dir_meta if self.is_local() else self.peer
    return target.set_sync_info(name, mtime, size)
constant[Store mtime/size when this resource was last synchronized with remote.]
if <ast.UnaryOp object at 0x7da1b0650790> begin[:]
return[call[name[self].peer.set_sync_info, parameter[name[name], name[mtime], name[size]]]]
return[call[name[self].cur_dir_meta.set_sync_info, parameter[name[name], name[mtime], name[size]]]] | keyword[def] identifier[set_sync_info] ( identifier[self] , identifier[name] , identifier[mtime] , identifier[size] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[is_local] ():
keyword[return] identifier[self] . identifier[peer] . identifier[set_sync_info] ( identifier[name] , identifier[mtime] , identifier[size] )
keyword[return] identifier[self] . identifier[cur_dir_meta] . identifier[set_sync_info] ( identifier[name] , identifier[mtime] , identifier[size] ) | def set_sync_info(self, name, mtime, size):
"""Store mtime/size when this resource was last synchronized with remote."""
if not self.is_local():
return self.peer.set_sync_info(name, mtime, size) # depends on [control=['if'], data=[]]
return self.cur_dir_meta.set_sync_info(name, mtime, size) |
def _remove_keys(cls, parent_dict, paths):
"""
Remove a list of keys from a dictionary.
Keys are specified as a series of `.` separated paths for keys in child
dictionaries, e.g 'parent_key.child_key.grandchild_key'.
"""
for path in paths:
keys = cls._path_to_keys(path)
# Traverse to the tip of the path
child_dict = parent_dict
for key in keys[:-1]:
child_dict = child_dict.get(key)
if child_dict is None:
break
if child_dict is None:
continue
# Remove the key
if keys[-1] in child_dict:
child_dict.pop(keys[-1]) | def function[_remove_keys, parameter[cls, parent_dict, paths]]:
constant[
Remove a list of keys from a dictionary.
Keys are specified as a series of `.` separated paths for keys in child
dictionaries, e.g 'parent_key.child_key.grandchild_key'.
]
for taget[name[path]] in starred[name[paths]] begin[:]
variable[keys] assign[=] call[name[cls]._path_to_keys, parameter[name[path]]]
variable[child_dict] assign[=] name[parent_dict]
for taget[name[key]] in starred[call[name[keys]][<ast.Slice object at 0x7da1b0c41090>]] begin[:]
variable[child_dict] assign[=] call[name[child_dict].get, parameter[name[key]]]
if compare[name[child_dict] is constant[None]] begin[:]
break
if compare[name[child_dict] is constant[None]] begin[:]
continue
if compare[call[name[keys]][<ast.UnaryOp object at 0x7da1b0c40820>] in name[child_dict]] begin[:]
call[name[child_dict].pop, parameter[call[name[keys]][<ast.UnaryOp object at 0x7da1b0c40970>]]] | keyword[def] identifier[_remove_keys] ( identifier[cls] , identifier[parent_dict] , identifier[paths] ):
literal[string]
keyword[for] identifier[path] keyword[in] identifier[paths] :
identifier[keys] = identifier[cls] . identifier[_path_to_keys] ( identifier[path] )
identifier[child_dict] = identifier[parent_dict]
keyword[for] identifier[key] keyword[in] identifier[keys] [:- literal[int] ]:
identifier[child_dict] = identifier[child_dict] . identifier[get] ( identifier[key] )
keyword[if] identifier[child_dict] keyword[is] keyword[None] :
keyword[break]
keyword[if] identifier[child_dict] keyword[is] keyword[None] :
keyword[continue]
keyword[if] identifier[keys] [- literal[int] ] keyword[in] identifier[child_dict] :
identifier[child_dict] . identifier[pop] ( identifier[keys] [- literal[int] ]) | def _remove_keys(cls, parent_dict, paths):
"""
Remove a list of keys from a dictionary.
Keys are specified as a series of `.` separated paths for keys in child
dictionaries, e.g 'parent_key.child_key.grandchild_key'.
"""
for path in paths:
keys = cls._path_to_keys(path)
# Traverse to the tip of the path
child_dict = parent_dict
for key in keys[:-1]:
child_dict = child_dict.get(key)
if child_dict is None:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
if child_dict is None:
continue # depends on [control=['if'], data=[]]
# Remove the key
if keys[-1] in child_dict:
child_dict.pop(keys[-1]) # depends on [control=['if'], data=['child_dict']] # depends on [control=['for'], data=['path']] |
def exit(self):
    """Shut down the simple WSGI server serving the application, if one is running."""
    server = self._server
    if server is None:
        # Nothing to stop: already shut down (or never started).
        return
    server.shutdown()
    server.server_close()
    self._server = None
constant[Stop the simple WSGI server running the appliation.]
if compare[name[self]._server is_not constant[None]] begin[:]
call[name[self]._server.shutdown, parameter[]]
call[name[self]._server.server_close, parameter[]]
name[self]._server assign[=] constant[None] | keyword[def] identifier[exit] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_server] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_server] . identifier[shutdown] ()
identifier[self] . identifier[_server] . identifier[server_close] ()
identifier[self] . identifier[_server] = keyword[None] | def exit(self):
"""Stop the simple WSGI server running the appliation."""
if self._server is not None:
self._server.shutdown()
self._server.server_close()
self._server = None # depends on [control=['if'], data=[]] |
def nvlist_to_dict(nvlist):
    '''Convert a CORBA namevalue list into a dictionary.'''
    # Each entry carries a .name and an Any-wrapped .value; unwrap with .value().
    return {entry.name: entry.value.value() for entry in nvlist}
constant[Convert a CORBA namevalue list into a dictionary.]
variable[result] assign[=] dictionary[[], []]
for taget[name[item]] in starred[name[nvlist]] begin[:]
call[name[result]][name[item].name] assign[=] call[name[item].value.value, parameter[]]
return[name[result]] | keyword[def] identifier[nvlist_to_dict] ( identifier[nvlist] ):
literal[string]
identifier[result] ={}
keyword[for] identifier[item] keyword[in] identifier[nvlist] :
identifier[result] [ identifier[item] . identifier[name] ]= identifier[item] . identifier[value] . identifier[value] ()
keyword[return] identifier[result] | def nvlist_to_dict(nvlist):
"""Convert a CORBA namevalue list into a dictionary."""
result = {}
for item in nvlist:
result[item.name] = item.value.value() # depends on [control=['for'], data=['item']]
return result |
def is_valid_combination(values, names):
    """
    Return True if the combination is valid and False otherwise.

    The dictionary built from ``names``/``values`` can be incomplete.
    To prevent searching for unnecessary items, this filtering function is
    executed with the found subset of data to validate it; rules that
    reference a missing key are treated as not rejecting the combination.
    """
    # FIX: the original placed this text after the first statement, where it
    # is a bare string expression rather than a real docstring.
    dictionary = dict(zip(names, values))
    # Each rule describes a FORBIDDEN combination:
    #  - Brand Y does not support Windows 98
    #  - Brand X does not work with XP
    #  - Contractors are billed in 30 min increments
    rules = [
        lambda d: "98" == d["os"] and "Brand Y" == d["brand"],
        lambda d: "XP" == d["os"] and "Brand X" == d["brand"],
        lambda d: "Contr." == d["employee"] and d["increment"] < 30,
    ]
    for rule in rules:
        try:
            if rule(dictionary):
                return False
        except KeyError:
            # Rule references a key not (yet) present: cannot reject.
            pass
    return True
variable[dictionary] assign[=] call[name[dict], parameter[call[name[zip], parameter[name[names], name[values]]]]]
constant[
Should return True if combination is valid and False otherwise.
Dictionary that is passed here can be incomplete.
To prevent search for unnecessary items filtering function
is executed with found subset of data to validate it.
]
variable[rules] assign[=] list[[<ast.Lambda object at 0x7da1b0ca52a0>, <ast.Lambda object at 0x7da1b0c4dd20>, <ast.Lambda object at 0x7da1b0c4de70>]]
for taget[name[rule]] in starred[name[rules]] begin[:]
<ast.Try object at 0x7da1b0c4e050>
return[constant[True]] | keyword[def] identifier[is_valid_combination] ( identifier[values] , identifier[names] ):
identifier[dictionary] = identifier[dict] ( identifier[zip] ( identifier[names] , identifier[values] ))
literal[string]
identifier[rules] =[
keyword[lambda] identifier[d] : literal[string] == identifier[d] [ literal[string] ] keyword[and] literal[string] == identifier[d] [ literal[string] ],
keyword[lambda] identifier[d] : literal[string] == identifier[d] [ literal[string] ] keyword[and] literal[string] == identifier[d] [ literal[string] ],
keyword[lambda] identifier[d] : literal[string] == identifier[d] [ literal[string] ] keyword[and] identifier[d] [ literal[string] ]< literal[int] ,
]
keyword[for] identifier[rule] keyword[in] identifier[rules] :
keyword[try] :
keyword[if] identifier[rule] ( identifier[dictionary] ):
keyword[return] keyword[False]
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[return] keyword[True] | def is_valid_combination(values, names):
dictionary = dict(zip(names, values))
'\n Should return True if combination is valid and False otherwise.\n\n Dictionary that is passed here can be incomplete.\n To prevent search for unnecessary items filtering function\n is executed with found subset of data to validate it.\n '
# Brand Y does not support Windows 98
# Brand X does not work with XP
# Contractors are billed in 30 min increments
rules = [lambda d: '98' == d['os'] and 'Brand Y' == d['brand'], lambda d: 'XP' == d['os'] and 'Brand X' == d['brand'], lambda d: 'Contr.' == d['employee'] and d['increment'] < 30]
for rule in rules:
try:
if rule(dictionary):
return False # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['rule']]
return True |
def register_phonon_task(self, *args, **kwargs):
    """Register a phonon task (forces the task class to ``PhononTask``)."""
    # Merge instead of mutating the caller-visible kwargs mapping in place;
    # the task_class entry always wins, matching the original behavior.
    kwargs = dict(kwargs, task_class=PhononTask)
    return self.register_task(*args, **kwargs)
constant[Register a phonon task.]
call[name[kwargs]][constant[task_class]] assign[=] name[PhononTask]
return[call[name[self].register_task, parameter[<ast.Starred object at 0x7da1b2187e80>]]] | keyword[def] identifier[register_phonon_task] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= identifier[PhononTask]
keyword[return] identifier[self] . identifier[register_task] (* identifier[args] ,** identifier[kwargs] ) | def register_phonon_task(self, *args, **kwargs):
"""Register a phonon task."""
kwargs['task_class'] = PhononTask
return self.register_task(*args, **kwargs) |
def get_environmental_configuration(self):
    """
    Gets the settings that describe the environmental configuration (supported feature set, calibrated minimum &
    maximum power, location & dimensions, ...) of the enclosure resource.

    Returns:
        Settings that describe the environmental configuration.
    """
    # The sub-resource lives directly under the enclosure's own URI.
    return self._helper.do_get('{}/environmentalConfiguration'.format(self.data['uri']))
constant[
Gets the settings that describe the environmental configuration (supported feature set, calibrated minimum &
maximum power, location & dimensions, ...) of the enclosure resource.
Returns:
Settings that describe the environmental configuration.
]
variable[uri] assign[=] call[constant[{}/environmentalConfiguration].format, parameter[call[name[self].data][constant[uri]]]]
return[call[name[self]._helper.do_get, parameter[name[uri]]]] | keyword[def] identifier[get_environmental_configuration] ( identifier[self] ):
literal[string]
identifier[uri] = literal[string] . identifier[format] ( identifier[self] . identifier[data] [ literal[string] ])
keyword[return] identifier[self] . identifier[_helper] . identifier[do_get] ( identifier[uri] ) | def get_environmental_configuration(self):
"""
Gets the settings that describe the environmental configuration (supported feature set, calibrated minimum &
maximum power, location & dimensions, ...) of the enclosure resource.
Returns:
Settings that describe the environmental configuration.
"""
uri = '{}/environmentalConfiguration'.format(self.data['uri'])
return self._helper.do_get(uri) |
def delete(self):
    """
    Delete a security context. This method will delete the local data structures associated
    with the specified security context, and may return an output token, which when passed to
    :meth:`process_context_token` on the peer may instruct it to also delete its context.
    RFC 2744 recommends that GSSAPI mechanisms do not emit any output token when they're
    deleted, so this behaviour could be considered deprecated.
    After this method is called, this security context will become invalid and should not be
    used in any way.
    :returns: An output token if one was emitted by the GSSAPI mechanism, otherwise an empty
        bytestring.
    :rtype: bytes
    """
    if not self._ctx[0]:
        # A NULL context handle means this object was never established
        # or was already deleted.
        raise GSSException("Can't delete invalid context")
    # Out-parameters for gss_delete_sec_context: an optional output token
    # and the mechanism-specific minor status code.
    output_token_buffer = ffi.new('gss_buffer_desc[1]')
    minor_status = ffi.new('OM_uint32[1]')
    retval = C.gss_delete_sec_context(
        minor_status,
        self._ctx,
        output_token_buffer
    )
    # Invalidate this object's context handle and flags regardless of the
    # call's outcome, so the context can never be used again.
    self._ctx = ffi.new('gss_ctx_id_t[1]')
    self._reset_flags()
    try:
        if GSS_ERROR(retval):
            # Prefer the mechanism-specific error when a minor status and
            # mech type are available.
            if minor_status[0] and self.mech_type:
                raise _exception_for_status(retval, minor_status[0], self.mech_type)
            else:
                raise _exception_for_status(retval, minor_status[0])

        # Copy the (possibly empty) output token into a Python bytestring.
        return _buf_to_str(output_token_buffer[0])
    finally:
        # Always release the mechanism-allocated token buffer, even when an
        # exception is being raised above.
        if output_token_buffer[0].length != 0:
            C.gss_release_buffer(minor_status, output_token_buffer)
constant[
Delete a security context. This method will delete the local data structures associated
with the specified security context, and may return an output token, which when passed to
:meth:`process_context_token` on the peer may instruct it to also delete its context.
RFC 2744 recommends that GSSAPI mechanisms do not emit any output token when they're
deleted, so this behaviour could be considered deprecated.
After this method is called, this security context will become invalid and should not be
used in any way.
:returns: An output token if one was emitted by the GSSAPI mechanism, otherwise an empty
bytestring.
:rtype: bytes
]
if <ast.UnaryOp object at 0x7da1b023d090> begin[:]
<ast.Raise object at 0x7da1b023d600>
variable[output_token_buffer] assign[=] call[name[ffi].new, parameter[constant[gss_buffer_desc[1]]]]
variable[minor_status] assign[=] call[name[ffi].new, parameter[constant[OM_uint32[1]]]]
variable[retval] assign[=] call[name[C].gss_delete_sec_context, parameter[name[minor_status], name[self]._ctx, name[output_token_buffer]]]
name[self]._ctx assign[=] call[name[ffi].new, parameter[constant[gss_ctx_id_t[1]]]]
call[name[self]._reset_flags, parameter[]]
<ast.Try object at 0x7da1b023eda0> | keyword[def] identifier[delete] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_ctx] [ literal[int] ]:
keyword[raise] identifier[GSSException] ( literal[string] )
identifier[output_token_buffer] = identifier[ffi] . identifier[new] ( literal[string] )
identifier[minor_status] = identifier[ffi] . identifier[new] ( literal[string] )
identifier[retval] = identifier[C] . identifier[gss_delete_sec_context] (
identifier[minor_status] ,
identifier[self] . identifier[_ctx] ,
identifier[output_token_buffer]
)
identifier[self] . identifier[_ctx] = identifier[ffi] . identifier[new] ( literal[string] )
identifier[self] . identifier[_reset_flags] ()
keyword[try] :
keyword[if] identifier[GSS_ERROR] ( identifier[retval] ):
keyword[if] identifier[minor_status] [ literal[int] ] keyword[and] identifier[self] . identifier[mech_type] :
keyword[raise] identifier[_exception_for_status] ( identifier[retval] , identifier[minor_status] [ literal[int] ], identifier[self] . identifier[mech_type] )
keyword[else] :
keyword[raise] identifier[_exception_for_status] ( identifier[retval] , identifier[minor_status] [ literal[int] ])
keyword[return] identifier[_buf_to_str] ( identifier[output_token_buffer] [ literal[int] ])
keyword[finally] :
keyword[if] identifier[output_token_buffer] [ literal[int] ]. identifier[length] != literal[int] :
identifier[C] . identifier[gss_release_buffer] ( identifier[minor_status] , identifier[output_token_buffer] ) | def delete(self):
"""
Delete a security context. This method will delete the local data structures associated
with the specified security context, and may return an output token, which when passed to
:meth:`process_context_token` on the peer may instruct it to also delete its context.
RFC 2744 recommends that GSSAPI mechanisms do not emit any output token when they're
deleted, so this behaviour could be considered deprecated.
After this method is called, this security context will become invalid and should not be
used in any way.
:returns: An output token if one was emitted by the GSSAPI mechanism, otherwise an empty
bytestring.
:rtype: bytes
"""
if not self._ctx[0]:
raise GSSException("Can't delete invalid context") # depends on [control=['if'], data=[]]
output_token_buffer = ffi.new('gss_buffer_desc[1]')
minor_status = ffi.new('OM_uint32[1]')
retval = C.gss_delete_sec_context(minor_status, self._ctx, output_token_buffer)
self._ctx = ffi.new('gss_ctx_id_t[1]')
self._reset_flags()
try:
if GSS_ERROR(retval):
if minor_status[0] and self.mech_type:
raise _exception_for_status(retval, minor_status[0], self.mech_type) # depends on [control=['if'], data=[]]
else:
raise _exception_for_status(retval, minor_status[0]) # depends on [control=['if'], data=[]]
return _buf_to_str(output_token_buffer[0]) # depends on [control=['try'], data=[]]
finally:
if output_token_buffer[0].length != 0:
C.gss_release_buffer(minor_status, output_token_buffer) # depends on [control=['if'], data=[]] |
def _fake_getqualifier(self, namespace, **params):
"""
Implements a server responder for
:meth:`pywbem.WBEMConnection.GetQualifier`.
Retrieves a qualifier declaration from the local repository of this
namespace.
Returns:
Returns a tuple representing the _imethodcall return for this
method where the data is a QualifierDeclaration
Raises:
CIMError: CIM_ERR_INVALID_NAMESPACE
CIMError: CIM_ERR_NOT_FOUND
"""
# Validate namespace
qualifier_repo = self._get_qualifier_repo(namespace)
qname = params['QualifierName']
try:
qualifier = qualifier_repo[qname]
except KeyError:
ce = CIMError(
CIM_ERR_NOT_FOUND,
_format("Qualifier declaration {0!A} not found in namespace "
"{1!A}.", qname, namespace))
raise ce
return self._make_tuple([qualifier]) | def function[_fake_getqualifier, parameter[self, namespace]]:
constant[
Implements a server responder for
:meth:`pywbem.WBEMConnection.GetQualifier`.
Retrieves a qualifier declaration from the local repository of this
namespace.
Returns:
Returns a tuple representing the _imethodcall return for this
method where the data is a QualifierDeclaration
Raises:
CIMError: CIM_ERR_INVALID_NAMESPACE
CIMError: CIM_ERR_NOT_FOUND
]
variable[qualifier_repo] assign[=] call[name[self]._get_qualifier_repo, parameter[name[namespace]]]
variable[qname] assign[=] call[name[params]][constant[QualifierName]]
<ast.Try object at 0x7da18f09ceb0>
return[call[name[self]._make_tuple, parameter[list[[<ast.Name object at 0x7da18f09cd00>]]]]] | keyword[def] identifier[_fake_getqualifier] ( identifier[self] , identifier[namespace] ,** identifier[params] ):
literal[string]
identifier[qualifier_repo] = identifier[self] . identifier[_get_qualifier_repo] ( identifier[namespace] )
identifier[qname] = identifier[params] [ literal[string] ]
keyword[try] :
identifier[qualifier] = identifier[qualifier_repo] [ identifier[qname] ]
keyword[except] identifier[KeyError] :
identifier[ce] = identifier[CIMError] (
identifier[CIM_ERR_NOT_FOUND] ,
identifier[_format] ( literal[string]
literal[string] , identifier[qname] , identifier[namespace] ))
keyword[raise] identifier[ce]
keyword[return] identifier[self] . identifier[_make_tuple] ([ identifier[qualifier] ]) | def _fake_getqualifier(self, namespace, **params):
"""
Implements a server responder for
:meth:`pywbem.WBEMConnection.GetQualifier`.
Retrieves a qualifier declaration from the local repository of this
namespace.
Returns:
Returns a tuple representing the _imethodcall return for this
method where the data is a QualifierDeclaration
Raises:
CIMError: CIM_ERR_INVALID_NAMESPACE
CIMError: CIM_ERR_NOT_FOUND
"""
# Validate namespace
qualifier_repo = self._get_qualifier_repo(namespace)
qname = params['QualifierName']
try:
qualifier = qualifier_repo[qname] # depends on [control=['try'], data=[]]
except KeyError:
ce = CIMError(CIM_ERR_NOT_FOUND, _format('Qualifier declaration {0!A} not found in namespace {1!A}.', qname, namespace))
raise ce # depends on [control=['except'], data=[]]
return self._make_tuple([qualifier]) |
def CalculateBestPosition(self, widget):
    "When dealing with a Top-Level window position it absolute lower-right"
    if not isinstance(widget, wx.Frame):
        # Non-top-level widgets keep the stock tooltip placement.
        STT.ToolTipWindow.CalculateBestPosition(self, widget)
        return
    screen = wx.ClientDisplayRect()[2:]
    left, top = widget.ClientToScreenXY(0, 0)
    right, bottom = widget.ClientToScreenXY(*widget.GetClientRect()[2:])
    # Anchor the tooltip so its bottom edge meets the frame's bottom edge,
    # just to the right of the frame's client area.
    size = self.GetSize()
    self.SetPosition((right, bottom - size[1]))
constant[When dealing with a Top-Level window position it absolute lower-right]
if call[name[isinstance], parameter[name[widget], name[wx].Frame]] begin[:]
variable[screen] assign[=] call[call[name[wx].ClientDisplayRect, parameter[]]][<ast.Slice object at 0x7da1b0285630>]
<ast.Tuple object at 0x7da1b0286e00> assign[=] call[name[widget].ClientToScreenXY, parameter[constant[0], constant[0]]]
<ast.Tuple object at 0x7da1b0286350> assign[=] call[name[widget].ClientToScreenXY, parameter[<ast.Starred object at 0x7da1b0284eb0>]]
variable[size] assign[=] call[name[self].GetSize, parameter[]]
variable[xpos] assign[=] name[right]
variable[ypos] assign[=] binary_operation[name[bottom] - call[name[size]][constant[1]]]
call[name[self].SetPosition, parameter[tuple[[<ast.Name object at 0x7da1b0286170>, <ast.Name object at 0x7da1b0287790>]]]] | keyword[def] identifier[CalculateBestPosition] ( identifier[self] , identifier[widget] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[widget] , identifier[wx] . identifier[Frame] ):
identifier[screen] = identifier[wx] . identifier[ClientDisplayRect] ()[ literal[int] :]
identifier[left] , identifier[top] = identifier[widget] . identifier[ClientToScreenXY] ( literal[int] , literal[int] )
identifier[right] , identifier[bottom] = identifier[widget] . identifier[ClientToScreenXY] (* identifier[widget] . identifier[GetClientRect] ()[ literal[int] :])
identifier[size] = identifier[self] . identifier[GetSize] ()
identifier[xpos] = identifier[right]
identifier[ypos] = identifier[bottom] - identifier[size] [ literal[int] ]
identifier[self] . identifier[SetPosition] (( identifier[xpos] , identifier[ypos] ))
keyword[else] :
identifier[STT] . identifier[ToolTipWindow] . identifier[CalculateBestPosition] ( identifier[self] , identifier[widget] ) | def CalculateBestPosition(self, widget):
"""When dealing with a Top-Level window position it absolute lower-right"""
if isinstance(widget, wx.Frame):
screen = wx.ClientDisplayRect()[2:]
(left, top) = widget.ClientToScreenXY(0, 0)
(right, bottom) = widget.ClientToScreenXY(*widget.GetClientRect()[2:])
size = self.GetSize()
xpos = right
ypos = bottom - size[1]
self.SetPosition((xpos, ypos)) # depends on [control=['if'], data=[]]
else:
STT.ToolTipWindow.CalculateBestPosition(self, widget) |
def _sort_by(self, datum, param, path=None):
"""
Key function that is used for results sorting. This is passed as argument to `sorted()`
"""
if not path:
path = []
try:
if '__' in param:
root, new_param = param.split('__')
path.append(root)
return self._sort_by(datum[root], param=new_param, path=path)
else:
path.append(param)
data = datum[param]
if isinstance(data, list):
raise ValidationError(self._list_attribute_error.format(param))
return data
except TypeError:
raise ValidationError(self._list_attribute_error.format('.'.join(path)))
except KeyError:
raise ValidationError('Invalid sorting field: {}'.format('.'.join(path))) | def function[_sort_by, parameter[self, datum, param, path]]:
constant[
Key function that is used for results sorting. This is passed as argument to `sorted()`
]
if <ast.UnaryOp object at 0x7da18f7225f0> begin[:]
variable[path] assign[=] list[[]]
<ast.Try object at 0x7da18f720610> | keyword[def] identifier[_sort_by] ( identifier[self] , identifier[datum] , identifier[param] , identifier[path] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[path] :
identifier[path] =[]
keyword[try] :
keyword[if] literal[string] keyword[in] identifier[param] :
identifier[root] , identifier[new_param] = identifier[param] . identifier[split] ( literal[string] )
identifier[path] . identifier[append] ( identifier[root] )
keyword[return] identifier[self] . identifier[_sort_by] ( identifier[datum] [ identifier[root] ], identifier[param] = identifier[new_param] , identifier[path] = identifier[path] )
keyword[else] :
identifier[path] . identifier[append] ( identifier[param] )
identifier[data] = identifier[datum] [ identifier[param] ]
keyword[if] identifier[isinstance] ( identifier[data] , identifier[list] ):
keyword[raise] identifier[ValidationError] ( identifier[self] . identifier[_list_attribute_error] . identifier[format] ( identifier[param] ))
keyword[return] identifier[data]
keyword[except] identifier[TypeError] :
keyword[raise] identifier[ValidationError] ( identifier[self] . identifier[_list_attribute_error] . identifier[format] ( literal[string] . identifier[join] ( identifier[path] )))
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ValidationError] ( literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[path] ))) | def _sort_by(self, datum, param, path=None):
"""
Key function that is used for results sorting. This is passed as argument to `sorted()`
"""
if not path:
path = [] # depends on [control=['if'], data=[]]
try:
if '__' in param:
(root, new_param) = param.split('__')
path.append(root)
return self._sort_by(datum[root], param=new_param, path=path) # depends on [control=['if'], data=['param']]
else:
path.append(param)
data = datum[param]
if isinstance(data, list):
raise ValidationError(self._list_attribute_error.format(param)) # depends on [control=['if'], data=[]]
return data # depends on [control=['try'], data=[]]
except TypeError:
raise ValidationError(self._list_attribute_error.format('.'.join(path))) # depends on [control=['except'], data=[]]
except KeyError:
raise ValidationError('Invalid sorting field: {}'.format('.'.join(path))) # depends on [control=['except'], data=[]] |
def update_remote_ids(self, remote_folder):
    """
    Adopt the id of ``remote_folder`` and reconcile children with it.

    :param remote_folder: RemoteFolder to compare against
    """
    # Take over the matching remote folder's id, then let the module-level
    # helper walk our children and pair them with the remote children.
    remote_id = remote_folder.id
    self.remote_id = remote_id
    _update_remote_children(remote_folder, self.children)
constant[
Set remote id based on remote_folder and check children against this folder's children.
:param remote_folder: RemoteFolder to compare against
]
name[self].remote_id assign[=] name[remote_folder].id
call[name[_update_remote_children], parameter[name[remote_folder], name[self].children]] | keyword[def] identifier[update_remote_ids] ( identifier[self] , identifier[remote_folder] ):
literal[string]
identifier[self] . identifier[remote_id] = identifier[remote_folder] . identifier[id]
identifier[_update_remote_children] ( identifier[remote_folder] , identifier[self] . identifier[children] ) | def update_remote_ids(self, remote_folder):
"""
Set remote id based on remote_folder and check children against this folder's children.
:param remote_folder: RemoteFolder to compare against
"""
self.remote_id = remote_folder.id
_update_remote_children(remote_folder, self.children) |
def phantomjs(ctx, phantomjs_path, port, auto_restart, args):
    """
    Run phantomjs fetcher if phantomjs is installed.

    Spawns the bundled ``phantomjs_fetcher.js`` as a subprocess listening on
    ``port``, registers it in the shared context object (``ctx.obj``) so other
    components can reach it via ``phantomjs_proxy``, then supervises it.
    Returns None when the phantomjs binary cannot be launched; in testing
    mode returns the handle immediately; otherwise blocks in the restart
    loop until a deliberate quit() or a non-restarting exit.
    """
    # Fall back to args configured in the click default map when none given.
    args = args or ctx.default_map and ctx.default_map.get('args', [])
    import subprocess
    g = ctx.obj
    # Sentinel list: quit() appends to it so the supervision loop below can
    # tell a deliberate shutdown from a crash and skip the respawn.
    _quit = []
    phantomjs_fetcher = os.path.join(
        os.path.dirname(pyspider.__file__), 'fetcher/phantomjs_fetcher.js')
    cmd = [phantomjs_path,
           # this may cause memory leak: https://github.com/ariya/phantomjs/issues/12903
           #'--load-images=false',
           '--ssl-protocol=any',
           '--disk-cache=true'] + list(args or []) + [phantomjs_fetcher, str(port)]
    try:
        _phantomjs = subprocess.Popen(cmd)
    except OSError:
        # Binary missing: degrade gracefully instead of aborting the whole app.
        logging.warning('phantomjs not found, continue running without it.')
        return None
    def quit(*args, **kwargs):
        # Mark the shutdown as intentional, then terminate and reap the child.
        _quit.append(1)
        _phantomjs.kill()
        _phantomjs.wait()
        logging.info('phantomjs exited.')
    # Only set the proxy address if the caller did not configure one already.
    if not g.get('phantomjs_proxy'):
        g['phantomjs_proxy'] = '127.0.0.1:%s' % port
    phantomjs = utils.ObjectDict(port=port, quit=quit)
    g.instances.append(phantomjs)
    if g.get('testing_mode'):
        # In tests the caller drives the subprocess lifecycle; don't block.
        return phantomjs
    # Supervision loop: respawn the fetcher whenever it dies, until quit()
    # was called or auto restart is disabled.
    while True:
        _phantomjs.wait()
        if _quit or not auto_restart:
            break
        _phantomjs = subprocess.Popen(cmd)
constant[
Run phantomjs fetcher if phantomjs is installed.
]
variable[args] assign[=] <ast.BoolOp object at 0x7da1b1f8ff40>
import module[subprocess]
variable[g] assign[=] name[ctx].obj
variable[_quit] assign[=] list[[]]
variable[phantomjs_fetcher] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[pyspider].__file__]], constant[fetcher/phantomjs_fetcher.js]]]
variable[cmd] assign[=] binary_operation[binary_operation[list[[<ast.Name object at 0x7da1b1f8e0b0>, <ast.Constant object at 0x7da1b1f8d540>, <ast.Constant object at 0x7da1b1f8c880>]] + call[name[list], parameter[<ast.BoolOp object at 0x7da1b1f8dba0>]]] + list[[<ast.Name object at 0x7da1b1f05240>, <ast.Call object at 0x7da1b1f04070>]]]
<ast.Try object at 0x7da1b1f05210>
def function[quit, parameter[]]:
call[name[_quit].append, parameter[constant[1]]]
call[name[_phantomjs].kill, parameter[]]
call[name[_phantomjs].wait, parameter[]]
call[name[logging].info, parameter[constant[phantomjs exited.]]]
if <ast.UnaryOp object at 0x7da1b1f8d690> begin[:]
call[name[g]][constant[phantomjs_proxy]] assign[=] binary_operation[constant[127.0.0.1:%s] <ast.Mod object at 0x7da2590d6920> name[port]]
variable[phantomjs] assign[=] call[name[utils].ObjectDict, parameter[]]
call[name[g].instances.append, parameter[name[phantomjs]]]
if call[name[g].get, parameter[constant[testing_mode]]] begin[:]
return[name[phantomjs]]
while constant[True] begin[:]
call[name[_phantomjs].wait, parameter[]]
if <ast.BoolOp object at 0x7da18f811330> begin[:]
break
variable[_phantomjs] assign[=] call[name[subprocess].Popen, parameter[name[cmd]]] | keyword[def] identifier[phantomjs] ( identifier[ctx] , identifier[phantomjs_path] , identifier[port] , identifier[auto_restart] , identifier[args] ):
literal[string]
identifier[args] = identifier[args] keyword[or] identifier[ctx] . identifier[default_map] keyword[and] identifier[ctx] . identifier[default_map] . identifier[get] ( literal[string] ,[])
keyword[import] identifier[subprocess]
identifier[g] = identifier[ctx] . identifier[obj]
identifier[_quit] =[]
identifier[phantomjs_fetcher] = identifier[os] . identifier[path] . identifier[join] (
identifier[os] . identifier[path] . identifier[dirname] ( identifier[pyspider] . identifier[__file__] ), literal[string] )
identifier[cmd] =[ identifier[phantomjs_path] ,
literal[string] ,
literal[string] ]+ identifier[list] ( identifier[args] keyword[or] [])+[ identifier[phantomjs_fetcher] , identifier[str] ( identifier[port] )]
keyword[try] :
identifier[_phantomjs] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] )
keyword[except] identifier[OSError] :
identifier[logging] . identifier[warning] ( literal[string] )
keyword[return] keyword[None]
keyword[def] identifier[quit] (* identifier[args] ,** identifier[kwargs] ):
identifier[_quit] . identifier[append] ( literal[int] )
identifier[_phantomjs] . identifier[kill] ()
identifier[_phantomjs] . identifier[wait] ()
identifier[logging] . identifier[info] ( literal[string] )
keyword[if] keyword[not] identifier[g] . identifier[get] ( literal[string] ):
identifier[g] [ literal[string] ]= literal[string] % identifier[port]
identifier[phantomjs] = identifier[utils] . identifier[ObjectDict] ( identifier[port] = identifier[port] , identifier[quit] = identifier[quit] )
identifier[g] . identifier[instances] . identifier[append] ( identifier[phantomjs] )
keyword[if] identifier[g] . identifier[get] ( literal[string] ):
keyword[return] identifier[phantomjs]
keyword[while] keyword[True] :
identifier[_phantomjs] . identifier[wait] ()
keyword[if] identifier[_quit] keyword[or] keyword[not] identifier[auto_restart] :
keyword[break]
identifier[_phantomjs] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] ) | def phantomjs(ctx, phantomjs_path, port, auto_restart, args):
"""
Run phantomjs fetcher if phantomjs is installed.
"""
args = args or (ctx.default_map and ctx.default_map.get('args', []))
import subprocess
g = ctx.obj
_quit = []
phantomjs_fetcher = os.path.join(os.path.dirname(pyspider.__file__), 'fetcher/phantomjs_fetcher.js')
# this may cause memory leak: https://github.com/ariya/phantomjs/issues/12903
#'--load-images=false',
cmd = [phantomjs_path, '--ssl-protocol=any', '--disk-cache=true'] + list(args or []) + [phantomjs_fetcher, str(port)]
try:
_phantomjs = subprocess.Popen(cmd) # depends on [control=['try'], data=[]]
except OSError:
logging.warning('phantomjs not found, continue running without it.')
return None # depends on [control=['except'], data=[]]
def quit(*args, **kwargs):
_quit.append(1)
_phantomjs.kill()
_phantomjs.wait()
logging.info('phantomjs exited.')
if not g.get('phantomjs_proxy'):
g['phantomjs_proxy'] = '127.0.0.1:%s' % port # depends on [control=['if'], data=[]]
phantomjs = utils.ObjectDict(port=port, quit=quit)
g.instances.append(phantomjs)
if g.get('testing_mode'):
return phantomjs # depends on [control=['if'], data=[]]
while True:
_phantomjs.wait()
if _quit or not auto_restart:
break # depends on [control=['if'], data=[]]
_phantomjs = subprocess.Popen(cmd) # depends on [control=['while'], data=[]] |
def _pushMessages(self):
""" Internal callback used to make sure the msg list keeps moving. """
# This continues to get itself called until no msgs are left in list.
self.showStatus('')
if len(self._statusMsgsToShow) > 0:
self.top.after(200, self._pushMessages) | def function[_pushMessages, parameter[self]]:
constant[ Internal callback used to make sure the msg list keeps moving. ]
call[name[self].showStatus, parameter[constant[]]]
if compare[call[name[len], parameter[name[self]._statusMsgsToShow]] greater[>] constant[0]] begin[:]
call[name[self].top.after, parameter[constant[200], name[self]._pushMessages]] | keyword[def] identifier[_pushMessages] ( identifier[self] ):
literal[string]
identifier[self] . identifier[showStatus] ( literal[string] )
keyword[if] identifier[len] ( identifier[self] . identifier[_statusMsgsToShow] )> literal[int] :
identifier[self] . identifier[top] . identifier[after] ( literal[int] , identifier[self] . identifier[_pushMessages] ) | def _pushMessages(self):
""" Internal callback used to make sure the msg list keeps moving. """
# This continues to get itself called until no msgs are left in list.
self.showStatus('')
if len(self._statusMsgsToShow) > 0:
self.top.after(200, self._pushMessages) # depends on [control=['if'], data=[]] |
def hybrid_forward(self, F, x, sampled_values, label, w_all, b_all):
    """Forward computation.

    Computes sampled-softmax style logits: one logit for each example's
    true class plus ``self._num_sampled`` logits for the shared negative
    samples, with ``log(expected_count)`` subtracted from both so the
    result approximates full-softmax logits.

    Parameters
    ----------
    F : ndarray/symbol API module supplied by the hybrid framework.
    x : input features; reshaped below to (batch_size, self._in_unit).
    sampled_values : tuple of (sampled_candidates, expected_count_sampled,
        expected_count_true) produced by the candidate sampler.
    label : true class ids, one per example.
    w_all : stacked weight rows — the ``self._num_sampled`` sampled-class
        rows first, then the per-example true-class rows (see slices below).
    b_all : stacked biases in the same order as ``w_all``.

    Returns
    -------
    (pred, new_label) where pred has shape (batch_size, 1 + num_sampled)
    with the true-class logit in column 0, and new_label is either the
    sparse all-zeros label (true class is index 0 of pred) or a dense
    one-hot-like matrix of the same shape as pred.
    """
    sampled_candidates, expected_count_sampled, expected_count_true = sampled_values
    # (num_sampled, in_unit)
    w_sampled = w_all.slice(begin=(0, 0), end=(self._num_sampled, None))
    w_true = w_all.slice(begin=(self._num_sampled, 0), end=(None, None))
    b_sampled = b_all.slice(begin=(0,), end=(self._num_sampled,))
    b_true = b_all.slice(begin=(self._num_sampled,), end=(None,))
    # true pred
    # (batch_size, 1)
    x = x.reshape((-1, self._in_unit))
    # Row-wise dot product of each example with its own true-class weight row.
    pred_true = (w_true * x).sum(axis=1) + b_true
    # samples pred
    # (batch_size, num_sampled)
    b_sampled = F.reshape(b_sampled, (-1,))
    # All examples share the same sampled classes, so this is a plain
    # dense layer against the sampled weight matrix.
    pred_sampled = F.FullyConnected(x, weight=w_sampled, bias=b_sampled,
                                    num_hidden=self._num_sampled)
    # remove accidental hits
    if self._remove_accidental_hits:
        # Where a sampled candidate equals an example's true label, push the
        # logit to a huge negative value so it contributes ~0 probability.
        label_vec = F.reshape(label, (-1, 1))
        sample_vec = F.reshape(sampled_candidates, (1, -1))
        mask = F.broadcast_equal(label_vec, sample_vec) * -1e37
        pred_sampled = pred_sampled + mask
    # subtract log(q)
    # Correct the logits for the sampling distribution (log expected counts).
    expected_count_sampled = F.reshape(expected_count_sampled,
                                       shape=(1, self._num_sampled))
    expected_count_true = expected_count_true.reshape((-1,))
    pred_true = pred_true - F.log(expected_count_true)
    pred_true = pred_true.reshape((-1, 1))
    pred_sampled = F.broadcast_sub(pred_sampled, F.log(expected_count_sampled))
    # pred and new_labels
    # (batch_size, 1+num_sampled)
    pred = F.concat(pred_true, pred_sampled, dim=1)
    if self._sparse_label:
        # Sparse labels: true class sits at column 0 of pred, so the new
        # label is simply zero for every example.
        new_label = F.zeros_like(label)
    else:
        # Dense labels: ones for the true column, zeros for sampled columns.
        label_vec = F.reshape(label, (-1, 1))
        new_label_true = F.ones_like(label_vec)
        new_label_sampled = F.zeros_like(pred_sampled)
        new_label = F.Concat(new_label_true, new_label_sampled, dim=1)
    return pred, new_label
constant[Forward computation.]
<ast.Tuple object at 0x7da1b21e1660> assign[=] name[sampled_values]
variable[w_sampled] assign[=] call[name[w_all].slice, parameter[]]
variable[w_true] assign[=] call[name[w_all].slice, parameter[]]
variable[b_sampled] assign[=] call[name[b_all].slice, parameter[]]
variable[b_true] assign[=] call[name[b_all].slice, parameter[]]
variable[x] assign[=] call[name[x].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da1b21e3190>, <ast.Attribute object at 0x7da1b21e2f50>]]]]
variable[pred_true] assign[=] binary_operation[call[binary_operation[name[w_true] * name[x]].sum, parameter[]] + name[b_true]]
variable[b_sampled] assign[=] call[name[F].reshape, parameter[name[b_sampled], tuple[[<ast.UnaryOp object at 0x7da1b21e3250>]]]]
variable[pred_sampled] assign[=] call[name[F].FullyConnected, parameter[name[x]]]
if name[self]._remove_accidental_hits begin[:]
variable[label_vec] assign[=] call[name[F].reshape, parameter[name[label], tuple[[<ast.UnaryOp object at 0x7da1b21e1c90>, <ast.Constant object at 0x7da1b21e1450>]]]]
variable[sample_vec] assign[=] call[name[F].reshape, parameter[name[sampled_candidates], tuple[[<ast.Constant object at 0x7da1b21e03d0>, <ast.UnaryOp object at 0x7da1b21e1600>]]]]
variable[mask] assign[=] binary_operation[call[name[F].broadcast_equal, parameter[name[label_vec], name[sample_vec]]] * <ast.UnaryOp object at 0x7da1b21e1a20>]
variable[pred_sampled] assign[=] binary_operation[name[pred_sampled] + name[mask]]
variable[expected_count_sampled] assign[=] call[name[F].reshape, parameter[name[expected_count_sampled]]]
variable[expected_count_true] assign[=] call[name[expected_count_true].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da1b21e1420>]]]]
variable[pred_true] assign[=] binary_operation[name[pred_true] - call[name[F].log, parameter[name[expected_count_true]]]]
variable[pred_true] assign[=] call[name[pred_true].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da1b21e1510>, <ast.Constant object at 0x7da1b21e2080>]]]]
variable[pred_sampled] assign[=] call[name[F].broadcast_sub, parameter[name[pred_sampled], call[name[F].log, parameter[name[expected_count_sampled]]]]]
variable[pred] assign[=] call[name[F].concat, parameter[name[pred_true], name[pred_sampled]]]
if name[self]._sparse_label begin[:]
variable[new_label] assign[=] call[name[F].zeros_like, parameter[name[label]]]
return[tuple[[<ast.Name object at 0x7da18f723cd0>, <ast.Name object at 0x7da18f720610>]]] | keyword[def] identifier[hybrid_forward] ( identifier[self] , identifier[F] , identifier[x] , identifier[sampled_values] , identifier[label] , identifier[w_all] , identifier[b_all] ):
literal[string]
identifier[sampled_candidates] , identifier[expected_count_sampled] , identifier[expected_count_true] = identifier[sampled_values]
identifier[w_sampled] = identifier[w_all] . identifier[slice] ( identifier[begin] =( literal[int] , literal[int] ), identifier[end] =( identifier[self] . identifier[_num_sampled] , keyword[None] ))
identifier[w_true] = identifier[w_all] . identifier[slice] ( identifier[begin] =( identifier[self] . identifier[_num_sampled] , literal[int] ), identifier[end] =( keyword[None] , keyword[None] ))
identifier[b_sampled] = identifier[b_all] . identifier[slice] ( identifier[begin] =( literal[int] ,), identifier[end] =( identifier[self] . identifier[_num_sampled] ,))
identifier[b_true] = identifier[b_all] . identifier[slice] ( identifier[begin] =( identifier[self] . identifier[_num_sampled] ,), identifier[end] =( keyword[None] ,))
identifier[x] = identifier[x] . identifier[reshape] ((- literal[int] , identifier[self] . identifier[_in_unit] ))
identifier[pred_true] =( identifier[w_true] * identifier[x] ). identifier[sum] ( identifier[axis] = literal[int] )+ identifier[b_true]
identifier[b_sampled] = identifier[F] . identifier[reshape] ( identifier[b_sampled] ,(- literal[int] ,))
identifier[pred_sampled] = identifier[F] . identifier[FullyConnected] ( identifier[x] , identifier[weight] = identifier[w_sampled] , identifier[bias] = identifier[b_sampled] ,
identifier[num_hidden] = identifier[self] . identifier[_num_sampled] )
keyword[if] identifier[self] . identifier[_remove_accidental_hits] :
identifier[label_vec] = identifier[F] . identifier[reshape] ( identifier[label] ,(- literal[int] , literal[int] ))
identifier[sample_vec] = identifier[F] . identifier[reshape] ( identifier[sampled_candidates] ,( literal[int] ,- literal[int] ))
identifier[mask] = identifier[F] . identifier[broadcast_equal] ( identifier[label_vec] , identifier[sample_vec] )*- literal[int]
identifier[pred_sampled] = identifier[pred_sampled] + identifier[mask]
identifier[expected_count_sampled] = identifier[F] . identifier[reshape] ( identifier[expected_count_sampled] ,
identifier[shape] =( literal[int] , identifier[self] . identifier[_num_sampled] ))
identifier[expected_count_true] = identifier[expected_count_true] . identifier[reshape] ((- literal[int] ,))
identifier[pred_true] = identifier[pred_true] - identifier[F] . identifier[log] ( identifier[expected_count_true] )
identifier[pred_true] = identifier[pred_true] . identifier[reshape] ((- literal[int] , literal[int] ))
identifier[pred_sampled] = identifier[F] . identifier[broadcast_sub] ( identifier[pred_sampled] , identifier[F] . identifier[log] ( identifier[expected_count_sampled] ))
identifier[pred] = identifier[F] . identifier[concat] ( identifier[pred_true] , identifier[pred_sampled] , identifier[dim] = literal[int] )
keyword[if] identifier[self] . identifier[_sparse_label] :
identifier[new_label] = identifier[F] . identifier[zeros_like] ( identifier[label] )
keyword[else] :
identifier[label_vec] = identifier[F] . identifier[reshape] ( identifier[label] ,(- literal[int] , literal[int] ))
identifier[new_label_true] = identifier[F] . identifier[ones_like] ( identifier[label_vec] )
identifier[new_label_sampled] = identifier[F] . identifier[zeros_like] ( identifier[pred_sampled] )
identifier[new_label] = identifier[F] . identifier[Concat] ( identifier[new_label_true] , identifier[new_label_sampled] , identifier[dim] = literal[int] )
keyword[return] identifier[pred] , identifier[new_label] | def hybrid_forward(self, F, x, sampled_values, label, w_all, b_all):
"""Forward computation."""
(sampled_candidates, expected_count_sampled, expected_count_true) = sampled_values
# (num_sampled, in_unit)
w_sampled = w_all.slice(begin=(0, 0), end=(self._num_sampled, None))
w_true = w_all.slice(begin=(self._num_sampled, 0), end=(None, None))
b_sampled = b_all.slice(begin=(0,), end=(self._num_sampled,))
b_true = b_all.slice(begin=(self._num_sampled,), end=(None,))
# true pred
# (batch_size, 1)
x = x.reshape((-1, self._in_unit))
pred_true = (w_true * x).sum(axis=1) + b_true
# samples pred
# (batch_size, num_sampled)
b_sampled = F.reshape(b_sampled, (-1,))
pred_sampled = F.FullyConnected(x, weight=w_sampled, bias=b_sampled, num_hidden=self._num_sampled)
# remove accidental hits
if self._remove_accidental_hits:
label_vec = F.reshape(label, (-1, 1))
sample_vec = F.reshape(sampled_candidates, (1, -1))
mask = F.broadcast_equal(label_vec, sample_vec) * -1e+37
pred_sampled = pred_sampled + mask # depends on [control=['if'], data=[]]
# subtract log(q)
expected_count_sampled = F.reshape(expected_count_sampled, shape=(1, self._num_sampled))
expected_count_true = expected_count_true.reshape((-1,))
pred_true = pred_true - F.log(expected_count_true)
pred_true = pred_true.reshape((-1, 1))
pred_sampled = F.broadcast_sub(pred_sampled, F.log(expected_count_sampled))
# pred and new_labels
# (batch_size, 1+num_sampled)
pred = F.concat(pred_true, pred_sampled, dim=1)
if self._sparse_label:
new_label = F.zeros_like(label) # depends on [control=['if'], data=[]]
else:
label_vec = F.reshape(label, (-1, 1))
new_label_true = F.ones_like(label_vec)
new_label_sampled = F.zeros_like(pred_sampled)
new_label = F.Concat(new_label_true, new_label_sampled, dim=1)
return (pred, new_label) |
def get_response(self):
    """
    Submit the paste data and return the HTTP response.

    Runs the common/data preprocessing hooks, validates that there is
    content to paste, builds a GET or POST request according to
    ``self.METHOD`` and opens it.

    :raises PasteException: if no content remains after preprocessing.
    :returns: the file-like response from ``urllib2.urlopen`` (also stored
        on ``self.response``).
    """
    self.process_commmon()
    self.process_data()
    # Validate *before* encoding or building the request: the original code
    # only checked after constructing the request, doing pointless work for
    # an empty paste.
    if not self.data['content']:
        raise PasteException("No content to paste")
    urlencoded_data = urllib.urlencode(self.data)
    if self.METHOD == POST:
        # POST: payload travels in the request body.
        req = urllib2.Request(self.URL, urlencoded_data)
    else:
        # GET (or anything else): payload travels in the query string.
        req = urllib2.Request('%s?%s' % (self.URL, urlencoded_data))
    self.response = urllib2.urlopen(req)
    return self.response
constant[
Returns response according submitted the data and method.
]
call[name[self].process_commmon, parameter[]]
call[name[self].process_data, parameter[]]
variable[urlencoded_data] assign[=] call[name[urllib].urlencode, parameter[name[self].data]]
if compare[name[self].METHOD equal[==] name[POST]] begin[:]
variable[req] assign[=] call[name[urllib2].Request, parameter[name[self].URL, name[urlencoded_data]]]
if <ast.UnaryOp object at 0x7da18dc98f70> begin[:]
<ast.Raise object at 0x7da18dc9bbb0>
name[self].response assign[=] call[name[urllib2].urlopen, parameter[name[req]]]
return[name[self].response] | keyword[def] identifier[get_response] ( identifier[self] ):
literal[string]
identifier[self] . identifier[process_commmon] ()
identifier[self] . identifier[process_data] ()
identifier[urlencoded_data] = identifier[urllib] . identifier[urlencode] ( identifier[self] . identifier[data] )
keyword[if] identifier[self] . identifier[METHOD] == identifier[POST] :
identifier[req] = identifier[urllib2] . identifier[Request] ( identifier[self] . identifier[URL] , identifier[urlencoded_data] )
keyword[else] :
identifier[req] = identifier[urllib2] . identifier[Request] ( literal[string] %( identifier[self] . identifier[URL] , identifier[urlencoded_data] ))
keyword[if] keyword[not] identifier[self] . identifier[data] [ literal[string] ]:
keyword[raise] identifier[PasteException] ( literal[string] )
identifier[self] . identifier[response] = identifier[urllib2] . identifier[urlopen] ( identifier[req] )
keyword[return] identifier[self] . identifier[response] | def get_response(self):
"""
Returns response according submitted the data and method.
"""
self.process_commmon()
self.process_data()
urlencoded_data = urllib.urlencode(self.data)
if self.METHOD == POST:
req = urllib2.Request(self.URL, urlencoded_data) # depends on [control=['if'], data=[]]
else:
req = urllib2.Request('%s?%s' % (self.URL, urlencoded_data))
if not self.data['content']:
raise PasteException('No content to paste') # depends on [control=['if'], data=[]]
self.response = urllib2.urlopen(req)
return self.response |
def _collect_cpu_info(run_info):
    """Collect the CPU information for the local environment."""
    # Note: cpuinfo is not installed in the TensorFlow OSS tree.
    # It is installable via pip.
    import cpuinfo  # pylint: disable=g-import-not-at-top
    info = cpuinfo.get_cpu_info()
    cpu_info = {
        "num_cores": multiprocessing.cpu_count(),
        "cpu_info": info["brand"],
        # Advertised clock is reported in Hz; store it as MHz.
        "mhz_per_cpu": info["hz_advertised_raw"][0] / 1.0e6,
    }
    run_info["machine_config"]["cpu_info"] = cpu_info
constant[Collect the CPU information for the local environment.]
variable[cpu_info] assign[=] dictionary[[], []]
call[name[cpu_info]][constant[num_cores]] assign[=] call[name[multiprocessing].cpu_count, parameter[]]
import module[cpuinfo]
variable[info] assign[=] call[name[cpuinfo].get_cpu_info, parameter[]]
call[name[cpu_info]][constant[cpu_info]] assign[=] call[name[info]][constant[brand]]
call[name[cpu_info]][constant[mhz_per_cpu]] assign[=] binary_operation[call[call[name[info]][constant[hz_advertised_raw]]][constant[0]] / constant[1000000.0]]
call[call[name[run_info]][constant[machine_config]]][constant[cpu_info]] assign[=] name[cpu_info] | keyword[def] identifier[_collect_cpu_info] ( identifier[run_info] ):
literal[string]
identifier[cpu_info] ={}
identifier[cpu_info] [ literal[string] ]= identifier[multiprocessing] . identifier[cpu_count] ()
keyword[import] identifier[cpuinfo]
identifier[info] = identifier[cpuinfo] . identifier[get_cpu_info] ()
identifier[cpu_info] [ literal[string] ]= identifier[info] [ literal[string] ]
identifier[cpu_info] [ literal[string] ]= identifier[info] [ literal[string] ][ literal[int] ]/ literal[int]
identifier[run_info] [ literal[string] ][ literal[string] ]= identifier[cpu_info] | def _collect_cpu_info(run_info):
"""Collect the CPU information for the local environment."""
cpu_info = {}
cpu_info['num_cores'] = multiprocessing.cpu_count()
# Note: cpuinfo is not installed in the TensorFlow OSS tree.
# It is installable via pip.
import cpuinfo # pylint: disable=g-import-not-at-top
info = cpuinfo.get_cpu_info()
cpu_info['cpu_info'] = info['brand']
cpu_info['mhz_per_cpu'] = info['hz_advertised_raw'][0] / 1000000.0
run_info['machine_config']['cpu_info'] = cpu_info |
def next_power_of_2(x):
    """Finds the next power of 2 value

    Returns the smallest power of two that is >= ``x`` (and 1 for x == 0),
    as a plain int so the result can be used directly as a size/length.

    Args:
        x: Input value (non-negative number)

    Returns:
        power_of_2: Next power of 2 value (int)
    """
    # int() keeps the return type consistent: without it the x > 0 branch
    # returns a numpy float (2 ** np.ceil(...)) while the x == 0 branch
    # returns the int 1.
    power_of_2 = 1 if x == 0 else int(2 ** np.ceil(np.log2(x)))
    return power_of_2
constant[Finds the next power of 2 value
Args:
x: Input value
Returns:
power_of_2: Next power of 2 value
]
variable[power_of_2] assign[=] <ast.IfExp object at 0x7da18ede6080>
return[name[power_of_2]] | keyword[def] identifier[next_power_of_2] ( identifier[x] ):
literal[string]
identifier[power_of_2] = literal[int] keyword[if] identifier[x] == literal[int] keyword[else] literal[int] ** identifier[np] . identifier[ceil] ( identifier[np] . identifier[log2] ( identifier[x] ))
keyword[return] identifier[power_of_2] | def next_power_of_2(x):
"""Finds the next power of 2 value
Args:
x: Input value
Returns:
power_of_2: Next power of 2 value
"""
power_of_2 = 1 if x == 0 else 2 ** np.ceil(np.log2(x))
return power_of_2 |
def get_block_details(block_representation, coin_symbol='btc', txn_limit=None,
                      txn_offset=None, in_out_limit=None, api_key=None):
    """
    Takes a block_representation, coin_symbol and txn_limit and
    1) Gets the block overview
    2) Makes a separate API call to get specific data on txn_limit transactions

    Note: block_representation may be the block number or block hash

    WARNING: using a high txn_limit will make this *extremely* slow.
    """
    assert is_valid_coin_symbol(coin_symbol)

    block_overview = get_block_overview(
        block_representation=block_representation,
        coin_symbol=coin_symbol,
        txn_limit=txn_limit,
        txn_offset=txn_offset,
        api_key=api_key,
    )
    if 'error' in block_overview:
        return block_overview

    txids_to_lookup = block_overview['txids']
    txs_details = get_transactions_details(
        tx_hash_list=txids_to_lookup,
        coin_symbol=coin_symbol,
        limit=in_out_limit,
        api_key=api_key,
    )
    if 'error' in txs_details:
        return txs_details

    # Map each txid to its position within the block so the batched
    # (possibly unordered) results can be restored to block order quickly.
    position_by_txid = {tx_id: idx for idx, tx_id in enumerate(txids_to_lookup)}
    block_overview['txids'] = sorted(
        txs_details,
        # anything that fails to match sorts last
        key=lambda tx: position_by_txid.get(tx.get('hash'), 9999),
    )
    return block_overview
constant[
Takes a block_representation, coin_symbol and txn_limit and
1) Gets the block overview
2) Makes a separate API call to get specific data on txn_limit transactions
Note: block_representation may be the block number or block hash
WARNING: using a high txn_limit will make this *extremely* slow.
]
assert[call[name[is_valid_coin_symbol], parameter[name[coin_symbol]]]]
variable[block_overview] assign[=] call[name[get_block_overview], parameter[]]
if compare[constant[error] in name[block_overview]] begin[:]
return[name[block_overview]]
variable[txids_to_lookup] assign[=] call[name[block_overview]][constant[txids]]
variable[txs_details] assign[=] call[name[get_transactions_details], parameter[]]
if compare[constant[error] in name[txs_details]] begin[:]
return[name[txs_details]]
variable[txids_comparator_dict] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b07ce290>, <ast.Name object at 0x7da1b07cd090>]]] in starred[call[name[enumerate], parameter[name[txids_to_lookup]]]] begin[:]
call[name[txids_comparator_dict]][name[tx_id]] assign[=] name[cnt]
call[name[block_overview]][constant[txids]] assign[=] call[name[sorted], parameter[name[txs_details]]]
return[name[block_overview]] | keyword[def] identifier[get_block_details] ( identifier[block_representation] , identifier[coin_symbol] = literal[string] , identifier[txn_limit] = keyword[None] ,
identifier[txn_offset] = keyword[None] , identifier[in_out_limit] = keyword[None] , identifier[api_key] = keyword[None] ):
literal[string]
keyword[assert] identifier[is_valid_coin_symbol] ( identifier[coin_symbol] )
identifier[block_overview] = identifier[get_block_overview] (
identifier[block_representation] = identifier[block_representation] ,
identifier[coin_symbol] = identifier[coin_symbol] ,
identifier[txn_limit] = identifier[txn_limit] ,
identifier[txn_offset] = identifier[txn_offset] ,
identifier[api_key] = identifier[api_key] ,
)
keyword[if] literal[string] keyword[in] identifier[block_overview] :
keyword[return] identifier[block_overview]
identifier[txids_to_lookup] = identifier[block_overview] [ literal[string] ]
identifier[txs_details] = identifier[get_transactions_details] (
identifier[tx_hash_list] = identifier[txids_to_lookup] ,
identifier[coin_symbol] = identifier[coin_symbol] ,
identifier[limit] = identifier[in_out_limit] ,
identifier[api_key] = identifier[api_key] ,
)
keyword[if] literal[string] keyword[in] identifier[txs_details] :
keyword[return] identifier[txs_details]
identifier[txids_comparator_dict] ={}
keyword[for] identifier[cnt] , identifier[tx_id] keyword[in] identifier[enumerate] ( identifier[txids_to_lookup] ):
identifier[txids_comparator_dict] [ identifier[tx_id] ]= identifier[cnt]
identifier[block_overview] [ literal[string] ]= identifier[sorted] (
identifier[txs_details] ,
identifier[key] = keyword[lambda] identifier[k] : identifier[txids_comparator_dict] . identifier[get] ( identifier[k] . identifier[get] ( literal[string] ), literal[int] ),
)
keyword[return] identifier[block_overview] | def get_block_details(block_representation, coin_symbol='btc', txn_limit=None, txn_offset=None, in_out_limit=None, api_key=None):
"""
Takes a block_representation, coin_symbol and txn_limit and
1) Gets the block overview
2) Makes a separate API call to get specific data on txn_limit transactions
Note: block_representation may be the block number or block hash
WARNING: using a high txn_limit will make this *extremely* slow.
"""
assert is_valid_coin_symbol(coin_symbol)
block_overview = get_block_overview(block_representation=block_representation, coin_symbol=coin_symbol, txn_limit=txn_limit, txn_offset=txn_offset, api_key=api_key)
if 'error' in block_overview:
return block_overview # depends on [control=['if'], data=['block_overview']]
txids_to_lookup = block_overview['txids']
txs_details = get_transactions_details(tx_hash_list=txids_to_lookup, coin_symbol=coin_symbol, limit=in_out_limit, api_key=api_key)
if 'error' in txs_details:
return txs_details # depends on [control=['if'], data=['txs_details']]
# build comparator dict to use for fast sorting of batched results later
txids_comparator_dict = {}
for (cnt, tx_id) in enumerate(txids_to_lookup):
txids_comparator_dict[tx_id] = cnt # depends on [control=['for'], data=[]]
# sort results using comparator dict
# anything that fails goes last
block_overview['txids'] = sorted(txs_details, key=lambda k: txids_comparator_dict.get(k.get('hash'), 9999))
return block_overview |
def _load_actor_class_from_gcs(self, driver_id, function_descriptor):
    """Load an actor class from GCS.

    Args:
        driver_id: ID of the driver that exported the actor class.
        function_descriptor: Descriptor whose ``function_id`` names the
            actor class entry in GCS.

    Returns:
        The unpickled actor class, or a fake stand-in class (which only
        produces error messages) if unpickling failed.
    """
    key = (b"ActorClass:" + driver_id.binary() + b":" +
           function_descriptor.function_id.binary())
    # Wait for the actor class key to have been imported by the
    # import thread. TODO(rkn): It shouldn't be possible to end
    # up in an infinite loop here, but we should push an error to
    # the driver if too much time is spent here.
    while key not in self.imported_actor_classes:
        time.sleep(0.001)

    # Fetch raw data from GCS.
    (driver_id_str, class_name, module, pickled_class,
     actor_method_names) = self._worker.redis_client.hmget(
         key, [
             "driver_id", "class_name", "module", "class",
             "actor_method_names"
         ])

    class_name = ensure_str(class_name)
    module_name = ensure_str(module)
    driver_id = ray.DriverID(driver_id_str)
    actor_method_names = json.loads(ensure_str(actor_method_names))

    actor_class = None
    try:
        with self.lock:
            actor_class = pickle.loads(pickled_class)
    except Exception:
        # BUG FIX: the original used '"... %s."'.format(class_name), which
        # never substitutes into a %-style template. Use logging's lazy
        # %-args so the class name actually appears in the message.
        logger.exception("Failed to load actor class %s.", class_name)
        # The actor class failed to be unpickled, create a fake actor
        # class instead (just to produce error messages and to prevent
        # the driver from hanging).
        actor_class = self._create_fake_actor_class(
            class_name, actor_method_names)
        # If an exception was thrown when the actor was imported, we record
        # the traceback and notify the scheduler of the failure.
        traceback_str = ray.utils.format_error_message(
            traceback.format_exc())
        # Log the error message.
        push_error_to_driver(
            self._worker, ray_constants.REGISTER_ACTOR_PUSH_ERROR,
            "Failed to unpickle actor class '{}' for actor ID {}. "
            "Traceback:\n{}".format(class_name,
                                    self._worker.actor_id.hex(),
                                    traceback_str), driver_id)
        # TODO(rkn): In the future, it might make sense to have the worker
        # exit here. However, currently that would lead to hanging if
        # someone calls ray.get on a method invoked on the actor.

    # The below line is necessary. Because in the driver process,
    # if the function is defined in the file where the python script
    # was started from, its module is `__main__`.
    # However in the worker process, the `__main__` module is a
    # different module, which is `default_worker.py`
    actor_class.__module__ = module_name
    return actor_class
constant[Load actor class from GCS.]
variable[key] assign[=] binary_operation[binary_operation[binary_operation[constant[b'ActorClass:'] + call[name[driver_id].binary, parameter[]]] + constant[b':']] + call[name[function_descriptor].function_id.binary, parameter[]]]
while compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[self].imported_actor_classes] begin[:]
call[name[time].sleep, parameter[constant[0.001]]]
<ast.Tuple object at 0x7da20e9b1600> assign[=] call[name[self]._worker.redis_client.hmget, parameter[name[key], list[[<ast.Constant object at 0x7da207f9b850>, <ast.Constant object at 0x7da207f9a110>, <ast.Constant object at 0x7da207f9b0a0>, <ast.Constant object at 0x7da207f99ed0>, <ast.Constant object at 0x7da207f9a5c0>]]]]
variable[class_name] assign[=] call[name[ensure_str], parameter[name[class_name]]]
variable[module_name] assign[=] call[name[ensure_str], parameter[name[module]]]
variable[driver_id] assign[=] call[name[ray].DriverID, parameter[name[driver_id_str]]]
variable[actor_method_names] assign[=] call[name[json].loads, parameter[call[name[ensure_str], parameter[name[actor_method_names]]]]]
variable[actor_class] assign[=] constant[None]
<ast.Try object at 0x7da207f99a80>
name[actor_class].__module__ assign[=] name[module_name]
return[name[actor_class]] | keyword[def] identifier[_load_actor_class_from_gcs] ( identifier[self] , identifier[driver_id] , identifier[function_descriptor] ):
literal[string]
identifier[key] =( literal[string] + identifier[driver_id] . identifier[binary] ()+ literal[string] +
identifier[function_descriptor] . identifier[function_id] . identifier[binary] ())
keyword[while] identifier[key] keyword[not] keyword[in] identifier[self] . identifier[imported_actor_classes] :
identifier[time] . identifier[sleep] ( literal[int] )
( identifier[driver_id_str] , identifier[class_name] , identifier[module] , identifier[pickled_class] ,
identifier[actor_method_names] )= identifier[self] . identifier[_worker] . identifier[redis_client] . identifier[hmget] (
identifier[key] ,[
literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string]
])
identifier[class_name] = identifier[ensure_str] ( identifier[class_name] )
identifier[module_name] = identifier[ensure_str] ( identifier[module] )
identifier[driver_id] = identifier[ray] . identifier[DriverID] ( identifier[driver_id_str] )
identifier[actor_method_names] = identifier[json] . identifier[loads] ( identifier[ensure_str] ( identifier[actor_method_names] ))
identifier[actor_class] = keyword[None]
keyword[try] :
keyword[with] identifier[self] . identifier[lock] :
identifier[actor_class] = identifier[pickle] . identifier[loads] ( identifier[pickled_class] )
keyword[except] identifier[Exception] :
identifier[logger] . identifier[exception] (
literal[string] . identifier[format] ( identifier[class_name] ))
identifier[actor_class] = identifier[self] . identifier[_create_fake_actor_class] (
identifier[class_name] , identifier[actor_method_names] )
identifier[traceback_str] = identifier[ray] . identifier[utils] . identifier[format_error_message] (
identifier[traceback] . identifier[format_exc] ())
identifier[push_error_to_driver] (
identifier[self] . identifier[_worker] , identifier[ray_constants] . identifier[REGISTER_ACTOR_PUSH_ERROR] ,
literal[string]
literal[string] . identifier[format] ( identifier[class_name] ,
identifier[self] . identifier[_worker] . identifier[actor_id] . identifier[hex] (),
identifier[traceback_str] ), identifier[driver_id] )
identifier[actor_class] . identifier[__module__] = identifier[module_name]
keyword[return] identifier[actor_class] | def _load_actor_class_from_gcs(self, driver_id, function_descriptor):
"""Load actor class from GCS."""
key = b'ActorClass:' + driver_id.binary() + b':' + function_descriptor.function_id.binary()
# Wait for the actor class key to have been imported by the
# import thread. TODO(rkn): It shouldn't be possible to end
# up in an infinite loop here, but we should push an error to
# the driver if too much time is spent here.
while key not in self.imported_actor_classes:
time.sleep(0.001) # depends on [control=['while'], data=[]]
# Fetch raw data from GCS.
(driver_id_str, class_name, module, pickled_class, actor_method_names) = self._worker.redis_client.hmget(key, ['driver_id', 'class_name', 'module', 'class', 'actor_method_names'])
class_name = ensure_str(class_name)
module_name = ensure_str(module)
driver_id = ray.DriverID(driver_id_str)
actor_method_names = json.loads(ensure_str(actor_method_names))
actor_class = None
try:
with self.lock:
actor_class = pickle.loads(pickled_class) # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
except Exception:
logger.exception('Failed to load actor class %s.'.format(class_name))
# The actor class failed to be unpickled, create a fake actor
# class instead (just to produce error messages and to prevent
# the driver from hanging).
actor_class = self._create_fake_actor_class(class_name, actor_method_names)
# If an exception was thrown when the actor was imported, we record
# the traceback and notify the scheduler of the failure.
traceback_str = ray.utils.format_error_message(traceback.format_exc())
# Log the error message.
push_error_to_driver(self._worker, ray_constants.REGISTER_ACTOR_PUSH_ERROR, "Failed to unpickle actor class '{}' for actor ID {}. Traceback:\n{}".format(class_name, self._worker.actor_id.hex(), traceback_str), driver_id) # depends on [control=['except'], data=[]]
# TODO(rkn): In the future, it might make sense to have the worker
# exit here. However, currently that would lead to hanging if
# someone calls ray.get on a method invoked on the actor.
# The below line is necessary. Because in the driver process,
# if the function is defined in the file where the python script
# was started from, its module is `__main__`.
# However in the worker process, the `__main__` module is a
# different module, which is `default_worker.py`
actor_class.__module__ = module_name
return actor_class |
def update_client_grants(self, client_id, scope=None, authorities=None,
                         grant_types=None, redirect_uri=None, replace=False):
    """
    Extend a client with additional scopes or authorities.

    Existing scopes and authorities are left as is unless ``replace`` is
    True, in which case they are replaced wholesale by the given values.
    Grant types and redirect URIs are always appended.

    Args:
        client_id: Identifier of an existing client.
        scope: Extra scopes to grant (list of strings).
        authorities: Extra authorities to grant (list of strings).
        grant_types: Extra OAuth grant types to allow; a redirect_uri is
            required when adding 'authorization_code'.
        redirect_uri: Extra redirect URIs.
        replace: When True, replace scope/authorities instead of extending.

    Returns:
        The HTTP response of the PUT call on success (status 200).

    Raises:
        ValueError: If the client does not exist.
        requests.HTTPError: If the server rejects the update.
    """
    self.assert_has_permission('clients.write')
    client = self.get_client(client_id)
    if not client:
        raise ValueError("Must first create client: '%s'" % (client_id))
    # BUG FIX: the original signature used mutable default arguments
    # (scope=[], ...), which are shared across calls. None sentinels
    # preserve the same falsy behavior without the shared state.
    scope = scope or []
    authorities = authorities or []
    grant_types = grant_types or []
    redirect_uri = redirect_uri or []
    if replace:
        changes = {
            'client_id': client_id,
            'scope': scope,
            'authorities': authorities,
        }
    else:
        changes = {'client_id': client_id}
        if scope:
            # BUG FIX: copy before extending so the fetched `client` dict
            # is not mutated in place as a side effect.
            changes['scope'] = list(client['scope'])
            changes['scope'].extend(scope)
        if authorities:
            changes['authorities'] = list(client['authorities'])
            changes['authorities'].extend(authorities)
    if grant_types:
        if 'authorization_code' in grant_types and not redirect_uri:
            logging.warning("A redirect_uri is required for authorization_code.")
        changes['authorized_grant_types'] = list(client['authorized_grant_types'])
        changes['authorized_grant_types'].extend(grant_types)
    if redirect_uri:
        if 'redirect_uri' in client:
            changes['redirect_uri'] = list(client['redirect_uri'])
            changes['redirect_uri'].extend(redirect_uri)
        else:
            changes['redirect_uri'] = redirect_uri
    uri = self.uri + '/oauth/clients/' + client_id
    headers = {
        "pragma": "no-cache",
        "Cache-Control": "no-cache",
        "Content-Type": "application/json",
        "Accepts": "application/json",
        "Authorization": "Bearer " + self.get_token()
    }
    logging.debug("URI=" + str(uri))
    logging.debug("HEADERS=" + str(headers))
    logging.debug("BODY=" + json.dumps(changes))
    response = requests.put(uri, headers=headers, data=json.dumps(changes))
    logging.debug("STATUS=" + str(response.status_code))
    if response.status_code == 200:
        return response
    logging.error(response.content)
    response.raise_for_status()
constant[
Will extend the client with additional scopes or
authorities. Any existing scopes and authorities will be left
as is unless asked to replace entirely.
]
call[name[self].assert_has_permission, parameter[constant[clients.write]]]
variable[client] assign[=] call[name[self].get_client, parameter[name[client_id]]]
if <ast.UnaryOp object at 0x7da1b26adc90> begin[:]
<ast.Raise object at 0x7da1b26af280>
if name[replace] begin[:]
variable[changes] assign[=] dictionary[[<ast.Constant object at 0x7da1b26aed40>, <ast.Constant object at 0x7da1b26aee60>, <ast.Constant object at 0x7da1b26af190>], [<ast.Name object at 0x7da1b26afd30>, <ast.Name object at 0x7da1b26ac580>, <ast.Name object at 0x7da1b26ad6f0>]]
variable[uri] assign[=] binary_operation[binary_operation[name[self].uri + constant[/oauth/clients/]] + name[client_id]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b26acc70>, <ast.Constant object at 0x7da1b26ae680>, <ast.Constant object at 0x7da1b26ac820>, <ast.Constant object at 0x7da1b26ae0b0>, <ast.Constant object at 0x7da1b26aed70>], [<ast.Constant object at 0x7da1b26acfa0>, <ast.Constant object at 0x7da1b26adc00>, <ast.Constant object at 0x7da1b26ae5f0>, <ast.Constant object at 0x7da1b26ae470>, <ast.BinOp object at 0x7da1b26ad300>]]
call[name[logging].debug, parameter[binary_operation[constant[URI=] + call[name[str], parameter[name[uri]]]]]]
call[name[logging].debug, parameter[binary_operation[constant[HEADERS=] + call[name[str], parameter[name[headers]]]]]]
call[name[logging].debug, parameter[binary_operation[constant[BODY=] + call[name[json].dumps, parameter[name[changes]]]]]]
variable[response] assign[=] call[name[requests].put, parameter[name[uri]]]
call[name[logging].debug, parameter[binary_operation[constant[STATUS=] + call[name[str], parameter[name[response].status_code]]]]]
if compare[name[response].status_code equal[==] constant[200]] begin[:]
return[name[response]] | keyword[def] identifier[update_client_grants] ( identifier[self] , identifier[client_id] , identifier[scope] =[], identifier[authorities] =[],
identifier[grant_types] =[], identifier[redirect_uri] =[], identifier[replace] = keyword[False] ):
literal[string]
identifier[self] . identifier[assert_has_permission] ( literal[string] )
identifier[client] = identifier[self] . identifier[get_client] ( identifier[client_id] )
keyword[if] keyword[not] identifier[client] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[client_id] ))
keyword[if] identifier[replace] :
identifier[changes] ={
literal[string] : identifier[client_id] ,
literal[string] : identifier[scope] ,
literal[string] : identifier[authorities] ,
}
keyword[else] :
identifier[changes] ={ literal[string] : identifier[client_id] }
keyword[if] identifier[scope] :
identifier[changes] [ literal[string] ]= identifier[client] [ literal[string] ]
identifier[changes] [ literal[string] ]. identifier[extend] ( identifier[scope] )
keyword[if] identifier[authorities] :
identifier[changes] [ literal[string] ]= identifier[client] [ literal[string] ]
identifier[changes] [ literal[string] ]. identifier[extend] ( identifier[authorities] )
keyword[if] identifier[grant_types] :
keyword[if] literal[string] keyword[in] identifier[grant_types] keyword[and] keyword[not] identifier[redirect_uri] :
identifier[logging] . identifier[warning] ( literal[string] )
identifier[changes] [ literal[string] ]= identifier[client] [ literal[string] ]
identifier[changes] [ literal[string] ]. identifier[extend] ( identifier[grant_types] )
keyword[if] identifier[redirect_uri] :
keyword[if] literal[string] keyword[in] identifier[client] :
identifier[changes] [ literal[string] ]= identifier[client] [ literal[string] ]
identifier[changes] [ literal[string] ]. identifier[extend] ( identifier[redirect_uri] )
keyword[else] :
identifier[changes] [ literal[string] ]= identifier[redirect_uri]
identifier[uri] = identifier[self] . identifier[uri] + literal[string] + identifier[client_id]
identifier[headers] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] + identifier[self] . identifier[get_token] ()
}
identifier[logging] . identifier[debug] ( literal[string] + identifier[str] ( identifier[uri] ))
identifier[logging] . identifier[debug] ( literal[string] + identifier[str] ( identifier[headers] ))
identifier[logging] . identifier[debug] ( literal[string] + identifier[json] . identifier[dumps] ( identifier[changes] ))
identifier[response] = identifier[requests] . identifier[put] ( identifier[uri] , identifier[headers] = identifier[headers] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[changes] ))
identifier[logging] . identifier[debug] ( literal[string] + identifier[str] ( identifier[response] . identifier[status_code] ))
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
keyword[return] identifier[response]
keyword[else] :
identifier[logging] . identifier[error] ( identifier[response] . identifier[content] )
identifier[response] . identifier[raise_for_status] () | def update_client_grants(self, client_id, scope=[], authorities=[], grant_types=[], redirect_uri=[], replace=False):
"""
Will extend the client with additional scopes or
authorities. Any existing scopes and authorities will be left
as is unless asked to replace entirely.
"""
self.assert_has_permission('clients.write')
client = self.get_client(client_id)
if not client:
raise ValueError("Must first create client: '%s'" % client_id) # depends on [control=['if'], data=[]]
if replace:
changes = {'client_id': client_id, 'scope': scope, 'authorities': authorities} # depends on [control=['if'], data=[]]
else:
changes = {'client_id': client_id}
if scope:
changes['scope'] = client['scope']
changes['scope'].extend(scope) # depends on [control=['if'], data=[]]
if authorities:
changes['authorities'] = client['authorities']
changes['authorities'].extend(authorities) # depends on [control=['if'], data=[]]
if grant_types:
if 'authorization_code' in grant_types and (not redirect_uri):
logging.warning('A redirect_uri is required for authorization_code.') # depends on [control=['if'], data=[]]
changes['authorized_grant_types'] = client['authorized_grant_types']
changes['authorized_grant_types'].extend(grant_types) # depends on [control=['if'], data=[]]
if redirect_uri:
if 'redirect_uri' in client:
changes['redirect_uri'] = client['redirect_uri']
changes['redirect_uri'].extend(redirect_uri) # depends on [control=['if'], data=['client']]
else:
changes['redirect_uri'] = redirect_uri # depends on [control=['if'], data=[]]
uri = self.uri + '/oauth/clients/' + client_id
headers = {'pragma': 'no-cache', 'Cache-Control': 'no-cache', 'Content-Type': 'application/json', 'Accepts': 'application/json', 'Authorization': 'Bearer ' + self.get_token()}
logging.debug('URI=' + str(uri))
logging.debug('HEADERS=' + str(headers))
logging.debug('BODY=' + json.dumps(changes))
response = requests.put(uri, headers=headers, data=json.dumps(changes))
logging.debug('STATUS=' + str(response.status_code))
if response.status_code == 200:
return response # depends on [control=['if'], data=[]]
else:
logging.error(response.content)
response.raise_for_status() |
def sort_by(function):
    """
    Sort an incoming sequence using `function` as the sort key.

    >>> range(10) > sort_by(-X)
    [9, 8, 7, 6, 5, 4, 3, 2, 1, 0]

    Automatic data-structure creation is supported::

        users > sort_by([X.last_name, X.first_name])

    ``sort`` is a shortcut for ``sort_by(X)``:

    >>> [4, 5, 8, -3, 0] > sort
    [-3, 0, 4, 5, 8]

    Since ``0.2.3`` there is also a shortcut for reversing the sort:

    >>> 'asdfaSfa' > sort_by(X.lower()).descending
    ['s', 'S', 'f', 'f', 'd', 'a', 'a', 'a']
    """
    sorter = partial(sorted, key=function)
    # Expose a pre-built reversed variant under the `.descending` attribute.
    sorter.attrs = dict(descending=_descending_sort_by(function))
    return sorter
constant[
Sorts an incoming sequence by using the given `function` as key.
>>> range(10) > sort_by(-X)
[9, 8, 7, 6, 5, 4, 3, 2, 1, 0]
Supports automatic data-structure creation::
users > sort_by([X.last_name, X.first_name])
There is also a shortcut for ``sort_by(X)`` called ``sort``:
>>> [4, 5, 8, -3, 0] > sort
[-3, 0, 4, 5, 8]
And (as of ``0.2.3``) a shortcut for reversing the sort:
>>> 'asdfaSfa' > sort_by(X.lower()).descending
['s', 'S', 'f', 'f', 'd', 'a', 'a', 'a']
]
variable[f] assign[=] call[name[partial], parameter[name[sorted]]]
name[f].attrs assign[=] dictionary[[<ast.Constant object at 0x7da20e74bc70>], [<ast.Call object at 0x7da20e9623e0>]]
return[name[f]] | keyword[def] identifier[sort_by] ( identifier[function] ):
literal[string]
identifier[f] = identifier[partial] ( identifier[sorted] , identifier[key] = identifier[function] )
identifier[f] . identifier[attrs] ={ literal[string] : identifier[_descending_sort_by] ( identifier[function] )}
keyword[return] identifier[f] | def sort_by(function):
"""
Sorts an incoming sequence by using the given `function` as key.
>>> range(10) > sort_by(-X)
[9, 8, 7, 6, 5, 4, 3, 2, 1, 0]
Supports automatic data-structure creation::
users > sort_by([X.last_name, X.first_name])
There is also a shortcut for ``sort_by(X)`` called ``sort``:
>>> [4, 5, 8, -3, 0] > sort
[-3, 0, 4, 5, 8]
And (as of ``0.2.3``) a shortcut for reversing the sort:
>>> 'asdfaSfa' > sort_by(X.lower()).descending
['s', 'S', 'f', 'f', 'd', 'a', 'a', 'a']
"""
f = partial(sorted, key=function)
f.attrs = {'descending': _descending_sort_by(function)}
return f |
def maybe_append_oov_vectors(embeddings, num_oov_buckets):
    """Grow `embeddings` in place with `num_oov_buckets` extra zero rows.

    The appended rows serve as out-of-vocabulary buckets.  Because they
    are assigned zero vectors, using more than one OOV bucket is only
    meaningful when fine-tuning is performed later.

    Args:
        embeddings: 2-D numpy array [vocab_size, dim], resized in place.
        num_oov_buckets: Number of OOV rows to append (0 appends nothing).
    """
    vocab_size, dim = np.shape(embeddings)
    # refcheck=False permits the in-place resize even if other references
    # to the array exist; numpy zero-fills the newly added rows.
    embeddings.resize([vocab_size + num_oov_buckets, dim], refcheck=False)
constant[Adds zero vectors for oov buckets if num_oov_buckets > 0.
Since we are assigning zero vectors, adding more that one oov bucket is only
meaningful if we perform fine-tuning.
Args:
embeddings: Embeddings to extend.
num_oov_buckets: Number of OOV buckets in the extended embedding.
]
variable[num_embeddings] assign[=] call[call[name[np].shape, parameter[name[embeddings]]]][constant[0]]
variable[embedding_dim] assign[=] call[call[name[np].shape, parameter[name[embeddings]]]][constant[1]]
call[name[embeddings].resize, parameter[list[[<ast.BinOp object at 0x7da1b20eea40>, <ast.Name object at 0x7da1b20ec6d0>]]]] | keyword[def] identifier[maybe_append_oov_vectors] ( identifier[embeddings] , identifier[num_oov_buckets] ):
literal[string]
identifier[num_embeddings] = identifier[np] . identifier[shape] ( identifier[embeddings] )[ literal[int] ]
identifier[embedding_dim] = identifier[np] . identifier[shape] ( identifier[embeddings] )[ literal[int] ]
identifier[embeddings] . identifier[resize] (
[ identifier[num_embeddings] + identifier[num_oov_buckets] , identifier[embedding_dim] ], identifier[refcheck] = keyword[False] ) | def maybe_append_oov_vectors(embeddings, num_oov_buckets):
"""Adds zero vectors for oov buckets if num_oov_buckets > 0.
Since we are assigning zero vectors, adding more that one oov bucket is only
meaningful if we perform fine-tuning.
Args:
embeddings: Embeddings to extend.
num_oov_buckets: Number of OOV buckets in the extended embedding.
"""
num_embeddings = np.shape(embeddings)[0]
embedding_dim = np.shape(embeddings)[1]
embeddings.resize([num_embeddings + num_oov_buckets, embedding_dim], refcheck=False) |
def inline(self) -> str:
    """
    Build the inline endpoint string.

    :return: the API marker followed by whichever of server, ipv4, ipv6,
        port and path are set, space separated
    """
    candidates = (self.server, self.ipv4, self.ipv6, self.port, self.path)
    tokens = [str(value) for value in candidates if value]
    return SecuredBMAEndpoint.API + " " + " ".join(tokens)
constant[
Return endpoint string
:return:
]
variable[inlined] assign[=] <ast.ListComp object at 0x7da18ede4bb0>
return[binary_operation[binary_operation[name[SecuredBMAEndpoint].API + constant[ ]] + call[constant[ ].join, parameter[name[inlined]]]]] | keyword[def] identifier[inline] ( identifier[self] )-> identifier[str] :
literal[string]
identifier[inlined] =[ identifier[str] ( identifier[info] ) keyword[for] identifier[info] keyword[in] ( identifier[self] . identifier[server] , identifier[self] . identifier[ipv4] , identifier[self] . identifier[ipv6] , identifier[self] . identifier[port] , identifier[self] . identifier[path] ) keyword[if] identifier[info] ]
keyword[return] identifier[SecuredBMAEndpoint] . identifier[API] + literal[string] + literal[string] . identifier[join] ( identifier[inlined] ) | def inline(self) -> str:
"""
Return endpoint string
:return:
"""
inlined = [str(info) for info in (self.server, self.ipv4, self.ipv6, self.port, self.path) if info]
return SecuredBMAEndpoint.API + ' ' + ' '.join(inlined) |
def name(value):
    """Get the string title for a particular type.

    Given a value, return an appropriate string title for its type so the
    value can be re-cast later.  None maps to 'any'; types not found in
    TESTS fall back to 'string'.
    """
    if value is None:
        return 'any'
    for candidate_type, title in TESTS:
        if isinstance(value, candidate_type):
            return title
    return 'string'
constant[Get the string title for a particular type.
Given a value, get an appropriate string title for the type that can
be used to re-cast the value later.
]
if compare[name[value] is constant[None]] begin[:]
return[constant[any]]
for taget[tuple[[<ast.Name object at 0x7da20e954910>, <ast.Name object at 0x7da20e954400>]]] in starred[name[TESTS]] begin[:]
if call[name[isinstance], parameter[name[value], name[test]]] begin[:]
return[name[name]]
return[constant[string]] | keyword[def] identifier[name] ( identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return] literal[string]
keyword[for] ( identifier[test] , identifier[name] ) keyword[in] identifier[TESTS] :
keyword[if] identifier[isinstance] ( identifier[value] , identifier[test] ):
keyword[return] identifier[name]
keyword[return] literal[string] | def name(value):
"""Get the string title for a particular type.
Given a value, get an appropriate string title for the type that can
be used to re-cast the value later.
"""
if value is None:
return 'any' # depends on [control=['if'], data=[]]
for (test, name) in TESTS:
if isinstance(value, test):
return name # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return 'string' |
def gameValue(self):
    """Identify the corresponding internal SC2 game value for self.type's value.

    Returns:
        The game value mapped to ``self.type.name`` when ALLOWED_TYPES is a
        name->value dict, otherwise None.  A None ``self.type`` also yields
        None (such values are okay here).
    """
    allowed = type(self).ALLOWED_TYPES
    # If ALLOWED_TYPES is not a dict, there is no internal game value
    # mapping defined.
    if not isinstance(allowed, dict):
        return None
    try:
        return allowed.get(self.type.name)
    # BUG FIX: narrowed from a bare `except:` (which also swallowed
    # KeyboardInterrupt/SystemExit) to the one expected failure mode:
    # self.type being None.
    except AttributeError:
        return None
constant[identify the correpsonding internal SC2 game value for self.type's value]
variable[allowed] assign[=] call[name[type], parameter[name[self]]].ALLOWED_TYPES
<ast.Try object at 0x7da20c7c80a0>
return[constant[None]] | keyword[def] identifier[gameValue] ( identifier[self] ):
literal[string]
identifier[allowed] = identifier[type] ( identifier[self] ). identifier[ALLOWED_TYPES]
keyword[try] :
keyword[if] identifier[isinstance] ( identifier[allowed] , identifier[dict] ):
keyword[return] identifier[allowed] . identifier[get] ( identifier[self] . identifier[type] . identifier[name] )
keyword[except] : keyword[pass]
keyword[return] keyword[None] | def gameValue(self):
"""identify the correpsonding internal SC2 game value for self.type's value"""
allowed = type(self).ALLOWED_TYPES
try:
if isinstance(allowed, dict): # if ALLOWED_TYPES is not a dict, there is no-internal game value mapping defined
return allowed.get(self.type.name) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except:
pass # None .type values are okay -- such result in a None gameValue() result # depends on [control=['except'], data=[]]
return None |
def resolution_order(lang, override=None):
    """
    Return the order of languages which should be checked for `lang`.

    The first entry is always `lang` itself, followed by its fallback
    languages.  Entries in `override` take priority over
    settings.FALLBACK_LANGUAGES.
    """
    if not settings.ENABLE_FALLBACKS:
        return (lang,)
    overrides = {} if override is None else override
    lang_fallbacks = overrides.get(lang, settings.FALLBACK_LANGUAGES.get(lang, ()))
    default_fallbacks = overrides.get('default', settings.FALLBACK_LANGUAGES['default'])
    return tuple(unique((lang,) + lang_fallbacks + default_fallbacks))
constant[
Return order of languages which should be checked for parameter language.
First is always the parameter language, later are fallback languages.
Override parameter has priority over FALLBACK_LANGUAGES.
]
if <ast.UnaryOp object at 0x7da18f58e9e0> begin[:]
return[tuple[[<ast.Name object at 0x7da18f58ea70>]]]
if compare[name[override] is constant[None]] begin[:]
variable[override] assign[=] dictionary[[], []]
variable[fallback_for_lang] assign[=] call[name[override].get, parameter[name[lang], call[name[settings].FALLBACK_LANGUAGES.get, parameter[name[lang], tuple[[]]]]]]
variable[fallback_def] assign[=] call[name[override].get, parameter[constant[default], call[name[settings].FALLBACK_LANGUAGES][constant[default]]]]
variable[order] assign[=] binary_operation[binary_operation[tuple[[<ast.Name object at 0x7da18f58cb50>]] + name[fallback_for_lang]] + name[fallback_def]]
return[call[name[tuple], parameter[call[name[unique], parameter[name[order]]]]]] | keyword[def] identifier[resolution_order] ( identifier[lang] , identifier[override] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[settings] . identifier[ENABLE_FALLBACKS] :
keyword[return] ( identifier[lang] ,)
keyword[if] identifier[override] keyword[is] keyword[None] :
identifier[override] ={}
identifier[fallback_for_lang] = identifier[override] . identifier[get] ( identifier[lang] , identifier[settings] . identifier[FALLBACK_LANGUAGES] . identifier[get] ( identifier[lang] ,()))
identifier[fallback_def] = identifier[override] . identifier[get] ( literal[string] , identifier[settings] . identifier[FALLBACK_LANGUAGES] [ literal[string] ])
identifier[order] =( identifier[lang] ,)+ identifier[fallback_for_lang] + identifier[fallback_def]
keyword[return] identifier[tuple] ( identifier[unique] ( identifier[order] )) | def resolution_order(lang, override=None):
"""
Return order of languages which should be checked for parameter language.
First is always the parameter language, later are fallback languages.
Override parameter has priority over FALLBACK_LANGUAGES.
"""
if not settings.ENABLE_FALLBACKS:
return (lang,) # depends on [control=['if'], data=[]]
if override is None:
override = {} # depends on [control=['if'], data=['override']]
fallback_for_lang = override.get(lang, settings.FALLBACK_LANGUAGES.get(lang, ()))
fallback_def = override.get('default', settings.FALLBACK_LANGUAGES['default'])
order = (lang,) + fallback_for_lang + fallback_def
return tuple(unique(order)) |
def child_added(self, child):
    """ Reset the item cache when a child is added """
    # Let the superclass perform its own child bookkeeping first.
    super(AbstractItemView, self).child_added(child)
    # Invalidate the cached '_items' member so it is recomputed the next
    # time it is accessed.
    self.get_member('_items').reset(self)
constant[ Reset the item cache when a child is added ]
call[call[name[super], parameter[name[AbstractItemView], name[self]]].child_added, parameter[name[child]]]
call[call[name[self].get_member, parameter[constant[_items]]].reset, parameter[name[self]]] | keyword[def] identifier[child_added] ( identifier[self] , identifier[child] ):
literal[string]
identifier[super] ( identifier[AbstractItemView] , identifier[self] ). identifier[child_added] ( identifier[child] )
identifier[self] . identifier[get_member] ( literal[string] ). identifier[reset] ( identifier[self] ) | def child_added(self, child):
""" Reset the item cache when a child is added """
super(AbstractItemView, self).child_added(child)
self.get_member('_items').reset(self) |
def _observation(self, observation):
    """Preprocess a raw Atari frame into an 84x84 uint8 grayscale image.

    Following the DQN paper: (1) take the element-wise maximum of the
    current and previous frame to cancel sprite flicker (some Atari 2600
    sprites appear only on odd or only on even frames), remembering the
    current frame for the next call; (2) rescale to 84x84 and keep only
    the luminance channel.
    """
    # Max over consecutive frames removes odd/even-frame flicker.
    flicker_free = np.maximum(observation, self.previous_frame)
    self.previous_frame = observation
    # Resize to 84x84, then drop colour via PIL's 'L' (luminance) mode.
    frame = Image.fromarray(flicker_free).resize([84, 84]).convert('L')
    return np.asarray(frame, dtype=np.uint8)
constant[ Paper: First, to encode a single frame we take the maximum value for each pixel colour
value over the frame being encoded and the previous frame. This was necessary to
remove flickering that is present in games where some objects appear only in even
frames while other objects appear only in odd frames, an artefact caused by the
limited number of sprites Atari 2600 can display at once. ]
variable[obs] assign[=] call[name[np].maximum, parameter[name[observation], name[self].previous_frame]]
name[self].previous_frame assign[=] name[observation]
constant[ Paper: Second, we then extract
the Y channel, also known as luminance, from the RGB frame and rescale it to
84 x 84 ]
variable[img] assign[=] call[name[Image].fromarray, parameter[name[obs]]]
variable[obs] assign[=] call[call[name[img].resize, parameter[list[[<ast.Constant object at 0x7da1b246a260>, <ast.Constant object at 0x7da1b2468d30>]]]].convert, parameter[constant[L]]]
variable[obs] assign[=] call[name[np].asarray, parameter[name[obs]]]
return[name[obs]] | keyword[def] identifier[_observation] ( identifier[self] , identifier[observation] ):
literal[string]
identifier[obs] = identifier[np] . identifier[maximum] ( identifier[observation] , identifier[self] . identifier[previous_frame] )
identifier[self] . identifier[previous_frame] = identifier[observation]
literal[string]
identifier[img] = identifier[Image] . identifier[fromarray] ( identifier[obs] )
identifier[obs] = identifier[img] . identifier[resize] ([ literal[int] , literal[int] ]). identifier[convert] ( literal[string] )
identifier[obs] = identifier[np] . identifier[asarray] ( identifier[obs] , identifier[dtype] = identifier[np] . identifier[uint8] )
keyword[return] identifier[obs] | def _observation(self, observation):
""" Paper: First, to encode a single frame we take the maximum value for each pixel colour
value over the frame being encoded and the previous frame. This was necessary to
remove flickering that is present in games where some objects appear only in even
frames while other objects appear only in odd frames, an artefact caused by the
limited number of sprites Atari 2600 can display at once. """
obs = np.maximum(observation, self.previous_frame)
self.previous_frame = observation
' Paper: Second, we then extract\n the Y channel, also known as luminance, from the RGB frame and rescale it to\n 84 x 84 '
img = Image.fromarray(obs)
obs = img.resize([84, 84]).convert('L')
obs = np.asarray(obs, dtype=np.uint8)
return obs |
def _setup_validation(self, sender, **kwargs):
"""
User a customer setter for the field to validate new value against the old one.
The current value is set as '_enum_[att_name]' on the model instance.
"""
att_name = self.get_attname()
private_att_name = '_enum_%s' % att_name
enum = self.enum
def set_enum(self, new_value):
if hasattr(self, private_att_name):
# Fetch previous value from private enum attribute.
old_value = getattr(self, private_att_name)
else:
# First setattr no previous value on instance.
old_value = new_value
# Update private enum attribute with new value
setattr(self, private_att_name, new_value)
self.__dict__[att_name] = new_value
# Run validation for new value.
validators.validate_valid_transition(enum, old_value, new_value)
def get_enum(self):
return getattr(self, private_att_name)
def delete_enum(self):
self.__dict__[att_name] = None
return setattr(self, private_att_name, None)
if not sender._meta.abstract:
setattr(sender, att_name, property(get_enum, set_enum, delete_enum)) | def function[_setup_validation, parameter[self, sender]]:
constant[
User a customer setter for the field to validate new value against the old one.
The current value is set as '_enum_[att_name]' on the model instance.
]
variable[att_name] assign[=] call[name[self].get_attname, parameter[]]
variable[private_att_name] assign[=] binary_operation[constant[_enum_%s] <ast.Mod object at 0x7da2590d6920> name[att_name]]
variable[enum] assign[=] name[self].enum
def function[set_enum, parameter[self, new_value]]:
if call[name[hasattr], parameter[name[self], name[private_att_name]]] begin[:]
variable[old_value] assign[=] call[name[getattr], parameter[name[self], name[private_att_name]]]
call[name[setattr], parameter[name[self], name[private_att_name], name[new_value]]]
call[name[self].__dict__][name[att_name]] assign[=] name[new_value]
call[name[validators].validate_valid_transition, parameter[name[enum], name[old_value], name[new_value]]]
def function[get_enum, parameter[self]]:
return[call[name[getattr], parameter[name[self], name[private_att_name]]]]
def function[delete_enum, parameter[self]]:
call[name[self].__dict__][name[att_name]] assign[=] constant[None]
return[call[name[setattr], parameter[name[self], name[private_att_name], constant[None]]]]
if <ast.UnaryOp object at 0x7da1b115f400> begin[:]
call[name[setattr], parameter[name[sender], name[att_name], call[name[property], parameter[name[get_enum], name[set_enum], name[delete_enum]]]]] | keyword[def] identifier[_setup_validation] ( identifier[self] , identifier[sender] ,** identifier[kwargs] ):
literal[string]
identifier[att_name] = identifier[self] . identifier[get_attname] ()
identifier[private_att_name] = literal[string] % identifier[att_name]
identifier[enum] = identifier[self] . identifier[enum]
keyword[def] identifier[set_enum] ( identifier[self] , identifier[new_value] ):
keyword[if] identifier[hasattr] ( identifier[self] , identifier[private_att_name] ):
identifier[old_value] = identifier[getattr] ( identifier[self] , identifier[private_att_name] )
keyword[else] :
identifier[old_value] = identifier[new_value]
identifier[setattr] ( identifier[self] , identifier[private_att_name] , identifier[new_value] )
identifier[self] . identifier[__dict__] [ identifier[att_name] ]= identifier[new_value]
identifier[validators] . identifier[validate_valid_transition] ( identifier[enum] , identifier[old_value] , identifier[new_value] )
keyword[def] identifier[get_enum] ( identifier[self] ):
keyword[return] identifier[getattr] ( identifier[self] , identifier[private_att_name] )
keyword[def] identifier[delete_enum] ( identifier[self] ):
identifier[self] . identifier[__dict__] [ identifier[att_name] ]= keyword[None]
keyword[return] identifier[setattr] ( identifier[self] , identifier[private_att_name] , keyword[None] )
keyword[if] keyword[not] identifier[sender] . identifier[_meta] . identifier[abstract] :
identifier[setattr] ( identifier[sender] , identifier[att_name] , identifier[property] ( identifier[get_enum] , identifier[set_enum] , identifier[delete_enum] )) | def _setup_validation(self, sender, **kwargs):
"""
User a customer setter for the field to validate new value against the old one.
The current value is set as '_enum_[att_name]' on the model instance.
"""
att_name = self.get_attname()
private_att_name = '_enum_%s' % att_name
enum = self.enum
def set_enum(self, new_value):
if hasattr(self, private_att_name):
# Fetch previous value from private enum attribute.
old_value = getattr(self, private_att_name) # depends on [control=['if'], data=[]]
else:
# First setattr no previous value on instance.
old_value = new_value
# Update private enum attribute with new value
setattr(self, private_att_name, new_value)
self.__dict__[att_name] = new_value
# Run validation for new value.
validators.validate_valid_transition(enum, old_value, new_value)
def get_enum(self):
return getattr(self, private_att_name)
def delete_enum(self):
self.__dict__[att_name] = None
return setattr(self, private_att_name, None)
if not sender._meta.abstract:
setattr(sender, att_name, property(get_enum, set_enum, delete_enum)) # depends on [control=['if'], data=[]] |
def _displayFeatures(self, fig, features, minX, maxX, offsetAdjuster):
"""
Add the given C{features} to the figure in C{fig}.
@param fig: A matplotlib figure.
@param features: A C{FeatureList} instance.
@param minX: The smallest x coordinate.
@param maxX: The largest x coordinate.
@param offsetAdjuster: a function for adjusting feature X axis offsets
for plotting.
"""
frame = None
labels = []
for feature in features:
start = offsetAdjuster(feature.start)
end = offsetAdjuster(feature.end)
if feature.subfeature:
subfeatureFrame = start % 3
if subfeatureFrame == frame:
# Move overlapping subfeatures down a little to make them
# visible.
y = subfeatureFrame - 0.2
else:
y = subfeatureFrame
else:
frame = start % 3
# If we have a polyprotein, shift it up slightly so we can see
# its components below it.
product = feature.feature.qualifiers.get('product', [''])[0]
if product.lower().find('polyprotein') > -1:
y = frame + 0.2
else:
y = frame
fig.plot([start, end], [y, y], color=feature.color, linewidth=2)
labels.append(feature.legendLabel())
# Note that minX and maxX do not need to be adjusted by the offset
# adjuster. They are the already-adjusted min/max values as
# computed in computePlotInfo in blast.py
fig.axis([minX, maxX, -0.5, 2.5])
fig.set_yticks(np.arange(3))
fig.set_ylabel('Frame')
if labels:
# Put a legend above the figure.
box = fig.get_position()
fig.set_position([box.x0, box.y0,
box.width, box.height * 0.3])
fig.legend(labels, loc='lower center', bbox_to_anchor=(0.5, 2.5),
fancybox=True, shadow=True, ncol=2) | def function[_displayFeatures, parameter[self, fig, features, minX, maxX, offsetAdjuster]]:
constant[
Add the given C{features} to the figure in C{fig}.
@param fig: A matplotlib figure.
@param features: A C{FeatureList} instance.
@param minX: The smallest x coordinate.
@param maxX: The largest x coordinate.
@param offsetAdjuster: a function for adjusting feature X axis offsets
for plotting.
]
variable[frame] assign[=] constant[None]
variable[labels] assign[=] list[[]]
for taget[name[feature]] in starred[name[features]] begin[:]
variable[start] assign[=] call[name[offsetAdjuster], parameter[name[feature].start]]
variable[end] assign[=] call[name[offsetAdjuster], parameter[name[feature].end]]
if name[feature].subfeature begin[:]
variable[subfeatureFrame] assign[=] binary_operation[name[start] <ast.Mod object at 0x7da2590d6920> constant[3]]
if compare[name[subfeatureFrame] equal[==] name[frame]] begin[:]
variable[y] assign[=] binary_operation[name[subfeatureFrame] - constant[0.2]]
call[name[fig].plot, parameter[list[[<ast.Name object at 0x7da20e957fa0>, <ast.Name object at 0x7da20e9550c0>]], list[[<ast.Name object at 0x7da20e957490>, <ast.Name object at 0x7da20e954cd0>]]]]
call[name[labels].append, parameter[call[name[feature].legendLabel, parameter[]]]]
call[name[fig].axis, parameter[list[[<ast.Name object at 0x7da20e9563e0>, <ast.Name object at 0x7da20e954ac0>, <ast.UnaryOp object at 0x7da20e954550>, <ast.Constant object at 0x7da20e955660>]]]]
call[name[fig].set_yticks, parameter[call[name[np].arange, parameter[constant[3]]]]]
call[name[fig].set_ylabel, parameter[constant[Frame]]]
if name[labels] begin[:]
variable[box] assign[=] call[name[fig].get_position, parameter[]]
call[name[fig].set_position, parameter[list[[<ast.Attribute object at 0x7da20e955b10>, <ast.Attribute object at 0x7da20e9571f0>, <ast.Attribute object at 0x7da20e957460>, <ast.BinOp object at 0x7da20e954580>]]]]
call[name[fig].legend, parameter[name[labels]]] | keyword[def] identifier[_displayFeatures] ( identifier[self] , identifier[fig] , identifier[features] , identifier[minX] , identifier[maxX] , identifier[offsetAdjuster] ):
literal[string]
identifier[frame] = keyword[None]
identifier[labels] =[]
keyword[for] identifier[feature] keyword[in] identifier[features] :
identifier[start] = identifier[offsetAdjuster] ( identifier[feature] . identifier[start] )
identifier[end] = identifier[offsetAdjuster] ( identifier[feature] . identifier[end] )
keyword[if] identifier[feature] . identifier[subfeature] :
identifier[subfeatureFrame] = identifier[start] % literal[int]
keyword[if] identifier[subfeatureFrame] == identifier[frame] :
identifier[y] = identifier[subfeatureFrame] - literal[int]
keyword[else] :
identifier[y] = identifier[subfeatureFrame]
keyword[else] :
identifier[frame] = identifier[start] % literal[int]
identifier[product] = identifier[feature] . identifier[feature] . identifier[qualifiers] . identifier[get] ( literal[string] ,[ literal[string] ])[ literal[int] ]
keyword[if] identifier[product] . identifier[lower] (). identifier[find] ( literal[string] )>- literal[int] :
identifier[y] = identifier[frame] + literal[int]
keyword[else] :
identifier[y] = identifier[frame]
identifier[fig] . identifier[plot] ([ identifier[start] , identifier[end] ],[ identifier[y] , identifier[y] ], identifier[color] = identifier[feature] . identifier[color] , identifier[linewidth] = literal[int] )
identifier[labels] . identifier[append] ( identifier[feature] . identifier[legendLabel] ())
identifier[fig] . identifier[axis] ([ identifier[minX] , identifier[maxX] ,- literal[int] , literal[int] ])
identifier[fig] . identifier[set_yticks] ( identifier[np] . identifier[arange] ( literal[int] ))
identifier[fig] . identifier[set_ylabel] ( literal[string] )
keyword[if] identifier[labels] :
identifier[box] = identifier[fig] . identifier[get_position] ()
identifier[fig] . identifier[set_position] ([ identifier[box] . identifier[x0] , identifier[box] . identifier[y0] ,
identifier[box] . identifier[width] , identifier[box] . identifier[height] * literal[int] ])
identifier[fig] . identifier[legend] ( identifier[labels] , identifier[loc] = literal[string] , identifier[bbox_to_anchor] =( literal[int] , literal[int] ),
identifier[fancybox] = keyword[True] , identifier[shadow] = keyword[True] , identifier[ncol] = literal[int] ) | def _displayFeatures(self, fig, features, minX, maxX, offsetAdjuster):
"""
Add the given C{features} to the figure in C{fig}.
@param fig: A matplotlib figure.
@param features: A C{FeatureList} instance.
@param minX: The smallest x coordinate.
@param maxX: The largest x coordinate.
@param offsetAdjuster: a function for adjusting feature X axis offsets
for plotting.
"""
frame = None
labels = []
for feature in features:
start = offsetAdjuster(feature.start)
end = offsetAdjuster(feature.end)
if feature.subfeature:
subfeatureFrame = start % 3
if subfeatureFrame == frame:
# Move overlapping subfeatures down a little to make them
# visible.
y = subfeatureFrame - 0.2 # depends on [control=['if'], data=['subfeatureFrame']]
else:
y = subfeatureFrame # depends on [control=['if'], data=[]]
else:
frame = start % 3
# If we have a polyprotein, shift it up slightly so we can see
# its components below it.
product = feature.feature.qualifiers.get('product', [''])[0]
if product.lower().find('polyprotein') > -1:
y = frame + 0.2 # depends on [control=['if'], data=[]]
else:
y = frame
fig.plot([start, end], [y, y], color=feature.color, linewidth=2)
labels.append(feature.legendLabel()) # depends on [control=['for'], data=['feature']]
# Note that minX and maxX do not need to be adjusted by the offset
# adjuster. They are the already-adjusted min/max values as
# computed in computePlotInfo in blast.py
fig.axis([minX, maxX, -0.5, 2.5])
fig.set_yticks(np.arange(3))
fig.set_ylabel('Frame')
if labels:
# Put a legend above the figure.
box = fig.get_position()
fig.set_position([box.x0, box.y0, box.width, box.height * 0.3])
fig.legend(labels, loc='lower center', bbox_to_anchor=(0.5, 2.5), fancybox=True, shadow=True, ncol=2) # depends on [control=['if'], data=[]] |
def sethost(self, host):
    """Store *host* and derive the ``address`` attribute from it.

    The host name is recorded privately and its resolved IP address is
    stored on ``self.address``.
    """
    self.__host = host
    # Resolve the host name to an IP and keep the result alongside it.
    resolved = socket.gethostbyname(host)
    self.address = resolved
constant[Setter method that also sets the address property as a result
of the host that is set.]
name[self].__host assign[=] name[host]
name[self].address assign[=] call[name[socket].gethostbyname, parameter[name[host]]] | keyword[def] identifier[sethost] ( identifier[self] , identifier[host] ):
literal[string]
identifier[self] . identifier[__host] = identifier[host]
identifier[self] . identifier[address] = identifier[socket] . identifier[gethostbyname] ( identifier[host] ) | def sethost(self, host):
"""Setter method that also sets the address property as a result
of the host that is set."""
self.__host = host
self.address = socket.gethostbyname(host) |
def set_residual(self, pores=None, overwrite=False):
    r"""
    Method to start invasion in a network w. residual saturation.
    Called after inlets are set.

    Parameters
    ----------
    pores : array_like, optional
        The pores locations that are to be filled with invader at the
        beginning of the simulation.  Defaults to an empty selection.
    overwrite : boolean
        If ``True`` then all existing inlet locations will be removed and
        then the supplied locations will be added. If ``False``, then
        supplied locations are added to any already existing locations.

    Notes
    -----
    Currently works for pores only and treats inner throats, i.e.
    those that connect two pores in the cluster as invaded and outer ones
    as uninvaded. Uninvaded throats are added to a new residual cluster
    queue but do not start invading independently if not connected to an
    inlet.

    Step 1. Identify clusters in the phase occupancy.
    Step 2. Look for clusters that are connected or contain an inlet
    Step 3. For those that are merge into inlet cluster. May be connected
    to more than one - run should sort this out
    Step 4. For those that are isolated set the queue to not invading.
    Step 5. (in run) When isolated cluster is met my invading cluster it
    merges in and starts invading
    """
    # Fix for the shared mutable-default-argument pitfall: the previous
    # signature used ``pores=[]``; build a fresh list per call instead.
    if pores is None:
        pores = []
    Ps = self._parse_indices(pores)
    if overwrite:
        self['pore.residual'] = False
    self['pore.residual'][Ps] = True
    residual = self['pore.residual']
    net = self.project.network
    conns = net['throat.conns']
    # Label connected clusters of residual-occupied pores; -1 marks
    # unclustered sites, so keep only the real cluster ids.
    rclusters = site_percolation(conns, residual).sites
    rcluster_ids = np.unique(rclusters[rclusters > -1])
    initial_num = len(self.queue)-1
    for rcluster_id in rcluster_ids:
        rPs = rclusters == rcluster_id
        existing = np.unique(self['pore.cluster'][rPs])
        existing = existing[existing > -1]
        if len(existing) > 0:
            # There was at least one inlet cluster connected to this
            # residual cluster, pick the first one.
            cluster_num = existing[0]
        else:
            # Make a new cluster queue
            cluster_num = len(self.queue)
            self.queue.append([])
        queue = self.queue[cluster_num]
        # Set the residual pores and inner ('xnor') throats as part of
        # the cluster; they count as already invaded, so they receive
        # sequence 0 and an invasion pressure of -inf.
        self['pore.cluster'][rPs] = cluster_num
        Ts = net.find_neighbor_throats(pores=rPs,
                                       flatten=True,
                                       mode='xnor')
        self['throat.cluster'][Ts] = cluster_num
        self['pore.invasion_sequence'][rPs] = 0
        self['throat.invasion_sequence'][Ts] = 0
        self['pore.invasion_pressure'][rPs] = -np.inf
        self['throat.invasion_pressure'][Ts] = -np.inf
        # Add all the outer ('exclusive_or') throats to the queue.  Heap
        # entries are [Pc, element index, element type] so they pop in
        # order of increasing entry pressure.
        Ts = net.find_neighbor_throats(pores=rPs,
                                       flatten=True,
                                       mode='exclusive_or')
        for T in Ts:
            hq.heappush(queue, [self['throat.entry_pressure'][T], T, 'throat'])
    self.invasion_running = [True]*len(self.queue)
    # We have added new clusters that are currently isolated and we need
    # to stop them invading until they merge into an invading cluster.
    for c_num in range(initial_num + 1, len(self.queue)):
        self.invasion_running[c_num] = False
constant[
Method to start invasion in a network w. residual saturation.
Called after inlets are set.
Parameters
----------
pores : array_like
The pores locations that are to be filled with invader at the
beginning of the simulation.
overwrite : boolean
If ``True`` then all existing inlet locations will be removed and
then the supplied locations will be added. If ``False``, then
supplied locations are added to any already existing locations.
Notes
-----
Currently works for pores only and treats inner throats, i.e.
those that connect two pores in the cluster as invaded and outer ones
as uninvaded. Uninvaded throats are added to a new residual cluster
queue but do not start invading independently if not connected to an
inlet.
Step 1. Identify clusters in the phase occupancy.
Step 2. Look for clusters that are connected or contain an inlet
Step 3. For those that are merge into inlet cluster. May be connected
to more than one - run should sort this out
Step 4. For those that are isolated set the queue to not invading.
Step 5. (in run) When isolated cluster is met my invading cluster it
merges in and starts invading
]
variable[Ps] assign[=] call[name[self]._parse_indices, parameter[name[pores]]]
if name[overwrite] begin[:]
call[name[self]][constant[pore.residual]] assign[=] constant[False]
call[call[name[self]][constant[pore.residual]]][name[Ps]] assign[=] constant[True]
variable[residual] assign[=] call[name[self]][constant[pore.residual]]
variable[net] assign[=] name[self].project.network
variable[conns] assign[=] call[name[net]][constant[throat.conns]]
variable[rclusters] assign[=] call[name[site_percolation], parameter[name[conns], name[residual]]].sites
variable[rcluster_ids] assign[=] call[name[np].unique, parameter[call[name[rclusters]][compare[name[rclusters] greater[>] <ast.UnaryOp object at 0x7da207f03160>]]]]
variable[initial_num] assign[=] binary_operation[call[name[len], parameter[name[self].queue]] - constant[1]]
for taget[name[rcluster_id]] in starred[name[rcluster_ids]] begin[:]
variable[rPs] assign[=] compare[name[rclusters] equal[==] name[rcluster_id]]
variable[existing] assign[=] call[name[np].unique, parameter[call[call[name[self]][constant[pore.cluster]]][name[rPs]]]]
variable[existing] assign[=] call[name[existing]][compare[name[existing] greater[>] <ast.UnaryOp object at 0x7da207f02a40>]]
if compare[call[name[len], parameter[name[existing]]] greater[>] constant[0]] begin[:]
variable[cluster_num] assign[=] call[name[existing]][constant[0]]
variable[queue] assign[=] call[name[self].queue][name[cluster_num]]
call[call[name[self]][constant[pore.cluster]]][name[rPs]] assign[=] name[cluster_num]
variable[Ts] assign[=] call[name[net].find_neighbor_throats, parameter[]]
call[call[name[self]][constant[throat.cluster]]][name[Ts]] assign[=] name[cluster_num]
call[call[name[self]][constant[pore.invasion_sequence]]][name[rPs]] assign[=] constant[0]
call[call[name[self]][constant[throat.invasion_sequence]]][name[Ts]] assign[=] constant[0]
call[call[name[self]][constant[pore.invasion_pressure]]][name[rPs]] assign[=] <ast.UnaryOp object at 0x7da204961930>
call[call[name[self]][constant[throat.invasion_pressure]]][name[Ts]] assign[=] <ast.UnaryOp object at 0x7da204963580>
variable[Ts] assign[=] call[name[net].find_neighbor_throats, parameter[]]
for taget[name[T]] in starred[name[Ts]] begin[:]
variable[data] assign[=] list[[]]
call[name[data].append, parameter[call[call[name[self]][constant[throat.entry_pressure]]][name[T]]]]
call[name[data].append, parameter[name[T]]]
call[name[data].append, parameter[constant[throat]]]
call[name[hq].heappush, parameter[name[queue], name[data]]]
name[self].invasion_running assign[=] binary_operation[list[[<ast.Constant object at 0x7da18eb567d0>]] * call[name[len], parameter[name[self].queue]]]
for taget[name[c_num]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].queue]]]]] begin[:]
if compare[name[c_num] greater[>] name[initial_num]] begin[:]
call[name[self].invasion_running][name[c_num]] assign[=] constant[False] | keyword[def] identifier[set_residual] ( identifier[self] , identifier[pores] =[], identifier[overwrite] = keyword[False] ):
literal[string]
identifier[Ps] = identifier[self] . identifier[_parse_indices] ( identifier[pores] )
keyword[if] identifier[overwrite] :
identifier[self] [ literal[string] ]= keyword[False]
identifier[self] [ literal[string] ][ identifier[Ps] ]= keyword[True]
identifier[residual] = identifier[self] [ literal[string] ]
identifier[net] = identifier[self] . identifier[project] . identifier[network]
identifier[conns] = identifier[net] [ literal[string] ]
identifier[rclusters] = identifier[site_percolation] ( identifier[conns] , identifier[residual] ). identifier[sites]
identifier[rcluster_ids] = identifier[np] . identifier[unique] ( identifier[rclusters] [ identifier[rclusters] >- literal[int] ])
identifier[initial_num] = identifier[len] ( identifier[self] . identifier[queue] )- literal[int]
keyword[for] identifier[rcluster_id] keyword[in] identifier[rcluster_ids] :
identifier[rPs] = identifier[rclusters] == identifier[rcluster_id]
identifier[existing] = identifier[np] . identifier[unique] ( identifier[self] [ literal[string] ][ identifier[rPs] ])
identifier[existing] = identifier[existing] [ identifier[existing] >- literal[int] ]
keyword[if] identifier[len] ( identifier[existing] )> literal[int] :
identifier[cluster_num] = identifier[existing] [ literal[int] ]
keyword[else] :
identifier[cluster_num] = identifier[len] ( identifier[self] . identifier[queue] )
identifier[self] . identifier[queue] . identifier[append] ([])
identifier[queue] = identifier[self] . identifier[queue] [ identifier[cluster_num] ]
identifier[self] [ literal[string] ][ identifier[rPs] ]= identifier[cluster_num]
identifier[Ts] = identifier[net] . identifier[find_neighbor_throats] ( identifier[pores] = identifier[rPs] ,
identifier[flatten] = keyword[True] ,
identifier[mode] = literal[string] )
identifier[self] [ literal[string] ][ identifier[Ts] ]= identifier[cluster_num]
identifier[self] [ literal[string] ][ identifier[rPs] ]= literal[int]
identifier[self] [ literal[string] ][ identifier[Ts] ]= literal[int]
identifier[self] [ literal[string] ][ identifier[rPs] ]=- identifier[np] . identifier[inf]
identifier[self] [ literal[string] ][ identifier[Ts] ]=- identifier[np] . identifier[inf]
identifier[Ts] = identifier[net] . identifier[find_neighbor_throats] ( identifier[pores] = identifier[rPs] ,
identifier[flatten] = keyword[True] ,
identifier[mode] = literal[string] )
keyword[for] identifier[T] keyword[in] identifier[Ts] :
identifier[data] =[]
identifier[data] . identifier[append] ( identifier[self] [ literal[string] ][ identifier[T] ])
identifier[data] . identifier[append] ( identifier[T] )
identifier[data] . identifier[append] ( literal[string] )
identifier[hq] . identifier[heappush] ( identifier[queue] , identifier[data] )
identifier[self] . identifier[invasion_running] =[ keyword[True] ]* identifier[len] ( identifier[self] . identifier[queue] )
keyword[for] identifier[c_num] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[queue] )):
keyword[if] identifier[c_num] > identifier[initial_num] :
identifier[self] . identifier[invasion_running] [ identifier[c_num] ]= keyword[False] | def set_residual(self, pores=[], overwrite=False):
"""
Method to start invasion in a network w. residual saturation.
Called after inlets are set.
Parameters
----------
pores : array_like
The pores locations that are to be filled with invader at the
beginning of the simulation.
overwrite : boolean
If ``True`` then all existing inlet locations will be removed and
then the supplied locations will be added. If ``False``, then
supplied locations are added to any already existing locations.
Notes
-----
Currently works for pores only and treats inner throats, i.e.
those that connect two pores in the cluster as invaded and outer ones
as uninvaded. Uninvaded throats are added to a new residual cluster
queue but do not start invading independently if not connected to an
inlet.
Step 1. Identify clusters in the phase occupancy.
Step 2. Look for clusters that are connected or contain an inlet
Step 3. For those that are merge into inlet cluster. May be connected
to more than one - run should sort this out
Step 4. For those that are isolated set the queue to not invading.
Step 5. (in run) When isolated cluster is met my invading cluster it
merges in and starts invading
"""
Ps = self._parse_indices(pores)
if overwrite:
self['pore.residual'] = False # depends on [control=['if'], data=[]]
self['pore.residual'][Ps] = True
residual = self['pore.residual']
net = self.project.network
conns = net['throat.conns']
rclusters = site_percolation(conns, residual).sites
rcluster_ids = np.unique(rclusters[rclusters > -1])
initial_num = len(self.queue) - 1
for rcluster_id in rcluster_ids:
rPs = rclusters == rcluster_id
existing = np.unique(self['pore.cluster'][rPs])
existing = existing[existing > -1]
if len(existing) > 0:
# There was at least one inlet cluster connected to this
# residual cluster, pick the first one.
cluster_num = existing[0] # depends on [control=['if'], data=[]]
else:
# Make a new cluster queue
cluster_num = len(self.queue)
self.queue.append([])
queue = self.queue[cluster_num]
# Set the residual pores and inner throats as part of cluster
self['pore.cluster'][rPs] = cluster_num
Ts = net.find_neighbor_throats(pores=rPs, flatten=True, mode='xnor')
self['throat.cluster'][Ts] = cluster_num
self['pore.invasion_sequence'][rPs] = 0
self['throat.invasion_sequence'][Ts] = 0
self['pore.invasion_pressure'][rPs] = -np.inf
self['throat.invasion_pressure'][Ts] = -np.inf
# Add all the outer throats to the queue
Ts = net.find_neighbor_throats(pores=rPs, flatten=True, mode='exclusive_or')
for T in Ts:
data = []
# Pc
data.append(self['throat.entry_pressure'][T])
# Element Index
data.append(T)
# Element Type (Pore of Throat)
data.append('throat')
hq.heappush(queue, data) # depends on [control=['for'], data=['T']] # depends on [control=['for'], data=['rcluster_id']]
self.invasion_running = [True] * len(self.queue)
# we have added new clusters that are currently isolated and we
# need to stop them invading until they merge into an invading
# cluster
for c_num in range(len(self.queue)):
if c_num > initial_num:
self.invasion_running[c_num] = False # depends on [control=['if'], data=['c_num']] # depends on [control=['for'], data=['c_num']] |
def _call_salt_command(self,
fun,
args,
kwargs,
assertion_section=None):
'''
Generic call of salt Caller command
'''
value = False
try:
if args and kwargs:
value = self.salt_lc.cmd(fun, *args, **kwargs)
elif args and not kwargs:
value = self.salt_lc.cmd(fun, *args)
elif not args and kwargs:
value = self.salt_lc.cmd(fun, **kwargs)
else:
value = self.salt_lc.cmd(fun)
except salt.exceptions.SaltException:
raise
except Exception:
raise
if isinstance(value, dict) and assertion_section:
return value.get(assertion_section, False)
else:
return value | def function[_call_salt_command, parameter[self, fun, args, kwargs, assertion_section]]:
constant[
Generic call of salt Caller command
]
variable[value] assign[=] constant[False]
<ast.Try object at 0x7da204567ac0>
if <ast.BoolOp object at 0x7da2045670d0> begin[:]
return[call[name[value].get, parameter[name[assertion_section], constant[False]]]] | keyword[def] identifier[_call_salt_command] ( identifier[self] ,
identifier[fun] ,
identifier[args] ,
identifier[kwargs] ,
identifier[assertion_section] = keyword[None] ):
literal[string]
identifier[value] = keyword[False]
keyword[try] :
keyword[if] identifier[args] keyword[and] identifier[kwargs] :
identifier[value] = identifier[self] . identifier[salt_lc] . identifier[cmd] ( identifier[fun] ,* identifier[args] ,** identifier[kwargs] )
keyword[elif] identifier[args] keyword[and] keyword[not] identifier[kwargs] :
identifier[value] = identifier[self] . identifier[salt_lc] . identifier[cmd] ( identifier[fun] ,* identifier[args] )
keyword[elif] keyword[not] identifier[args] keyword[and] identifier[kwargs] :
identifier[value] = identifier[self] . identifier[salt_lc] . identifier[cmd] ( identifier[fun] ,** identifier[kwargs] )
keyword[else] :
identifier[value] = identifier[self] . identifier[salt_lc] . identifier[cmd] ( identifier[fun] )
keyword[except] identifier[salt] . identifier[exceptions] . identifier[SaltException] :
keyword[raise]
keyword[except] identifier[Exception] :
keyword[raise]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ) keyword[and] identifier[assertion_section] :
keyword[return] identifier[value] . identifier[get] ( identifier[assertion_section] , keyword[False] )
keyword[else] :
keyword[return] identifier[value] | def _call_salt_command(self, fun, args, kwargs, assertion_section=None):
"""
Generic call of salt Caller command
"""
value = False
try:
if args and kwargs:
value = self.salt_lc.cmd(fun, *args, **kwargs) # depends on [control=['if'], data=[]]
elif args and (not kwargs):
value = self.salt_lc.cmd(fun, *args) # depends on [control=['if'], data=[]]
elif not args and kwargs:
value = self.salt_lc.cmd(fun, **kwargs) # depends on [control=['if'], data=[]]
else:
value = self.salt_lc.cmd(fun) # depends on [control=['try'], data=[]]
except salt.exceptions.SaltException:
raise # depends on [control=['except'], data=[]]
except Exception:
raise # depends on [control=['except'], data=[]]
if isinstance(value, dict) and assertion_section:
return value.get(assertion_section, False) # depends on [control=['if'], data=[]]
else:
return value |
def template(tem, queue=False, **kwargs):
'''
Execute the information stored in a template file on the minion.
This function does not ask a master for a SLS file to render but
instead directly processes the file at the provided path on the minion.
CLI Example:
.. code-block:: bash
salt '*' state.template '<Path to template on the minion>'
'''
if 'env' in kwargs:
# "env" is not supported; Use "saltenv".
kwargs.pop('env')
conflict = _check_queue(queue, kwargs)
if conflict is not None:
return conflict
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
try:
st_ = salt.state.HighState(opts,
context=__context__,
proxy=__proxy__,
initial_pillar=_get_initial_pillar(opts))
except NameError:
st_ = salt.state.HighState(opts,
context=__context__,
initial_pillar=_get_initial_pillar(opts))
errors = _get_pillar_errors(kwargs, pillar=st_.opts['pillar'])
if errors:
__context__['retcode'] = salt.defaults.exitcodes.EX_PILLAR_FAILURE
raise CommandExecutionError('Pillar failed to render', info=errors)
if not tem.endswith('.sls'):
tem = '{sls}.sls'.format(sls=tem)
high_state, errors = st_.render_state(tem,
kwargs.get('saltenv', ''),
'',
None,
local=True)
if errors:
__context__['retcode'] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
return errors
ret = st_.state.call_high(high_state)
_set_retcode(ret, highstate=high_state)
return ret | def function[template, parameter[tem, queue]]:
constant[
Execute the information stored in a template file on the minion.
This function does not ask a master for a SLS file to render but
instead directly processes the file at the provided path on the minion.
CLI Example:
.. code-block:: bash
salt '*' state.template '<Path to template on the minion>'
]
if compare[constant[env] in name[kwargs]] begin[:]
call[name[kwargs].pop, parameter[constant[env]]]
variable[conflict] assign[=] call[name[_check_queue], parameter[name[queue], name[kwargs]]]
if compare[name[conflict] is_not constant[None]] begin[:]
return[name[conflict]]
variable[opts] assign[=] call[name[salt].utils.state.get_sls_opts, parameter[name[__opts__]]]
<ast.Try object at 0x7da1b2002cb0>
variable[errors] assign[=] call[name[_get_pillar_errors], parameter[name[kwargs]]]
if name[errors] begin[:]
call[name[__context__]][constant[retcode]] assign[=] name[salt].defaults.exitcodes.EX_PILLAR_FAILURE
<ast.Raise object at 0x7da1b20017e0>
if <ast.UnaryOp object at 0x7da1b2002290> begin[:]
variable[tem] assign[=] call[constant[{sls}.sls].format, parameter[]]
<ast.Tuple object at 0x7da1b2001540> assign[=] call[name[st_].render_state, parameter[name[tem], call[name[kwargs].get, parameter[constant[saltenv], constant[]]], constant[], constant[None]]]
if name[errors] begin[:]
call[name[__context__]][constant[retcode]] assign[=] name[salt].defaults.exitcodes.EX_STATE_COMPILER_ERROR
return[name[errors]]
variable[ret] assign[=] call[name[st_].state.call_high, parameter[name[high_state]]]
call[name[_set_retcode], parameter[name[ret]]]
return[name[ret]] | keyword[def] identifier[template] ( identifier[tem] , identifier[queue] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[conflict] = identifier[_check_queue] ( identifier[queue] , identifier[kwargs] )
keyword[if] identifier[conflict] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[conflict]
identifier[opts] = identifier[salt] . identifier[utils] . identifier[state] . identifier[get_sls_opts] ( identifier[__opts__] ,** identifier[kwargs] )
keyword[try] :
identifier[st_] = identifier[salt] . identifier[state] . identifier[HighState] ( identifier[opts] ,
identifier[context] = identifier[__context__] ,
identifier[proxy] = identifier[__proxy__] ,
identifier[initial_pillar] = identifier[_get_initial_pillar] ( identifier[opts] ))
keyword[except] identifier[NameError] :
identifier[st_] = identifier[salt] . identifier[state] . identifier[HighState] ( identifier[opts] ,
identifier[context] = identifier[__context__] ,
identifier[initial_pillar] = identifier[_get_initial_pillar] ( identifier[opts] ))
identifier[errors] = identifier[_get_pillar_errors] ( identifier[kwargs] , identifier[pillar] = identifier[st_] . identifier[opts] [ literal[string] ])
keyword[if] identifier[errors] :
identifier[__context__] [ literal[string] ]= identifier[salt] . identifier[defaults] . identifier[exitcodes] . identifier[EX_PILLAR_FAILURE]
keyword[raise] identifier[CommandExecutionError] ( literal[string] , identifier[info] = identifier[errors] )
keyword[if] keyword[not] identifier[tem] . identifier[endswith] ( literal[string] ):
identifier[tem] = literal[string] . identifier[format] ( identifier[sls] = identifier[tem] )
identifier[high_state] , identifier[errors] = identifier[st_] . identifier[render_state] ( identifier[tem] ,
identifier[kwargs] . identifier[get] ( literal[string] , literal[string] ),
literal[string] ,
keyword[None] ,
identifier[local] = keyword[True] )
keyword[if] identifier[errors] :
identifier[__context__] [ literal[string] ]= identifier[salt] . identifier[defaults] . identifier[exitcodes] . identifier[EX_STATE_COMPILER_ERROR]
keyword[return] identifier[errors]
identifier[ret] = identifier[st_] . identifier[state] . identifier[call_high] ( identifier[high_state] )
identifier[_set_retcode] ( identifier[ret] , identifier[highstate] = identifier[high_state] )
keyword[return] identifier[ret] | def template(tem, queue=False, **kwargs):
"""
Execute the information stored in a template file on the minion.
This function does not ask a master for a SLS file to render but
instead directly processes the file at the provided path on the minion.
CLI Example:
.. code-block:: bash
salt '*' state.template '<Path to template on the minion>'
"""
if 'env' in kwargs:
# "env" is not supported; Use "saltenv".
kwargs.pop('env') # depends on [control=['if'], data=['kwargs']]
conflict = _check_queue(queue, kwargs)
if conflict is not None:
return conflict # depends on [control=['if'], data=['conflict']]
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
try:
st_ = salt.state.HighState(opts, context=__context__, proxy=__proxy__, initial_pillar=_get_initial_pillar(opts)) # depends on [control=['try'], data=[]]
except NameError:
st_ = salt.state.HighState(opts, context=__context__, initial_pillar=_get_initial_pillar(opts)) # depends on [control=['except'], data=[]]
errors = _get_pillar_errors(kwargs, pillar=st_.opts['pillar'])
if errors:
__context__['retcode'] = salt.defaults.exitcodes.EX_PILLAR_FAILURE
raise CommandExecutionError('Pillar failed to render', info=errors) # depends on [control=['if'], data=[]]
if not tem.endswith('.sls'):
tem = '{sls}.sls'.format(sls=tem) # depends on [control=['if'], data=[]]
(high_state, errors) = st_.render_state(tem, kwargs.get('saltenv', ''), '', None, local=True)
if errors:
__context__['retcode'] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
return errors # depends on [control=['if'], data=[]]
ret = st_.state.call_high(high_state)
_set_retcode(ret, highstate=high_state)
return ret |
def _update_simulation_end_from_lsm(self):
"""
Update simulation end time from LSM
"""
te = self.l2g.xd.lsm.datetime[-1]
simulation_end = te.replace(tzinfo=utc) \
.astimezone(tz=self.tz) \
.replace(tzinfo=None)
if self.simulation_end is None:
self.simulation_end = simulation_end
elif self.simulation_end > simulation_end:
self.simulation_end = simulation_end
self._update_card("END_TIME",
self.simulation_end
.strftime("%Y %m %d %H %M")) | def function[_update_simulation_end_from_lsm, parameter[self]]:
constant[
Update simulation end time from LSM
]
variable[te] assign[=] call[name[self].l2g.xd.lsm.datetime][<ast.UnaryOp object at 0x7da204621870>]
variable[simulation_end] assign[=] call[call[call[name[te].replace, parameter[]].astimezone, parameter[]].replace, parameter[]]
if compare[name[self].simulation_end is constant[None]] begin[:]
name[self].simulation_end assign[=] name[simulation_end]
call[name[self]._update_card, parameter[constant[END_TIME], call[name[self].simulation_end.strftime, parameter[constant[%Y %m %d %H %M]]]]] | keyword[def] identifier[_update_simulation_end_from_lsm] ( identifier[self] ):
literal[string]
identifier[te] = identifier[self] . identifier[l2g] . identifier[xd] . identifier[lsm] . identifier[datetime] [- literal[int] ]
identifier[simulation_end] = identifier[te] . identifier[replace] ( identifier[tzinfo] = identifier[utc] ). identifier[astimezone] ( identifier[tz] = identifier[self] . identifier[tz] ). identifier[replace] ( identifier[tzinfo] = keyword[None] )
keyword[if] identifier[self] . identifier[simulation_end] keyword[is] keyword[None] :
identifier[self] . identifier[simulation_end] = identifier[simulation_end]
keyword[elif] identifier[self] . identifier[simulation_end] > identifier[simulation_end] :
identifier[self] . identifier[simulation_end] = identifier[simulation_end]
identifier[self] . identifier[_update_card] ( literal[string] ,
identifier[self] . identifier[simulation_end]
. identifier[strftime] ( literal[string] )) | def _update_simulation_end_from_lsm(self):
"""
Update simulation end time from LSM
"""
te = self.l2g.xd.lsm.datetime[-1]
simulation_end = te.replace(tzinfo=utc).astimezone(tz=self.tz).replace(tzinfo=None)
if self.simulation_end is None:
self.simulation_end = simulation_end # depends on [control=['if'], data=[]]
elif self.simulation_end > simulation_end:
self.simulation_end = simulation_end # depends on [control=['if'], data=['simulation_end']]
self._update_card('END_TIME', self.simulation_end.strftime('%Y %m %d %H %M')) |
def cluster_sample1():
"Start with wrong number of clusters."
start_centers = [[3.7, 5.5]]
template_clustering(start_centers, SIMPLE_SAMPLES.SAMPLE_SIMPLE1, criterion = splitting_type.BAYESIAN_INFORMATION_CRITERION)
template_clustering(start_centers, SIMPLE_SAMPLES.SAMPLE_SIMPLE1, criterion = splitting_type.MINIMUM_NOISELESS_DESCRIPTION_LENGTH) | def function[cluster_sample1, parameter[]]:
constant[Start with wrong number of clusters.]
variable[start_centers] assign[=] list[[<ast.List object at 0x7da1b015ab00>]]
call[name[template_clustering], parameter[name[start_centers], name[SIMPLE_SAMPLES].SAMPLE_SIMPLE1]]
call[name[template_clustering], parameter[name[start_centers], name[SIMPLE_SAMPLES].SAMPLE_SIMPLE1]] | keyword[def] identifier[cluster_sample1] ():
literal[string]
identifier[start_centers] =[[ literal[int] , literal[int] ]]
identifier[template_clustering] ( identifier[start_centers] , identifier[SIMPLE_SAMPLES] . identifier[SAMPLE_SIMPLE1] , identifier[criterion] = identifier[splitting_type] . identifier[BAYESIAN_INFORMATION_CRITERION] )
identifier[template_clustering] ( identifier[start_centers] , identifier[SIMPLE_SAMPLES] . identifier[SAMPLE_SIMPLE1] , identifier[criterion] = identifier[splitting_type] . identifier[MINIMUM_NOISELESS_DESCRIPTION_LENGTH] ) | def cluster_sample1():
"""Start with wrong number of clusters."""
start_centers = [[3.7, 5.5]]
template_clustering(start_centers, SIMPLE_SAMPLES.SAMPLE_SIMPLE1, criterion=splitting_type.BAYESIAN_INFORMATION_CRITERION)
template_clustering(start_centers, SIMPLE_SAMPLES.SAMPLE_SIMPLE1, criterion=splitting_type.MINIMUM_NOISELESS_DESCRIPTION_LENGTH) |
def wash_url_argument(var, new_type):
"""
Wash argument into 'new_type', that can be 'list', 'str',
'int', 'tuple' or 'dict'.
If needed, the check 'type(var) is not None' should be done before
calling this function.
@param var: variable value
@param new_type: variable type, 'list', 'str', 'int', 'tuple' or 'dict'
@return: as much as possible, value var as type new_type
If var is a list, will change first element into new_type.
If int check unsuccessful, returns 0
"""
out = []
if new_type == 'list': # return lst
if isinstance(var, list):
out = var
else:
out = [var]
elif new_type == 'str': # return str
if isinstance(var, list):
try:
out = "%s" % var[0]
except:
out = ""
elif isinstance(var, str):
out = var
else:
out = "%s" % var
elif new_type == 'int': # return int
if isinstance(var, list):
try:
out = int(var[0])
except:
out = 0
elif isinstance(var, (int, long)):
out = var
elif isinstance(var, str):
try:
out = int(var)
except:
out = 0
else:
out = 0
elif new_type == 'tuple': # return tuple
if isinstance(var, tuple):
out = var
else:
out = (var, )
elif new_type == 'dict': # return dictionary
if isinstance(var, dict):
out = var
else:
out = {0: var}
return out | def function[wash_url_argument, parameter[var, new_type]]:
constant[
Wash argument into 'new_type', that can be 'list', 'str',
'int', 'tuple' or 'dict'.
If needed, the check 'type(var) is not None' should be done before
calling this function.
@param var: variable value
@param new_type: variable type, 'list', 'str', 'int', 'tuple' or 'dict'
@return: as much as possible, value var as type new_type
If var is a list, will change first element into new_type.
If int check unsuccessful, returns 0
]
variable[out] assign[=] list[[]]
if compare[name[new_type] equal[==] constant[list]] begin[:]
if call[name[isinstance], parameter[name[var], name[list]]] begin[:]
variable[out] assign[=] name[var]
return[name[out]] | keyword[def] identifier[wash_url_argument] ( identifier[var] , identifier[new_type] ):
literal[string]
identifier[out] =[]
keyword[if] identifier[new_type] == literal[string] :
keyword[if] identifier[isinstance] ( identifier[var] , identifier[list] ):
identifier[out] = identifier[var]
keyword[else] :
identifier[out] =[ identifier[var] ]
keyword[elif] identifier[new_type] == literal[string] :
keyword[if] identifier[isinstance] ( identifier[var] , identifier[list] ):
keyword[try] :
identifier[out] = literal[string] % identifier[var] [ literal[int] ]
keyword[except] :
identifier[out] = literal[string]
keyword[elif] identifier[isinstance] ( identifier[var] , identifier[str] ):
identifier[out] = identifier[var]
keyword[else] :
identifier[out] = literal[string] % identifier[var]
keyword[elif] identifier[new_type] == literal[string] :
keyword[if] identifier[isinstance] ( identifier[var] , identifier[list] ):
keyword[try] :
identifier[out] = identifier[int] ( identifier[var] [ literal[int] ])
keyword[except] :
identifier[out] = literal[int]
keyword[elif] identifier[isinstance] ( identifier[var] ,( identifier[int] , identifier[long] )):
identifier[out] = identifier[var]
keyword[elif] identifier[isinstance] ( identifier[var] , identifier[str] ):
keyword[try] :
identifier[out] = identifier[int] ( identifier[var] )
keyword[except] :
identifier[out] = literal[int]
keyword[else] :
identifier[out] = literal[int]
keyword[elif] identifier[new_type] == literal[string] :
keyword[if] identifier[isinstance] ( identifier[var] , identifier[tuple] ):
identifier[out] = identifier[var]
keyword[else] :
identifier[out] =( identifier[var] ,)
keyword[elif] identifier[new_type] == literal[string] :
keyword[if] identifier[isinstance] ( identifier[var] , identifier[dict] ):
identifier[out] = identifier[var]
keyword[else] :
identifier[out] ={ literal[int] : identifier[var] }
keyword[return] identifier[out] | def wash_url_argument(var, new_type):
"""
Wash argument into 'new_type', that can be 'list', 'str',
'int', 'tuple' or 'dict'.
If needed, the check 'type(var) is not None' should be done before
calling this function.
@param var: variable value
@param new_type: variable type, 'list', 'str', 'int', 'tuple' or 'dict'
@return: as much as possible, value var as type new_type
If var is a list, will change first element into new_type.
If int check unsuccessful, returns 0
"""
out = []
if new_type == 'list': # return lst
if isinstance(var, list):
out = var # depends on [control=['if'], data=[]]
else:
out = [var] # depends on [control=['if'], data=[]]
elif new_type == 'str': # return str
if isinstance(var, list):
try:
out = '%s' % var[0] # depends on [control=['try'], data=[]]
except:
out = '' # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(var, str):
out = var # depends on [control=['if'], data=[]]
else:
out = '%s' % var # depends on [control=['if'], data=[]]
elif new_type == 'int': # return int
if isinstance(var, list):
try:
out = int(var[0]) # depends on [control=['try'], data=[]]
except:
out = 0 # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(var, (int, long)):
out = var # depends on [control=['if'], data=[]]
elif isinstance(var, str):
try:
out = int(var) # depends on [control=['try'], data=[]]
except:
out = 0 # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
out = 0 # depends on [control=['if'], data=[]]
elif new_type == 'tuple': # return tuple
if isinstance(var, tuple):
out = var # depends on [control=['if'], data=[]]
else:
out = (var,) # depends on [control=['if'], data=[]]
elif new_type == 'dict': # return dictionary
if isinstance(var, dict):
out = var # depends on [control=['if'], data=[]]
else:
out = {0: var} # depends on [control=['if'], data=[]]
return out |
def parse_querystring(self, req, name, field):
"""Pull a querystring value from the request."""
return get_value(req.query_arguments, name, field) | def function[parse_querystring, parameter[self, req, name, field]]:
constant[Pull a querystring value from the request.]
return[call[name[get_value], parameter[name[req].query_arguments, name[name], name[field]]]] | keyword[def] identifier[parse_querystring] ( identifier[self] , identifier[req] , identifier[name] , identifier[field] ):
literal[string]
keyword[return] identifier[get_value] ( identifier[req] . identifier[query_arguments] , identifier[name] , identifier[field] ) | def parse_querystring(self, req, name, field):
"""Pull a querystring value from the request."""
return get_value(req.query_arguments, name, field) |
def dev_dir(self) -> str:
"""Generate a random path to development directory.
:return: Path.
:Example:
/home/sherrell/Development/Python
"""
user = self.user()
folder = self.random.choice(['Development', 'Dev'])
stack = self.random.choice(PROGRAMMING_LANGS)
return str(self._pathlib_home / user / folder / stack) | def function[dev_dir, parameter[self]]:
constant[Generate a random path to development directory.
:return: Path.
:Example:
/home/sherrell/Development/Python
]
variable[user] assign[=] call[name[self].user, parameter[]]
variable[folder] assign[=] call[name[self].random.choice, parameter[list[[<ast.Constant object at 0x7da20c6c75b0>, <ast.Constant object at 0x7da20c6c56c0>]]]]
variable[stack] assign[=] call[name[self].random.choice, parameter[name[PROGRAMMING_LANGS]]]
return[call[name[str], parameter[binary_operation[binary_operation[binary_operation[name[self]._pathlib_home / name[user]] / name[folder]] / name[stack]]]]] | keyword[def] identifier[dev_dir] ( identifier[self] )-> identifier[str] :
literal[string]
identifier[user] = identifier[self] . identifier[user] ()
identifier[folder] = identifier[self] . identifier[random] . identifier[choice] ([ literal[string] , literal[string] ])
identifier[stack] = identifier[self] . identifier[random] . identifier[choice] ( identifier[PROGRAMMING_LANGS] )
keyword[return] identifier[str] ( identifier[self] . identifier[_pathlib_home] / identifier[user] / identifier[folder] / identifier[stack] ) | def dev_dir(self) -> str:
"""Generate a random path to development directory.
:return: Path.
:Example:
/home/sherrell/Development/Python
"""
user = self.user()
folder = self.random.choice(['Development', 'Dev'])
stack = self.random.choice(PROGRAMMING_LANGS)
return str(self._pathlib_home / user / folder / stack) |
def profile_update_args_v3(self, profile):
"""Update v1 profile args to v3 schema for args.
.. code-block:: javascript
"args": {
"app": {
"required": {
"input_strings": "capitalize",
"tc_action": "Capitalize"
},
"optional": {
"fail_on_error": true
}
}
},
"default": {
"api_access_id": "$env.API_ACCESS_ID",
"api_default_org": "$env.API_DEFAULT_ORG",
},
Args:
profile (dict): The dictionary containting the profile settings.
"""
ij = self.load_install_json(profile.get('install_json', 'install.json'))
ijp = self.install_json_params(ij)
if (
profile.get('args', {}).get('app', {}).get('optional') is None
and profile.get('args', {}).get('app', {}).get('required') is None
):
app_args = profile['args'].pop('app')
profile['args']['app'] = {}
profile['args']['app']['optional'] = {}
profile['args']['app']['required'] = {}
for arg in self.profile_settings_args_install_json(ij, None):
required = ijp.get(arg).get('required', False)
try:
if required:
profile['args']['app']['required'][arg] = app_args.pop(arg)
else:
profile['args']['app']['optional'][arg] = app_args.pop(arg)
except KeyError:
if self.args.verbose:
print(
'{}{}Input "{}" not found in profile "{}".'.format(
c.Style.BRIGHT, c.Fore.YELLOW, arg, profile.get('profile_name')
)
)
print(
'{}{}Updating args section to v3 schema for profile {}.'.format(
c.Style.BRIGHT, c.Fore.YELLOW, profile.get('profile_name')
)
) | def function[profile_update_args_v3, parameter[self, profile]]:
constant[Update v1 profile args to v3 schema for args.
.. code-block:: javascript
"args": {
"app": {
"required": {
"input_strings": "capitalize",
"tc_action": "Capitalize"
},
"optional": {
"fail_on_error": true
}
}
},
"default": {
"api_access_id": "$env.API_ACCESS_ID",
"api_default_org": "$env.API_DEFAULT_ORG",
},
Args:
profile (dict): The dictionary containting the profile settings.
]
variable[ij] assign[=] call[name[self].load_install_json, parameter[call[name[profile].get, parameter[constant[install_json], constant[install.json]]]]]
variable[ijp] assign[=] call[name[self].install_json_params, parameter[name[ij]]]
if <ast.BoolOp object at 0x7da18ede7100> begin[:]
variable[app_args] assign[=] call[call[name[profile]][constant[args]].pop, parameter[constant[app]]]
call[call[name[profile]][constant[args]]][constant[app]] assign[=] dictionary[[], []]
call[call[call[name[profile]][constant[args]]][constant[app]]][constant[optional]] assign[=] dictionary[[], []]
call[call[call[name[profile]][constant[args]]][constant[app]]][constant[required]] assign[=] dictionary[[], []]
for taget[name[arg]] in starred[call[name[self].profile_settings_args_install_json, parameter[name[ij], constant[None]]]] begin[:]
variable[required] assign[=] call[call[name[ijp].get, parameter[name[arg]]].get, parameter[constant[required], constant[False]]]
<ast.Try object at 0x7da18fe92350>
call[name[print], parameter[call[constant[{}{}Updating args section to v3 schema for profile {}.].format, parameter[name[c].Style.BRIGHT, name[c].Fore.YELLOW, call[name[profile].get, parameter[constant[profile_name]]]]]]] | keyword[def] identifier[profile_update_args_v3] ( identifier[self] , identifier[profile] ):
literal[string]
identifier[ij] = identifier[self] . identifier[load_install_json] ( identifier[profile] . identifier[get] ( literal[string] , literal[string] ))
identifier[ijp] = identifier[self] . identifier[install_json_params] ( identifier[ij] )
keyword[if] (
identifier[profile] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ) keyword[is] keyword[None]
keyword[and] identifier[profile] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ) keyword[is] keyword[None]
):
identifier[app_args] = identifier[profile] [ literal[string] ]. identifier[pop] ( literal[string] )
identifier[profile] [ literal[string] ][ literal[string] ]={}
identifier[profile] [ literal[string] ][ literal[string] ][ literal[string] ]={}
identifier[profile] [ literal[string] ][ literal[string] ][ literal[string] ]={}
keyword[for] identifier[arg] keyword[in] identifier[self] . identifier[profile_settings_args_install_json] ( identifier[ij] , keyword[None] ):
identifier[required] = identifier[ijp] . identifier[get] ( identifier[arg] ). identifier[get] ( literal[string] , keyword[False] )
keyword[try] :
keyword[if] identifier[required] :
identifier[profile] [ literal[string] ][ literal[string] ][ literal[string] ][ identifier[arg] ]= identifier[app_args] . identifier[pop] ( identifier[arg] )
keyword[else] :
identifier[profile] [ literal[string] ][ literal[string] ][ literal[string] ][ identifier[arg] ]= identifier[app_args] . identifier[pop] ( identifier[arg] )
keyword[except] identifier[KeyError] :
keyword[if] identifier[self] . identifier[args] . identifier[verbose] :
identifier[print] (
literal[string] . identifier[format] (
identifier[c] . identifier[Style] . identifier[BRIGHT] , identifier[c] . identifier[Fore] . identifier[YELLOW] , identifier[arg] , identifier[profile] . identifier[get] ( literal[string] )
)
)
identifier[print] (
literal[string] . identifier[format] (
identifier[c] . identifier[Style] . identifier[BRIGHT] , identifier[c] . identifier[Fore] . identifier[YELLOW] , identifier[profile] . identifier[get] ( literal[string] )
)
) | def profile_update_args_v3(self, profile):
"""Update v1 profile args to v3 schema for args.
.. code-block:: javascript
"args": {
"app": {
"required": {
"input_strings": "capitalize",
"tc_action": "Capitalize"
},
"optional": {
"fail_on_error": true
}
}
},
"default": {
"api_access_id": "$env.API_ACCESS_ID",
"api_default_org": "$env.API_DEFAULT_ORG",
},
Args:
profile (dict): The dictionary containting the profile settings.
"""
ij = self.load_install_json(profile.get('install_json', 'install.json'))
ijp = self.install_json_params(ij)
if profile.get('args', {}).get('app', {}).get('optional') is None and profile.get('args', {}).get('app', {}).get('required') is None:
app_args = profile['args'].pop('app')
profile['args']['app'] = {}
profile['args']['app']['optional'] = {}
profile['args']['app']['required'] = {}
for arg in self.profile_settings_args_install_json(ij, None):
required = ijp.get(arg).get('required', False)
try:
if required:
profile['args']['app']['required'][arg] = app_args.pop(arg) # depends on [control=['if'], data=[]]
else:
profile['args']['app']['optional'][arg] = app_args.pop(arg) # depends on [control=['try'], data=[]]
except KeyError:
if self.args.verbose:
print('{}{}Input "{}" not found in profile "{}".'.format(c.Style.BRIGHT, c.Fore.YELLOW, arg, profile.get('profile_name'))) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['arg']]
print('{}{}Updating args section to v3 schema for profile {}.'.format(c.Style.BRIGHT, c.Fore.YELLOW, profile.get('profile_name'))) # depends on [control=['if'], data=[]] |
def model_macros(vk, model):
"""Fill the model with macros
model['macros'] = {'name': value, ...}
"""
model['macros'] = {}
# API Macros
macros = [x for x in vk['registry']['enums']
if x.get('@type') not in ('bitmask', 'enum')]
# TODO: Check theses values
special_values = {'1000.0f': '1000.0',
'(~0U)': 0xffffffff,
'(~0ULL)': -1,
'(~0U-1)': 0xfffffffe,
'(~0U-2)': 0xfffffffd}
for macro in macros[0]['enum']:
if '@name' not in macro or '@value' not in macro:
continue
name = macro['@name']
value = macro['@value']
if value in special_values:
value = special_values[value]
model['macros'][name] = value
# Extension Macros
for ext in get_extensions_filtered(vk):
model['macros'][ext['@name']] = 1
for req in ext['require']:
for enum in req['enum']:
ename = enum['@name']
evalue = parse_constant(enum, int(ext['@number']))
if enum.get('@extends') == 'VkResult':
model['enums']['VkResult'][ename] = evalue
else:
model['macros'][ename] = evalue | def function[model_macros, parameter[vk, model]]:
constant[Fill the model with macros
model['macros'] = {'name': value, ...}
]
call[name[model]][constant[macros]] assign[=] dictionary[[], []]
variable[macros] assign[=] <ast.ListComp object at 0x7da204564310>
variable[special_values] assign[=] dictionary[[<ast.Constant object at 0x7da204567c10>, <ast.Constant object at 0x7da204564640>, <ast.Constant object at 0x7da204565a80>, <ast.Constant object at 0x7da204565fc0>, <ast.Constant object at 0x7da204564040>], [<ast.Constant object at 0x7da204566890>, <ast.Constant object at 0x7da2045651e0>, <ast.UnaryOp object at 0x7da2045663e0>, <ast.Constant object at 0x7da2045650f0>, <ast.Constant object at 0x7da204565420>]]
for taget[name[macro]] in starred[call[call[name[macros]][constant[0]]][constant[enum]]] begin[:]
if <ast.BoolOp object at 0x7da204565990> begin[:]
continue
variable[name] assign[=] call[name[macro]][constant[@name]]
variable[value] assign[=] call[name[macro]][constant[@value]]
if compare[name[value] in name[special_values]] begin[:]
variable[value] assign[=] call[name[special_values]][name[value]]
call[call[name[model]][constant[macros]]][name[name]] assign[=] name[value]
for taget[name[ext]] in starred[call[name[get_extensions_filtered], parameter[name[vk]]]] begin[:]
call[call[name[model]][constant[macros]]][call[name[ext]][constant[@name]]] assign[=] constant[1]
for taget[name[req]] in starred[call[name[ext]][constant[require]]] begin[:]
for taget[name[enum]] in starred[call[name[req]][constant[enum]]] begin[:]
variable[ename] assign[=] call[name[enum]][constant[@name]]
variable[evalue] assign[=] call[name[parse_constant], parameter[name[enum], call[name[int], parameter[call[name[ext]][constant[@number]]]]]]
if compare[call[name[enum].get, parameter[constant[@extends]]] equal[==] constant[VkResult]] begin[:]
call[call[call[name[model]][constant[enums]]][constant[VkResult]]][name[ename]] assign[=] name[evalue] | keyword[def] identifier[model_macros] ( identifier[vk] , identifier[model] ):
literal[string]
identifier[model] [ literal[string] ]={}
identifier[macros] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[vk] [ literal[string] ][ literal[string] ]
keyword[if] identifier[x] . identifier[get] ( literal[string] ) keyword[not] keyword[in] ( literal[string] , literal[string] )]
identifier[special_values] ={ literal[string] : literal[string] ,
literal[string] : literal[int] ,
literal[string] :- literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] }
keyword[for] identifier[macro] keyword[in] identifier[macros] [ literal[int] ][ literal[string] ]:
keyword[if] literal[string] keyword[not] keyword[in] identifier[macro] keyword[or] literal[string] keyword[not] keyword[in] identifier[macro] :
keyword[continue]
identifier[name] = identifier[macro] [ literal[string] ]
identifier[value] = identifier[macro] [ literal[string] ]
keyword[if] identifier[value] keyword[in] identifier[special_values] :
identifier[value] = identifier[special_values] [ identifier[value] ]
identifier[model] [ literal[string] ][ identifier[name] ]= identifier[value]
keyword[for] identifier[ext] keyword[in] identifier[get_extensions_filtered] ( identifier[vk] ):
identifier[model] [ literal[string] ][ identifier[ext] [ literal[string] ]]= literal[int]
keyword[for] identifier[req] keyword[in] identifier[ext] [ literal[string] ]:
keyword[for] identifier[enum] keyword[in] identifier[req] [ literal[string] ]:
identifier[ename] = identifier[enum] [ literal[string] ]
identifier[evalue] = identifier[parse_constant] ( identifier[enum] , identifier[int] ( identifier[ext] [ literal[string] ]))
keyword[if] identifier[enum] . identifier[get] ( literal[string] )== literal[string] :
identifier[model] [ literal[string] ][ literal[string] ][ identifier[ename] ]= identifier[evalue]
keyword[else] :
identifier[model] [ literal[string] ][ identifier[ename] ]= identifier[evalue] | def model_macros(vk, model):
"""Fill the model with macros
model['macros'] = {'name': value, ...}
"""
model['macros'] = {}
# API Macros
macros = [x for x in vk['registry']['enums'] if x.get('@type') not in ('bitmask', 'enum')]
# TODO: Check theses values
special_values = {'1000.0f': '1000.0', '(~0U)': 4294967295, '(~0ULL)': -1, '(~0U-1)': 4294967294, '(~0U-2)': 4294967293}
for macro in macros[0]['enum']:
if '@name' not in macro or '@value' not in macro:
continue # depends on [control=['if'], data=[]]
name = macro['@name']
value = macro['@value']
if value in special_values:
value = special_values[value] # depends on [control=['if'], data=['value', 'special_values']]
model['macros'][name] = value # depends on [control=['for'], data=['macro']]
# Extension Macros
for ext in get_extensions_filtered(vk):
model['macros'][ext['@name']] = 1
for req in ext['require']:
for enum in req['enum']:
ename = enum['@name']
evalue = parse_constant(enum, int(ext['@number']))
if enum.get('@extends') == 'VkResult':
model['enums']['VkResult'][ename] = evalue # depends on [control=['if'], data=[]]
else:
model['macros'][ename] = evalue # depends on [control=['for'], data=['enum']] # depends on [control=['for'], data=['req']] # depends on [control=['for'], data=['ext']] |
def knock_out(self):
"""Knockout gene by marking it as non-functional and setting all
associated reactions bounds to zero.
The change is reverted upon exit if executed within the model as
context.
"""
self.functional = False
for reaction in self.reactions:
if not reaction.functional:
reaction.bounds = (0, 0) | def function[knock_out, parameter[self]]:
constant[Knockout gene by marking it as non-functional and setting all
associated reactions bounds to zero.
The change is reverted upon exit if executed within the model as
context.
]
name[self].functional assign[=] constant[False]
for taget[name[reaction]] in starred[name[self].reactions] begin[:]
if <ast.UnaryOp object at 0x7da1b014d450> begin[:]
name[reaction].bounds assign[=] tuple[[<ast.Constant object at 0x7da1b014e3e0>, <ast.Constant object at 0x7da1b014fdf0>]] | keyword[def] identifier[knock_out] ( identifier[self] ):
literal[string]
identifier[self] . identifier[functional] = keyword[False]
keyword[for] identifier[reaction] keyword[in] identifier[self] . identifier[reactions] :
keyword[if] keyword[not] identifier[reaction] . identifier[functional] :
identifier[reaction] . identifier[bounds] =( literal[int] , literal[int] ) | def knock_out(self):
"""Knockout gene by marking it as non-functional and setting all
associated reactions bounds to zero.
The change is reverted upon exit if executed within the model as
context.
"""
self.functional = False
for reaction in self.reactions:
if not reaction.functional:
reaction.bounds = (0, 0) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['reaction']] |
def config(path=None, root=None, db=None):
"""Return the default run_config object for this installation."""
import ambry.run
return ambry.run.load(path=path, root=root, db=db) | def function[config, parameter[path, root, db]]:
constant[Return the default run_config object for this installation.]
import module[ambry.run]
return[call[name[ambry].run.load, parameter[]]] | keyword[def] identifier[config] ( identifier[path] = keyword[None] , identifier[root] = keyword[None] , identifier[db] = keyword[None] ):
literal[string]
keyword[import] identifier[ambry] . identifier[run]
keyword[return] identifier[ambry] . identifier[run] . identifier[load] ( identifier[path] = identifier[path] , identifier[root] = identifier[root] , identifier[db] = identifier[db] ) | def config(path=None, root=None, db=None):
"""Return the default run_config object for this installation."""
import ambry.run
return ambry.run.load(path=path, root=root, db=db) |
def include_flags(self, arch):
'''Returns a string with the include folders'''
openssl_includes = join(self.get_build_dir(arch.arch), 'include')
return (' -I' + openssl_includes +
' -I' + join(openssl_includes, 'internal') +
' -I' + join(openssl_includes, 'openssl')) | def function[include_flags, parameter[self, arch]]:
constant[Returns a string with the include folders]
variable[openssl_includes] assign[=] call[name[join], parameter[call[name[self].get_build_dir, parameter[name[arch].arch]], constant[include]]]
return[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[ -I] + name[openssl_includes]] + constant[ -I]] + call[name[join], parameter[name[openssl_includes], constant[internal]]]] + constant[ -I]] + call[name[join], parameter[name[openssl_includes], constant[openssl]]]]] | keyword[def] identifier[include_flags] ( identifier[self] , identifier[arch] ):
literal[string]
identifier[openssl_includes] = identifier[join] ( identifier[self] . identifier[get_build_dir] ( identifier[arch] . identifier[arch] ), literal[string] )
keyword[return] ( literal[string] + identifier[openssl_includes] +
literal[string] + identifier[join] ( identifier[openssl_includes] , literal[string] )+
literal[string] + identifier[join] ( identifier[openssl_includes] , literal[string] )) | def include_flags(self, arch):
"""Returns a string with the include folders"""
openssl_includes = join(self.get_build_dir(arch.arch), 'include')
return ' -I' + openssl_includes + ' -I' + join(openssl_includes, 'internal') + ' -I' + join(openssl_includes, 'openssl') |
def get_cell_rect(self, row, col, tab):
"""Returns rectangle of cell on canvas"""
top_row = self.row_tb[0]
left_col = self.col_rl[0]
pos_x = self.x_offset
pos_y = self.y_offset
merge_area = self._get_merge_area((row, col, tab))
for __row in xrange(top_row, row):
__row_height = self.code_array.get_row_height(__row, tab)
pos_y += __row_height
for __col in xrange(left_col, col):
__col_width = self.code_array.get_col_width(__col, tab)
pos_x += __col_width
if merge_area is None:
height = self.code_array.get_row_height(row, tab)
width = self.code_array.get_col_width(col, tab)
else:
# We have a merged cell
top, left, bottom, right = merge_area
# Are we drawing the top left cell?
if top == row and left == col:
# Set rect to merge area
heights = (self.code_array.get_row_height(__row, tab)
for __row in xrange(top, bottom+1))
widths = (self.code_array.get_col_width(__col, tab)
for __col in xrange(left, right+1))
height = sum(heights)
width = sum(widths)
else:
# Do not draw the cell because it is hidden
return
return pos_x, pos_y, width, height | def function[get_cell_rect, parameter[self, row, col, tab]]:
constant[Returns rectangle of cell on canvas]
variable[top_row] assign[=] call[name[self].row_tb][constant[0]]
variable[left_col] assign[=] call[name[self].col_rl][constant[0]]
variable[pos_x] assign[=] name[self].x_offset
variable[pos_y] assign[=] name[self].y_offset
variable[merge_area] assign[=] call[name[self]._get_merge_area, parameter[tuple[[<ast.Name object at 0x7da1b16bff70>, <ast.Name object at 0x7da1b16bfc40>, <ast.Name object at 0x7da1b16bf700>]]]]
for taget[name[__row]] in starred[call[name[xrange], parameter[name[top_row], name[row]]]] begin[:]
variable[__row_height] assign[=] call[name[self].code_array.get_row_height, parameter[name[__row], name[tab]]]
<ast.AugAssign object at 0x7da1b16bc250>
for taget[name[__col]] in starred[call[name[xrange], parameter[name[left_col], name[col]]]] begin[:]
variable[__col_width] assign[=] call[name[self].code_array.get_col_width, parameter[name[__col], name[tab]]]
<ast.AugAssign object at 0x7da1b16be0b0>
if compare[name[merge_area] is constant[None]] begin[:]
variable[height] assign[=] call[name[self].code_array.get_row_height, parameter[name[row], name[tab]]]
variable[width] assign[=] call[name[self].code_array.get_col_width, parameter[name[col], name[tab]]]
return[tuple[[<ast.Name object at 0x7da1b1722f20>, <ast.Name object at 0x7da1b1723940>, <ast.Name object at 0x7da1b1723820>, <ast.Name object at 0x7da1b1722e30>]]] | keyword[def] identifier[get_cell_rect] ( identifier[self] , identifier[row] , identifier[col] , identifier[tab] ):
literal[string]
identifier[top_row] = identifier[self] . identifier[row_tb] [ literal[int] ]
identifier[left_col] = identifier[self] . identifier[col_rl] [ literal[int] ]
identifier[pos_x] = identifier[self] . identifier[x_offset]
identifier[pos_y] = identifier[self] . identifier[y_offset]
identifier[merge_area] = identifier[self] . identifier[_get_merge_area] (( identifier[row] , identifier[col] , identifier[tab] ))
keyword[for] identifier[__row] keyword[in] identifier[xrange] ( identifier[top_row] , identifier[row] ):
identifier[__row_height] = identifier[self] . identifier[code_array] . identifier[get_row_height] ( identifier[__row] , identifier[tab] )
identifier[pos_y] += identifier[__row_height]
keyword[for] identifier[__col] keyword[in] identifier[xrange] ( identifier[left_col] , identifier[col] ):
identifier[__col_width] = identifier[self] . identifier[code_array] . identifier[get_col_width] ( identifier[__col] , identifier[tab] )
identifier[pos_x] += identifier[__col_width]
keyword[if] identifier[merge_area] keyword[is] keyword[None] :
identifier[height] = identifier[self] . identifier[code_array] . identifier[get_row_height] ( identifier[row] , identifier[tab] )
identifier[width] = identifier[self] . identifier[code_array] . identifier[get_col_width] ( identifier[col] , identifier[tab] )
keyword[else] :
identifier[top] , identifier[left] , identifier[bottom] , identifier[right] = identifier[merge_area]
keyword[if] identifier[top] == identifier[row] keyword[and] identifier[left] == identifier[col] :
identifier[heights] =( identifier[self] . identifier[code_array] . identifier[get_row_height] ( identifier[__row] , identifier[tab] )
keyword[for] identifier[__row] keyword[in] identifier[xrange] ( identifier[top] , identifier[bottom] + literal[int] ))
identifier[widths] =( identifier[self] . identifier[code_array] . identifier[get_col_width] ( identifier[__col] , identifier[tab] )
keyword[for] identifier[__col] keyword[in] identifier[xrange] ( identifier[left] , identifier[right] + literal[int] ))
identifier[height] = identifier[sum] ( identifier[heights] )
identifier[width] = identifier[sum] ( identifier[widths] )
keyword[else] :
keyword[return]
keyword[return] identifier[pos_x] , identifier[pos_y] , identifier[width] , identifier[height] | def get_cell_rect(self, row, col, tab):
"""Returns rectangle of cell on canvas"""
top_row = self.row_tb[0]
left_col = self.col_rl[0]
pos_x = self.x_offset
pos_y = self.y_offset
merge_area = self._get_merge_area((row, col, tab))
for __row in xrange(top_row, row):
__row_height = self.code_array.get_row_height(__row, tab)
pos_y += __row_height # depends on [control=['for'], data=['__row']]
for __col in xrange(left_col, col):
__col_width = self.code_array.get_col_width(__col, tab)
pos_x += __col_width # depends on [control=['for'], data=['__col']]
if merge_area is None:
height = self.code_array.get_row_height(row, tab)
width = self.code_array.get_col_width(col, tab) # depends on [control=['if'], data=[]]
else:
# We have a merged cell
(top, left, bottom, right) = merge_area
# Are we drawing the top left cell?
if top == row and left == col:
# Set rect to merge area
heights = (self.code_array.get_row_height(__row, tab) for __row in xrange(top, bottom + 1))
widths = (self.code_array.get_col_width(__col, tab) for __col in xrange(left, right + 1))
height = sum(heights)
width = sum(widths) # depends on [control=['if'], data=[]]
else:
# Do not draw the cell because it is hidden
return
return (pos_x, pos_y, width, height) |
def get_run_states(self) -> List[RunState]:
"""Get a list of RunStates from the ZoneMinder API."""
raw_states = self.get_state('api/states.json')
if not raw_states:
_LOGGER.warning("Could not fetch runstates from ZoneMinder")
return []
run_states = []
for i in raw_states['states']:
raw_state = i['State']
_LOGGER.info("Initializing runstate %s", raw_state['Id'])
run_states.append(RunState(self, raw_state))
return run_states | def function[get_run_states, parameter[self]]:
constant[Get a list of RunStates from the ZoneMinder API.]
variable[raw_states] assign[=] call[name[self].get_state, parameter[constant[api/states.json]]]
if <ast.UnaryOp object at 0x7da20e9b02e0> begin[:]
call[name[_LOGGER].warning, parameter[constant[Could not fetch runstates from ZoneMinder]]]
return[list[[]]]
variable[run_states] assign[=] list[[]]
for taget[name[i]] in starred[call[name[raw_states]][constant[states]]] begin[:]
variable[raw_state] assign[=] call[name[i]][constant[State]]
call[name[_LOGGER].info, parameter[constant[Initializing runstate %s], call[name[raw_state]][constant[Id]]]]
call[name[run_states].append, parameter[call[name[RunState], parameter[name[self], name[raw_state]]]]]
return[name[run_states]] | keyword[def] identifier[get_run_states] ( identifier[self] )-> identifier[List] [ identifier[RunState] ]:
literal[string]
identifier[raw_states] = identifier[self] . identifier[get_state] ( literal[string] )
keyword[if] keyword[not] identifier[raw_states] :
identifier[_LOGGER] . identifier[warning] ( literal[string] )
keyword[return] []
identifier[run_states] =[]
keyword[for] identifier[i] keyword[in] identifier[raw_states] [ literal[string] ]:
identifier[raw_state] = identifier[i] [ literal[string] ]
identifier[_LOGGER] . identifier[info] ( literal[string] , identifier[raw_state] [ literal[string] ])
identifier[run_states] . identifier[append] ( identifier[RunState] ( identifier[self] , identifier[raw_state] ))
keyword[return] identifier[run_states] | def get_run_states(self) -> List[RunState]:
"""Get a list of RunStates from the ZoneMinder API."""
raw_states = self.get_state('api/states.json')
if not raw_states:
_LOGGER.warning('Could not fetch runstates from ZoneMinder')
return [] # depends on [control=['if'], data=[]]
run_states = []
for i in raw_states['states']:
raw_state = i['State']
_LOGGER.info('Initializing runstate %s', raw_state['Id'])
run_states.append(RunState(self, raw_state)) # depends on [control=['for'], data=['i']]
return run_states |
def loadSessions(self, callback, bare_jid, device_ids):
"""
Return a dict containing the session for each device id. By default, this method
calls loadSession for each device id.
"""
if self.is_async:
self.__loadSessionsAsync(callback, bare_jid, device_ids, {})
else:
return self.__loadSessionsSync(bare_jid, device_ids) | def function[loadSessions, parameter[self, callback, bare_jid, device_ids]]:
constant[
Return a dict containing the session for each device id. By default, this method
calls loadSession for each device id.
]
if name[self].is_async begin[:]
call[name[self].__loadSessionsAsync, parameter[name[callback], name[bare_jid], name[device_ids], dictionary[[], []]]] | keyword[def] identifier[loadSessions] ( identifier[self] , identifier[callback] , identifier[bare_jid] , identifier[device_ids] ):
literal[string]
keyword[if] identifier[self] . identifier[is_async] :
identifier[self] . identifier[__loadSessionsAsync] ( identifier[callback] , identifier[bare_jid] , identifier[device_ids] ,{})
keyword[else] :
keyword[return] identifier[self] . identifier[__loadSessionsSync] ( identifier[bare_jid] , identifier[device_ids] ) | def loadSessions(self, callback, bare_jid, device_ids):
"""
Return a dict containing the session for each device id. By default, this method
calls loadSession for each device id.
"""
if self.is_async:
self.__loadSessionsAsync(callback, bare_jid, device_ids, {}) # depends on [control=['if'], data=[]]
else:
return self.__loadSessionsSync(bare_jid, device_ids) |
def create_access_token(identity, fresh=False, expires_delta=None, user_claims=None):
"""
Create a new access token.
:param identity: The identity of this token, which can be any data that is
json serializable. It can also be a python object, in which
case you can use the
:meth:`~flask_jwt_extended.JWTManager.user_identity_loader`
to define a callback function that will be used to pull a
json serializable identity out of the object.
:param fresh: If this token should be marked as fresh, and can thus access
:func:`~flask_jwt_extended.fresh_jwt_required` endpoints.
Defaults to `False`. This value can also be a
`datetime.timedelta` in which case it will indicate how long
this token will be considered fresh.
:param expires_delta: A `datetime.timedelta` for how long this token should
last before it expires. Set to False to disable
expiration. If this is None, it will use the
'JWT_ACCESS_TOKEN_EXPIRES` config value
(see :ref:`Configuration Options`)
:param user_claims: Optionnal JSON serializable to override user claims.
:return: An encoded access token
"""
jwt_manager = _get_jwt_manager()
return jwt_manager._create_access_token(identity, fresh, expires_delta, user_claims) | def function[create_access_token, parameter[identity, fresh, expires_delta, user_claims]]:
constant[
Create a new access token.
:param identity: The identity of this token, which can be any data that is
json serializable. It can also be a python object, in which
case you can use the
:meth:`~flask_jwt_extended.JWTManager.user_identity_loader`
to define a callback function that will be used to pull a
json serializable identity out of the object.
:param fresh: If this token should be marked as fresh, and can thus access
:func:`~flask_jwt_extended.fresh_jwt_required` endpoints.
Defaults to `False`. This value can also be a
`datetime.timedelta` in which case it will indicate how long
this token will be considered fresh.
:param expires_delta: A `datetime.timedelta` for how long this token should
last before it expires. Set to False to disable
expiration. If this is None, it will use the
'JWT_ACCESS_TOKEN_EXPIRES` config value
(see :ref:`Configuration Options`)
:param user_claims: Optionnal JSON serializable to override user claims.
:return: An encoded access token
]
variable[jwt_manager] assign[=] call[name[_get_jwt_manager], parameter[]]
return[call[name[jwt_manager]._create_access_token, parameter[name[identity], name[fresh], name[expires_delta], name[user_claims]]]] | keyword[def] identifier[create_access_token] ( identifier[identity] , identifier[fresh] = keyword[False] , identifier[expires_delta] = keyword[None] , identifier[user_claims] = keyword[None] ):
literal[string]
identifier[jwt_manager] = identifier[_get_jwt_manager] ()
keyword[return] identifier[jwt_manager] . identifier[_create_access_token] ( identifier[identity] , identifier[fresh] , identifier[expires_delta] , identifier[user_claims] ) | def create_access_token(identity, fresh=False, expires_delta=None, user_claims=None):
"""
Create a new access token.
:param identity: The identity of this token, which can be any data that is
json serializable. It can also be a python object, in which
case you can use the
:meth:`~flask_jwt_extended.JWTManager.user_identity_loader`
to define a callback function that will be used to pull a
json serializable identity out of the object.
:param fresh: If this token should be marked as fresh, and can thus access
:func:`~flask_jwt_extended.fresh_jwt_required` endpoints.
Defaults to `False`. This value can also be a
`datetime.timedelta` in which case it will indicate how long
this token will be considered fresh.
:param expires_delta: A `datetime.timedelta` for how long this token should
last before it expires. Set to False to disable
expiration. If this is None, it will use the
'JWT_ACCESS_TOKEN_EXPIRES` config value
(see :ref:`Configuration Options`)
:param user_claims: Optionnal JSON serializable to override user claims.
:return: An encoded access token
"""
jwt_manager = _get_jwt_manager()
return jwt_manager._create_access_token(identity, fresh, expires_delta, user_claims) |
def nhanesi(display=False):
""" A nicely packaged version of NHANES I data with surivival times as labels.
"""
X = pd.read_csv(cache(github_data_url + "NHANESI_subset_X.csv"))
y = pd.read_csv(cache(github_data_url + "NHANESI_subset_y.csv"))["y"]
if display:
X_display = X.copy()
X_display["Sex"] = ["Male" if v == 1 else "Female" for v in X["Sex"]]
return X_display, np.array(y)
else:
return X, np.array(y) | def function[nhanesi, parameter[display]]:
constant[ A nicely packaged version of NHANES I data with surivival times as labels.
]
variable[X] assign[=] call[name[pd].read_csv, parameter[call[name[cache], parameter[binary_operation[name[github_data_url] + constant[NHANESI_subset_X.csv]]]]]]
variable[y] assign[=] call[call[name[pd].read_csv, parameter[call[name[cache], parameter[binary_operation[name[github_data_url] + constant[NHANESI_subset_y.csv]]]]]]][constant[y]]
if name[display] begin[:]
variable[X_display] assign[=] call[name[X].copy, parameter[]]
call[name[X_display]][constant[Sex]] assign[=] <ast.ListComp object at 0x7da1b1f8ce50>
return[tuple[[<ast.Name object at 0x7da1b1f8f190>, <ast.Call object at 0x7da1b1f8ff70>]]] | keyword[def] identifier[nhanesi] ( identifier[display] = keyword[False] ):
literal[string]
identifier[X] = identifier[pd] . identifier[read_csv] ( identifier[cache] ( identifier[github_data_url] + literal[string] ))
identifier[y] = identifier[pd] . identifier[read_csv] ( identifier[cache] ( identifier[github_data_url] + literal[string] ))[ literal[string] ]
keyword[if] identifier[display] :
identifier[X_display] = identifier[X] . identifier[copy] ()
identifier[X_display] [ literal[string] ]=[ literal[string] keyword[if] identifier[v] == literal[int] keyword[else] literal[string] keyword[for] identifier[v] keyword[in] identifier[X] [ literal[string] ]]
keyword[return] identifier[X_display] , identifier[np] . identifier[array] ( identifier[y] )
keyword[else] :
keyword[return] identifier[X] , identifier[np] . identifier[array] ( identifier[y] ) | def nhanesi(display=False):
""" A nicely packaged version of NHANES I data with surivival times as labels.
"""
X = pd.read_csv(cache(github_data_url + 'NHANESI_subset_X.csv'))
y = pd.read_csv(cache(github_data_url + 'NHANESI_subset_y.csv'))['y']
if display:
X_display = X.copy()
X_display['Sex'] = ['Male' if v == 1 else 'Female' for v in X['Sex']]
return (X_display, np.array(y)) # depends on [control=['if'], data=[]]
else:
return (X, np.array(y)) |
def _nested_relations(self, relation):
"""
Get the deeply nested relations for a given top-level relation.
:rtype: dict
"""
nested = {}
for name, constraints in self._eager_load.items():
if self._is_nested(name, relation):
nested[name[len(relation + ".") :]] = constraints
return nested | def function[_nested_relations, parameter[self, relation]]:
constant[
Get the deeply nested relations for a given top-level relation.
:rtype: dict
]
variable[nested] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da20c7c8280>, <ast.Name object at 0x7da20c7cb2e0>]]] in starred[call[name[self]._eager_load.items, parameter[]]] begin[:]
if call[name[self]._is_nested, parameter[name[name], name[relation]]] begin[:]
call[name[nested]][call[name[name]][<ast.Slice object at 0x7da20c7c82b0>]] assign[=] name[constraints]
return[name[nested]] | keyword[def] identifier[_nested_relations] ( identifier[self] , identifier[relation] ):
literal[string]
identifier[nested] ={}
keyword[for] identifier[name] , identifier[constraints] keyword[in] identifier[self] . identifier[_eager_load] . identifier[items] ():
keyword[if] identifier[self] . identifier[_is_nested] ( identifier[name] , identifier[relation] ):
identifier[nested] [ identifier[name] [ identifier[len] ( identifier[relation] + literal[string] ):]]= identifier[constraints]
keyword[return] identifier[nested] | def _nested_relations(self, relation):
"""
Get the deeply nested relations for a given top-level relation.
:rtype: dict
"""
nested = {}
for (name, constraints) in self._eager_load.items():
if self._is_nested(name, relation):
nested[name[len(relation + '.'):]] = constraints # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return nested |
def ridgeplot(self, mult, linewidth, alpha, ax):
"""Draw ridgeplot for each plotter.
Parameters
----------
mult : float
How much to multiply height by. Set this to greater than 1 to have some overlap.
linewidth : float
Width of line on border of ridges
alpha : float
Transparency of ridges
ax : Axes
Axes to draw on
"""
if alpha is None:
alpha = 1.0
zorder = 0
for plotter in self.plotters.values():
for x, y_min, y_max, color in plotter.ridgeplot(mult):
if alpha == 0:
border = color
else:
border = "k"
ax.plot(x, y_max, "-", linewidth=linewidth, color=border, zorder=zorder)
ax.plot(x, y_min, "-", linewidth=linewidth, color=border, zorder=zorder)
ax.fill_between(x, y_min, y_max, alpha=alpha, color=color, zorder=zorder)
zorder -= 1
return ax | def function[ridgeplot, parameter[self, mult, linewidth, alpha, ax]]:
constant[Draw ridgeplot for each plotter.
Parameters
----------
mult : float
How much to multiply height by. Set this to greater than 1 to have some overlap.
linewidth : float
Width of line on border of ridges
alpha : float
Transparency of ridges
ax : Axes
Axes to draw on
]
if compare[name[alpha] is constant[None]] begin[:]
variable[alpha] assign[=] constant[1.0]
variable[zorder] assign[=] constant[0]
for taget[name[plotter]] in starred[call[name[self].plotters.values, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1bad030>, <ast.Name object at 0x7da1b1bad150>, <ast.Name object at 0x7da1b1bad0f0>, <ast.Name object at 0x7da1b1baf1f0>]]] in starred[call[name[plotter].ridgeplot, parameter[name[mult]]]] begin[:]
if compare[name[alpha] equal[==] constant[0]] begin[:]
variable[border] assign[=] name[color]
call[name[ax].plot, parameter[name[x], name[y_max], constant[-]]]
call[name[ax].plot, parameter[name[x], name[y_min], constant[-]]]
call[name[ax].fill_between, parameter[name[x], name[y_min], name[y_max]]]
<ast.AugAssign object at 0x7da1b1c0dba0>
return[name[ax]] | keyword[def] identifier[ridgeplot] ( identifier[self] , identifier[mult] , identifier[linewidth] , identifier[alpha] , identifier[ax] ):
literal[string]
keyword[if] identifier[alpha] keyword[is] keyword[None] :
identifier[alpha] = literal[int]
identifier[zorder] = literal[int]
keyword[for] identifier[plotter] keyword[in] identifier[self] . identifier[plotters] . identifier[values] ():
keyword[for] identifier[x] , identifier[y_min] , identifier[y_max] , identifier[color] keyword[in] identifier[plotter] . identifier[ridgeplot] ( identifier[mult] ):
keyword[if] identifier[alpha] == literal[int] :
identifier[border] = identifier[color]
keyword[else] :
identifier[border] = literal[string]
identifier[ax] . identifier[plot] ( identifier[x] , identifier[y_max] , literal[string] , identifier[linewidth] = identifier[linewidth] , identifier[color] = identifier[border] , identifier[zorder] = identifier[zorder] )
identifier[ax] . identifier[plot] ( identifier[x] , identifier[y_min] , literal[string] , identifier[linewidth] = identifier[linewidth] , identifier[color] = identifier[border] , identifier[zorder] = identifier[zorder] )
identifier[ax] . identifier[fill_between] ( identifier[x] , identifier[y_min] , identifier[y_max] , identifier[alpha] = identifier[alpha] , identifier[color] = identifier[color] , identifier[zorder] = identifier[zorder] )
identifier[zorder] -= literal[int]
keyword[return] identifier[ax] | def ridgeplot(self, mult, linewidth, alpha, ax):
"""Draw ridgeplot for each plotter.
Parameters
----------
mult : float
How much to multiply height by. Set this to greater than 1 to have some overlap.
linewidth : float
Width of line on border of ridges
alpha : float
Transparency of ridges
ax : Axes
Axes to draw on
"""
if alpha is None:
alpha = 1.0 # depends on [control=['if'], data=['alpha']]
zorder = 0
for plotter in self.plotters.values():
for (x, y_min, y_max, color) in plotter.ridgeplot(mult):
if alpha == 0:
border = color # depends on [control=['if'], data=[]]
else:
border = 'k'
ax.plot(x, y_max, '-', linewidth=linewidth, color=border, zorder=zorder)
ax.plot(x, y_min, '-', linewidth=linewidth, color=border, zorder=zorder)
ax.fill_between(x, y_min, y_max, alpha=alpha, color=color, zorder=zorder)
zorder -= 1 # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['plotter']]
return ax |
def remove(self, metric_producer):
"""Remove a metric producer.
:type metric_producer: :class: 'MetricProducer'
:param metric_producer: The metric producer to remove.
"""
if metric_producer is None:
raise ValueError
try:
with self.mp_lock:
self.metric_producers.remove(metric_producer)
except KeyError:
pass | def function[remove, parameter[self, metric_producer]]:
constant[Remove a metric producer.
:type metric_producer: :class: 'MetricProducer'
:param metric_producer: The metric producer to remove.
]
if compare[name[metric_producer] is constant[None]] begin[:]
<ast.Raise object at 0x7da20cabfeb0>
<ast.Try object at 0x7da20cabd270> | keyword[def] identifier[remove] ( identifier[self] , identifier[metric_producer] ):
literal[string]
keyword[if] identifier[metric_producer] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError]
keyword[try] :
keyword[with] identifier[self] . identifier[mp_lock] :
identifier[self] . identifier[metric_producers] . identifier[remove] ( identifier[metric_producer] )
keyword[except] identifier[KeyError] :
keyword[pass] | def remove(self, metric_producer):
"""Remove a metric producer.
:type metric_producer: :class: 'MetricProducer'
:param metric_producer: The metric producer to remove.
"""
if metric_producer is None:
raise ValueError # depends on [control=['if'], data=[]]
try:
with self.mp_lock:
self.metric_producers.remove(metric_producer) # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] |
def rsa_pss_sign(private_key, data, hash_algorithm):
"""
Generates an RSASSA-PSS signature. For the PSS padding the mask gen
algorithm will be mgf1 using the same hash algorithm as the signature. The
salt length with be the length of the hash algorithm, and the trailer field
with be the standard 0xBC byte.
:param private_key:
The PrivateKey to generate the signature with
:param data:
A byte string of the data the signature is for
:param hash_algorithm:
A unicode string of "md5", "sha1", "sha256", "sha384" or "sha512"
:raises:
ValueError - when any of the parameters contain an invalid value
TypeError - when any of the parameters are of the wrong type
OSError - when an error is returned by the OS crypto library
:return:
A byte string of the signature
"""
if private_key.algorithm != 'rsa':
raise ValueError('The key specified is not an RSA private key')
return _sign(private_key, data, hash_algorithm, rsa_pss_padding=True) | def function[rsa_pss_sign, parameter[private_key, data, hash_algorithm]]:
constant[
Generates an RSASSA-PSS signature. For the PSS padding the mask gen
algorithm will be mgf1 using the same hash algorithm as the signature. The
salt length with be the length of the hash algorithm, and the trailer field
with be the standard 0xBC byte.
:param private_key:
The PrivateKey to generate the signature with
:param data:
A byte string of the data the signature is for
:param hash_algorithm:
A unicode string of "md5", "sha1", "sha256", "sha384" or "sha512"
:raises:
ValueError - when any of the parameters contain an invalid value
TypeError - when any of the parameters are of the wrong type
OSError - when an error is returned by the OS crypto library
:return:
A byte string of the signature
]
if compare[name[private_key].algorithm not_equal[!=] constant[rsa]] begin[:]
<ast.Raise object at 0x7da1b0030550>
return[call[name[_sign], parameter[name[private_key], name[data], name[hash_algorithm]]]] | keyword[def] identifier[rsa_pss_sign] ( identifier[private_key] , identifier[data] , identifier[hash_algorithm] ):
literal[string]
keyword[if] identifier[private_key] . identifier[algorithm] != literal[string] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[_sign] ( identifier[private_key] , identifier[data] , identifier[hash_algorithm] , identifier[rsa_pss_padding] = keyword[True] ) | def rsa_pss_sign(private_key, data, hash_algorithm):
"""
Generates an RSASSA-PSS signature. For the PSS padding the mask gen
algorithm will be mgf1 using the same hash algorithm as the signature. The
salt length with be the length of the hash algorithm, and the trailer field
with be the standard 0xBC byte.
:param private_key:
The PrivateKey to generate the signature with
:param data:
A byte string of the data the signature is for
:param hash_algorithm:
A unicode string of "md5", "sha1", "sha256", "sha384" or "sha512"
:raises:
ValueError - when any of the parameters contain an invalid value
TypeError - when any of the parameters are of the wrong type
OSError - when an error is returned by the OS crypto library
:return:
A byte string of the signature
"""
if private_key.algorithm != 'rsa':
raise ValueError('The key specified is not an RSA private key') # depends on [control=['if'], data=[]]
return _sign(private_key, data, hash_algorithm, rsa_pss_padding=True) |
def children(self, val: list):
        """Replace this citation's list of children.

        ``None`` entries in ``val`` are silently dropped.  Every remaining
        entry must be a citation (``BaseCitation`` or this object's own
        type); each accepted child gets its ``root`` pointed at this
        citation's root (or at this citation itself when it is the root).

        :param val: List of citation children, or None for no children
        :raises TypeError: if an entry is not a citation
        """
        accepted = []
        if val is not None:
            for child in val:
                if child is None:
                    continue
                if not isinstance(child, (BaseCitation, type(self))):
                    raise TypeError("Citation children should be Citation")
                child.root = self.root if isinstance(self, BaseCitation) else self
                accepted.append(child)
        self._children = accepted
constant[ Sets children
:param val: List of citation children
]
variable[final_value] assign[=] list[[]]
if compare[name[val] is_not constant[None]] begin[:]
for taget[name[citation]] in starred[name[val]] begin[:]
if compare[name[citation] is constant[None]] begin[:]
continue
name[self]._children assign[=] name[final_value] | keyword[def] identifier[children] ( identifier[self] , identifier[val] : identifier[list] ):
literal[string]
identifier[final_value] =[]
keyword[if] identifier[val] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[citation] keyword[in] identifier[val] :
keyword[if] identifier[citation] keyword[is] keyword[None] :
keyword[continue]
keyword[elif] keyword[not] identifier[isinstance] ( identifier[citation] ,( identifier[BaseCitation] , identifier[type] ( identifier[self] ))):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[self] , identifier[BaseCitation] ):
identifier[citation] . identifier[root] = identifier[self] . identifier[root]
keyword[else] :
identifier[citation] . identifier[root] = identifier[self]
identifier[final_value] . identifier[append] ( identifier[citation] )
identifier[self] . identifier[_children] = identifier[final_value] | def children(self, val: list):
""" Sets children
:param val: List of citation children
"""
final_value = []
if val is not None:
for citation in val:
if citation is None:
continue # depends on [control=['if'], data=[]]
elif not isinstance(citation, (BaseCitation, type(self))):
raise TypeError('Citation children should be Citation') # depends on [control=['if'], data=[]]
else:
if isinstance(self, BaseCitation):
citation.root = self.root # depends on [control=['if'], data=[]]
else:
citation.root = self
final_value.append(citation) # depends on [control=['for'], data=['citation']] # depends on [control=['if'], data=['val']]
self._children = final_value |
def projection(pnormal1, ppoint, tpoint):
    """Orthogonally projects a point onto a plane and returns its coordinates.

    NOTE(review): this is a nested/closure function — it reads ``self`` and
    ``np`` from the enclosing scope even though it takes no ``self`` parameter.

    :param pnormal1: normal vector of the plane
    :param ppoint: coordinates of a point lying in the plane
    :param tpoint: coordinates of the point to be projected
    :returns : coordinates of tpoint orthogonally projected onto the plane
    """
    # Choose the plane normal pointing to the point to be projected:
    # of the two antiparallel normals, keep the one on tpoint's side
    # (smaller distance from tpoint to ppoint offset along the normal).
    pnormal2 = [coo*(-1) for coo in pnormal1]
    # NOTE(review): `pnormal1 + ppoint` concatenates if these are plain Python
    # lists; presumably they are vector types with elementwise `+` (or
    # euclidean3d tolerates the concatenated input) — confirm with callers.
    d1 = self.euclidean3d(tpoint, pnormal1 + ppoint)
    d2 = self.euclidean3d(tpoint, pnormal2 + ppoint)
    pnormal = pnormal1 if d1 < d2 else pnormal2
    # Projection is tpoint + sb * pnormal with
    # sb = -(n . (tpoint - ppoint)) / (n . n)
    sn = -np.dot(pnormal, self.vector(ppoint, tpoint))
    sd = np.dot(pnormal, pnormal)
    sb = sn / sd
    return [c1 + c2 for c1, c2 in zip(tpoint, [sb * pn for pn in pnormal])] | def function[projection, parameter[pnormal1, ppoint, tpoint]]:
constant[Calculates the centroid from a 3D point cloud and returns the coordinates
:param pnormal1: normal of plane
:param ppoint: coordinates of point in the plane
:param tpoint: coordinates of point to be projected
:returns : coordinates of point orthogonally projected on the plane
]
variable[pnormal2] assign[=] <ast.ListComp object at 0x7da204963df0>
variable[d1] assign[=] call[name[self].euclidean3d, parameter[name[tpoint], binary_operation[name[pnormal1] + name[ppoint]]]]
variable[d2] assign[=] call[name[self].euclidean3d, parameter[name[tpoint], binary_operation[name[pnormal2] + name[ppoint]]]]
variable[pnormal] assign[=] <ast.IfExp object at 0x7da18dc04a00>
variable[sn] assign[=] <ast.UnaryOp object at 0x7da18dc07850>
variable[sd] assign[=] call[name[np].dot, parameter[name[pnormal], name[pnormal]]]
variable[sb] assign[=] binary_operation[name[sn] / name[sd]]
return[<ast.ListComp object at 0x7da18dc068f0>] | keyword[def] identifier[projection] ( identifier[pnormal1] , identifier[ppoint] , identifier[tpoint] ):
literal[string]
identifier[pnormal2] =[ identifier[coo] *(- literal[int] ) keyword[for] identifier[coo] keyword[in] identifier[pnormal1] ]
identifier[d1] = identifier[self] . identifier[euclidean3d] ( identifier[tpoint] , identifier[pnormal1] + identifier[ppoint] )
identifier[d2] = identifier[self] . identifier[euclidean3d] ( identifier[tpoint] , identifier[pnormal2] + identifier[ppoint] )
identifier[pnormal] = identifier[pnormal1] keyword[if] identifier[d1] < identifier[d2] keyword[else] identifier[pnormal2]
identifier[sn] =- identifier[np] . identifier[dot] ( identifier[pnormal] , identifier[self] . identifier[vector] ( identifier[ppoint] , identifier[tpoint] ))
identifier[sd] = identifier[np] . identifier[dot] ( identifier[pnormal] , identifier[pnormal] )
identifier[sb] = identifier[sn] / identifier[sd]
keyword[return] [ identifier[c1] + identifier[c2] keyword[for] identifier[c1] , identifier[c2] keyword[in] identifier[zip] ( identifier[tpoint] ,[ identifier[sb] * identifier[pn] keyword[for] identifier[pn] keyword[in] identifier[pnormal] ])] | def projection(pnormal1, ppoint, tpoint):
"""Calculates the centroid from a 3D point cloud and returns the coordinates
:param pnormal1: normal of plane
:param ppoint: coordinates of point in the plane
:param tpoint: coordinates of point to be projected
:returns : coordinates of point orthogonally projected on the plane
"""
# Choose the plane normal pointing to the point to be projected
pnormal2 = [coo * -1 for coo in pnormal1]
d1 = self.euclidean3d(tpoint, pnormal1 + ppoint)
d2 = self.euclidean3d(tpoint, pnormal2 + ppoint)
pnormal = pnormal1 if d1 < d2 else pnormal2
# Calculate the projection of tpoint to the plane
sn = -np.dot(pnormal, self.vector(ppoint, tpoint))
sd = np.dot(pnormal, pnormal)
sb = sn / sd
return [c1 + c2 for (c1, c2) in zip(tpoint, [sb * pn for pn in pnormal])] |
def property_derivative_T(self, T, P, zs, ws, order=1):
        r'''Method to calculate a derivative of a mixture property with respect
        to temperature at constant pressure and composition,
        of a given order. Methods found valid by `select_valid_methods` are
        attempted until a method succeeds. If no methods are valid and succeed,
        None is returned.
        Calls `calculate_derivative_T` internally to perform the actual
        calculation.
        .. math::
            \text{derivative} = \frac{d (\text{property})}{d T}|_{P, z}
        Parameters
        ----------
        T : float
            Temperature at which to calculate the derivative, [K]
        P : float
            Pressure at which to calculate the derivative, [Pa]
        zs : list[float]
            Mole fractions of all species in the mixture, [-]
        ws : list[float]
            Weight fractions of all species in the mixture, [-]
        order : int
            Order of the derivative, >= 1
        Returns
        -------
        d_prop_d_T_at_P : float
            Calculated derivative property, [`units/K^order`]
        '''
        sorted_valid_methods = self.select_valid_methods(T, P, zs, ws)
        for method in sorted_valid_methods:
            try:
                return self.calculate_derivative_T(T, P, zs, ws, method, order)
            except Exception:
                # Deliberate best-effort fallthrough: if one method fails for
                # any reason, try the next valid one.  `except Exception`
                # (not a bare `except:`) so KeyboardInterrupt/SystemExit
                # still propagate to the caller.
                pass
        return None
constant[Method to calculate a derivative of a mixture property with respect
to temperature at constant pressure and composition,
of a given order. Methods found valid by `select_valid_methods` are
attempted until a method succeeds. If no methods are valid and succeed,
None is returned.
Calls `calculate_derivative_T` internally to perform the actual
calculation.
.. math::
\text{derivative} = \frac{d (\text{property})}{d T}|_{P, z}
Parameters
----------
T : float
Temperature at which to calculate the derivative, [K]
P : float
Pressure at which to calculate the derivative, [Pa]
zs : list[float]
Mole fractions of all species in the mixture, [-]
ws : list[float]
Weight fractions of all species in the mixture, [-]
order : int
Order of the derivative, >= 1
Returns
-------
d_prop_d_T_at_P : float
Calculated derivative property, [`units/K^order`]
]
variable[sorted_valid_methods] assign[=] call[name[self].select_valid_methods, parameter[name[T], name[P], name[zs], name[ws]]]
for taget[name[method]] in starred[name[sorted_valid_methods]] begin[:]
<ast.Try object at 0x7da204564e80>
return[constant[None]] | keyword[def] identifier[property_derivative_T] ( identifier[self] , identifier[T] , identifier[P] , identifier[zs] , identifier[ws] , identifier[order] = literal[int] ):
literal[string]
identifier[sorted_valid_methods] = identifier[self] . identifier[select_valid_methods] ( identifier[T] , identifier[P] , identifier[zs] , identifier[ws] )
keyword[for] identifier[method] keyword[in] identifier[sorted_valid_methods] :
keyword[try] :
keyword[return] identifier[self] . identifier[calculate_derivative_T] ( identifier[T] , identifier[P] , identifier[zs] , identifier[ws] , identifier[method] , identifier[order] )
keyword[except] :
keyword[pass]
keyword[return] keyword[None] | def property_derivative_T(self, T, P, zs, ws, order=1):
"""Method to calculate a derivative of a mixture property with respect
to temperature at constant pressure and composition,
of a given order. Methods found valid by `select_valid_methods` are
attempted until a method succeeds. If no methods are valid and succeed,
None is returned.
Calls `calculate_derivative_T` internally to perform the actual
calculation.
.. math::
\\text{derivative} = \\frac{d (\\text{property})}{d T}|_{P, z}
Parameters
----------
T : float
Temperature at which to calculate the derivative, [K]
P : float
Pressure at which to calculate the derivative, [Pa]
zs : list[float]
Mole fractions of all species in the mixture, [-]
ws : list[float]
Weight fractions of all species in the mixture, [-]
order : int
Order of the derivative, >= 1
Returns
-------
d_prop_d_T_at_P : float
Calculated derivative property, [`units/K^order`]
"""
sorted_valid_methods = self.select_valid_methods(T, P, zs, ws)
for method in sorted_valid_methods:
try:
return self.calculate_derivative_T(T, P, zs, ws, method, order) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['method']]
return None |
def tbw(self, delegate_address, blacklist=None, share_fees=False, compound_interest=False):
        """Calculate true-block-weight (TBW) payout shares for a delegate's voters.

        WARNING: This function doesn't work yet. Instead use legacy.trueshare()
        for a functional tbw script.

        :param delegate_address: address of the delegate whose voters are rewarded
        :param blacklist: addresses to exclude from the calculation
        :param share_fees: if True, forged transaction fees are shared in
            addition to the block reward
        :param compound_interest: if True, previously accrued (unpaid) shares
            count towards a voter's weight for subsequent blocks
        :returns: tuple of (voter_dict mapping address -> accounting record,
            blockchain height at the time of calculation)
        """
        if not blacklist:
            blacklist = []
        delegate_public_key = self.account_details(address=delegate_address)['public_key']
        height_at_calculation = self.node_height_details()['height']
        # string format of the rawasset used to detect (un)vote transactions
        minvote = '{{"votes":["-{0}"]}}'.format(delegate_public_key)
        plusvote = '{{"votes":["+{0}"]}}'.format(delegate_public_key)
        events = self.get_events_vote_cluster(delegate_address)
        votes = self.get_historic_voters(delegate_address)
        blocks = self.get_blocks(delegate_address)
        # create a map of voters (one accounting record per voter address)
        voter_dict = {}
        for voter in votes:
            voter_dict.update({voter: {
                'balance': 0.0,
                'status': False,
                'last_payout': votes[voter]['height'],
                'share': 0.0,
                'vote_height': votes[voter]['height'],
                'blocks_forged': []}
            })
        for blacklisted_address in blacklist:
            voter_dict.pop(blacklisted_address, None)
        last_payout = self.get_last_out_transactions(delegate_address)
        # not all voters have had a payout, thus a KeyError is thrown
        for payout in last_payout:
            try:
                voter_dict[payout]['last_payout'] = last_payout[payout]['height']
            except KeyError:
                pass
        # the change in the previous state of the voter_dict. This is added to the voterdict if
        # no state change occurs in the blockchain.
        delta_state = {}
        no_state_change = False
        block_keys = sorted(list(blocks.keys()))
        block_nr = 0
        try:
            # NOTE(review): `id` shadows the builtin of the same name here;
            # rename (e.g. to `event_id`) when this WIP function is revisited.
            for id in events:
                # calculating poolbalances and updating shares
                if events[id]['height'] > blocks[block_keys[block_nr]]['height']:
                    # if the state is the same for the votepool, the previous calculation can be reused.
                    block_nr += 1
                    if no_state_change:
                        for x in delta_state:
                            voter_dict[x]['share'] += delta_state[x]
                        continue
                    # update pool balances
                    poolbalance = 0
                    delta_state = {}
                    for i in voter_dict:
                        # here we update the poolbalance
                        if compound_interest:
                            balance = voter_dict[i]['balance'] + voter_dict[i]['share']
                        else:
                            balance = voter_dict[i]['balance']
                        if voter_dict[i]['status']:
                            # if not voter_dict[i]['balance'] < 0:
                            poolbalance += balance
                            # else:
                            #     raise exceptions.NegativeBalanceError('balance lower than zero for: {0}. balance: {1}'.format(i, voter_dict[i]['balance']))
                    # here we calculate the share per voter
                    for i in voter_dict:
                        if compound_interest:
                            balance = voter_dict[i]['balance'] + voter_dict[i]['share']
                        else:
                            balance = voter_dict[i]['balance']
                        if voter_dict[i]['status'] and voter_dict[i]['last_payout'] < blocks[block_keys[block_nr]]['height']:
                            if share_fees:
                                share = (balance / poolbalance) * (blocks[block_keys[block_nr]]['reward'] +
                                                                   blocks[block_keys[block_nr]]['totalFee'])
                            else:
                                share = (balance / poolbalance) * blocks[block_keys[block_nr]]['reward']
                            voter_dict[i]['share'] += share
                            delta_state.update({i: share})
                    no_state_change = True
                    continue
                # parsing an event
                no_state_change = False
                # NOTE(review): leftover debug output for one hard-coded
                # address — remove once this function is finished.
                if events[id]['event_type'] == 'transaction':
                    if events[id]['recipient_id'] == 'Acw2vAVA48TcV8EnoBmZKJdV8bxnW6Y4E9':
                        print(events[id]['amount'])
                # parsing a transaction
                if events[id]['event_type'] == 'transaction':
                    if events[id]['recipient_id'] in voter_dict:
                        voter_dict[events[id]['recipient_id']]['balance'] += events[id]['amount']
                    if events[id]['sender_id'] in voter_dict:
                        voter_dict[events[id]['sender_id']]['balance'] -= (events[id]['amount'] + events[id]['fee'])
                    if events[id]['sender_id'] in voter_dict and events[id]['type'] == 3 and plusvote in events[id]['rawasset']:
                        voter_dict[events[id]['sender_id']]['status'] = True
                    if events[id]['sender_id'] in voter_dict and events[id]['type'] == 3 and minvote in events[id]['rawasset']:
                        voter_dict[events[id]['sender_id']]['status'] = False
                # parsing a forged block (if forged by a voter)
                if events[id]['event_type'] == 'block':
                    voter_dict[events[id]['address']]['balance'] += (events[id]['reward'] + events[id]['total_fee'])
            # the transaction for loop ends with the final transaction. However more blocks may be forged. This copies
            # the final delta share and adds it to the share x the amount of blocks left.
            remaining_blocks = len(block_keys) - block_nr - 1
            for i in range(remaining_blocks):
                for x in delta_state:
                    voter_dict[x]['share'] += delta_state[x]
        # and indexerror indicates that we have ran out of forged blocks, thus the calculation is done (blocks[block_nr]
        # throw the error)
        except IndexError:
            # re-raised unchanged; the handler only exists to mark where the
            # loop is expected to terminate
            raise
        return voter_dict, height_at_calculation | def function[tbw, parameter[self, delegate_address, blacklist, share_fees, compound_interest]]:
constant[This function doesn't work yet. Instead use legacy.trueshare() for a functional tbw script]
if <ast.UnaryOp object at 0x7da20e9553f0> begin[:]
variable[blacklist] assign[=] list[[]]
variable[delegate_public_key] assign[=] call[call[name[self].account_details, parameter[]]][constant[public_key]]
variable[height_at_calculation] assign[=] call[call[name[self].node_height_details, parameter[]]][constant[height]]
variable[minvote] assign[=] call[constant[{{"votes":["-{0}"]}}].format, parameter[name[delegate_public_key]]]
variable[plusvote] assign[=] call[constant[{{"votes":["+{0}"]}}].format, parameter[name[delegate_public_key]]]
variable[events] assign[=] call[name[self].get_events_vote_cluster, parameter[name[delegate_address]]]
variable[votes] assign[=] call[name[self].get_historic_voters, parameter[name[delegate_address]]]
variable[blocks] assign[=] call[name[self].get_blocks, parameter[name[delegate_address]]]
variable[voter_dict] assign[=] dictionary[[], []]
for taget[name[voter]] in starred[name[votes]] begin[:]
call[name[voter_dict].update, parameter[dictionary[[<ast.Name object at 0x7da20e955270>], [<ast.Dict object at 0x7da20e9541f0>]]]]
for taget[name[blacklisted_address]] in starred[name[blacklist]] begin[:]
call[name[voter_dict].pop, parameter[name[blacklisted_address], constant[None]]]
variable[last_payout] assign[=] call[name[self].get_last_out_transactions, parameter[name[delegate_address]]]
for taget[name[payout]] in starred[name[last_payout]] begin[:]
<ast.Try object at 0x7da20e956da0>
variable[delta_state] assign[=] dictionary[[], []]
variable[no_state_change] assign[=] constant[False]
variable[block_keys] assign[=] call[name[sorted], parameter[call[name[list], parameter[call[name[blocks].keys, parameter[]]]]]]
variable[block_nr] assign[=] constant[0]
<ast.Try object at 0x7da20e9559c0>
return[tuple[[<ast.Name object at 0x7da20c6c5f30>, <ast.Name object at 0x7da20c6c4b20>]]] | keyword[def] identifier[tbw] ( identifier[self] , identifier[delegate_address] , identifier[blacklist] = keyword[None] , identifier[share_fees] = keyword[False] , identifier[compound_interest] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[blacklist] :
identifier[blacklist] =[]
identifier[delegate_public_key] = identifier[self] . identifier[account_details] ( identifier[address] = identifier[delegate_address] )[ literal[string] ]
identifier[height_at_calculation] = identifier[self] . identifier[node_height_details] ()[ literal[string] ]
identifier[minvote] = literal[string] . identifier[format] ( identifier[delegate_public_key] )
identifier[plusvote] = literal[string] . identifier[format] ( identifier[delegate_public_key] )
identifier[events] = identifier[self] . identifier[get_events_vote_cluster] ( identifier[delegate_address] )
identifier[votes] = identifier[self] . identifier[get_historic_voters] ( identifier[delegate_address] )
identifier[blocks] = identifier[self] . identifier[get_blocks] ( identifier[delegate_address] )
identifier[voter_dict] ={}
keyword[for] identifier[voter] keyword[in] identifier[votes] :
identifier[voter_dict] . identifier[update] ({ identifier[voter] :{
literal[string] : literal[int] ,
literal[string] : keyword[False] ,
literal[string] : identifier[votes] [ identifier[voter] ][ literal[string] ],
literal[string] : literal[int] ,
literal[string] : identifier[votes] [ identifier[voter] ][ literal[string] ],
literal[string] :[]}
})
keyword[for] identifier[blacklisted_address] keyword[in] identifier[blacklist] :
identifier[voter_dict] . identifier[pop] ( identifier[blacklisted_address] , keyword[None] )
identifier[last_payout] = identifier[self] . identifier[get_last_out_transactions] ( identifier[delegate_address] )
keyword[for] identifier[payout] keyword[in] identifier[last_payout] :
keyword[try] :
identifier[voter_dict] [ identifier[payout] ][ literal[string] ]= identifier[last_payout] [ identifier[payout] ][ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[pass]
identifier[delta_state] ={}
identifier[no_state_change] = keyword[False]
identifier[block_keys] = identifier[sorted] ( identifier[list] ( identifier[blocks] . identifier[keys] ()))
identifier[block_nr] = literal[int]
keyword[try] :
keyword[for] identifier[id] keyword[in] identifier[events] :
keyword[if] identifier[events] [ identifier[id] ][ literal[string] ]> identifier[blocks] [ identifier[block_keys] [ identifier[block_nr] ]][ literal[string] ]:
identifier[block_nr] += literal[int]
keyword[if] identifier[no_state_change] :
keyword[for] identifier[x] keyword[in] identifier[delta_state] :
identifier[voter_dict] [ identifier[x] ][ literal[string] ]+= identifier[delta_state] [ identifier[x] ]
keyword[continue]
identifier[poolbalance] = literal[int]
identifier[delta_state] ={}
keyword[for] identifier[i] keyword[in] identifier[voter_dict] :
keyword[if] identifier[compound_interest] :
identifier[balance] = identifier[voter_dict] [ identifier[i] ][ literal[string] ]+ identifier[voter_dict] [ identifier[i] ][ literal[string] ]
keyword[else] :
identifier[balance] = identifier[voter_dict] [ identifier[i] ][ literal[string] ]
keyword[if] identifier[voter_dict] [ identifier[i] ][ literal[string] ]:
identifier[poolbalance] += identifier[balance]
keyword[for] identifier[i] keyword[in] identifier[voter_dict] :
keyword[if] identifier[compound_interest] :
identifier[balance] = identifier[voter_dict] [ identifier[i] ][ literal[string] ]+ identifier[voter_dict] [ identifier[i] ][ literal[string] ]
keyword[else] :
identifier[balance] = identifier[voter_dict] [ identifier[i] ][ literal[string] ]
keyword[if] identifier[voter_dict] [ identifier[i] ][ literal[string] ] keyword[and] identifier[voter_dict] [ identifier[i] ][ literal[string] ]< identifier[blocks] [ identifier[block_keys] [ identifier[block_nr] ]][ literal[string] ]:
keyword[if] identifier[share_fees] :
identifier[share] =( identifier[balance] / identifier[poolbalance] )*( identifier[blocks] [ identifier[block_keys] [ identifier[block_nr] ]][ literal[string] ]+
identifier[blocks] [ identifier[block_keys] [ identifier[block_nr] ]][ literal[string] ])
keyword[else] :
identifier[share] =( identifier[balance] / identifier[poolbalance] )* identifier[blocks] [ identifier[block_keys] [ identifier[block_nr] ]][ literal[string] ]
identifier[voter_dict] [ identifier[i] ][ literal[string] ]+= identifier[share]
identifier[delta_state] . identifier[update] ({ identifier[i] : identifier[share] })
identifier[no_state_change] = keyword[True]
keyword[continue]
identifier[no_state_change] = keyword[False]
keyword[if] identifier[events] [ identifier[id] ][ literal[string] ]== literal[string] :
keyword[if] identifier[events] [ identifier[id] ][ literal[string] ]== literal[string] :
identifier[print] ( identifier[events] [ identifier[id] ][ literal[string] ])
keyword[if] identifier[events] [ identifier[id] ][ literal[string] ]== literal[string] :
keyword[if] identifier[events] [ identifier[id] ][ literal[string] ] keyword[in] identifier[voter_dict] :
identifier[voter_dict] [ identifier[events] [ identifier[id] ][ literal[string] ]][ literal[string] ]+= identifier[events] [ identifier[id] ][ literal[string] ]
keyword[if] identifier[events] [ identifier[id] ][ literal[string] ] keyword[in] identifier[voter_dict] :
identifier[voter_dict] [ identifier[events] [ identifier[id] ][ literal[string] ]][ literal[string] ]-=( identifier[events] [ identifier[id] ][ literal[string] ]+ identifier[events] [ identifier[id] ][ literal[string] ])
keyword[if] identifier[events] [ identifier[id] ][ literal[string] ] keyword[in] identifier[voter_dict] keyword[and] identifier[events] [ identifier[id] ][ literal[string] ]== literal[int] keyword[and] identifier[plusvote] keyword[in] identifier[events] [ identifier[id] ][ literal[string] ]:
identifier[voter_dict] [ identifier[events] [ identifier[id] ][ literal[string] ]][ literal[string] ]= keyword[True]
keyword[if] identifier[events] [ identifier[id] ][ literal[string] ] keyword[in] identifier[voter_dict] keyword[and] identifier[events] [ identifier[id] ][ literal[string] ]== literal[int] keyword[and] identifier[minvote] keyword[in] identifier[events] [ identifier[id] ][ literal[string] ]:
identifier[voter_dict] [ identifier[events] [ identifier[id] ][ literal[string] ]][ literal[string] ]= keyword[False]
keyword[if] identifier[events] [ identifier[id] ][ literal[string] ]== literal[string] :
identifier[voter_dict] [ identifier[events] [ identifier[id] ][ literal[string] ]][ literal[string] ]+=( identifier[events] [ identifier[id] ][ literal[string] ]+ identifier[events] [ identifier[id] ][ literal[string] ])
identifier[remaining_blocks] = identifier[len] ( identifier[block_keys] )- identifier[block_nr] - literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[remaining_blocks] ):
keyword[for] identifier[x] keyword[in] identifier[delta_state] :
identifier[voter_dict] [ identifier[x] ][ literal[string] ]+= identifier[delta_state] [ identifier[x] ]
keyword[except] identifier[IndexError] :
keyword[raise]
keyword[return] identifier[voter_dict] , identifier[height_at_calculation] | def tbw(self, delegate_address, blacklist=None, share_fees=False, compound_interest=False):
"""This function doesn't work yet. Instead use legacy.trueshare() for a functional tbw script"""
if not blacklist:
blacklist = [] # depends on [control=['if'], data=[]]
delegate_public_key = self.account_details(address=delegate_address)['public_key']
height_at_calculation = self.node_height_details()['height']
# string format of the rawasset
minvote = '{{"votes":["-{0}"]}}'.format(delegate_public_key)
plusvote = '{{"votes":["+{0}"]}}'.format(delegate_public_key)
events = self.get_events_vote_cluster(delegate_address)
votes = self.get_historic_voters(delegate_address)
blocks = self.get_blocks(delegate_address)
# create a map of voters
voter_dict = {}
for voter in votes:
voter_dict.update({voter: {'balance': 0.0, 'status': False, 'last_payout': votes[voter]['height'], 'share': 0.0, 'vote_height': votes[voter]['height'], 'blocks_forged': []}}) # depends on [control=['for'], data=['voter']]
for blacklisted_address in blacklist:
voter_dict.pop(blacklisted_address, None) # depends on [control=['for'], data=['blacklisted_address']]
last_payout = self.get_last_out_transactions(delegate_address)
# not all voters have had a payout, thus a KeyError is thrown
for payout in last_payout:
try:
voter_dict[payout]['last_payout'] = last_payout[payout]['height'] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['payout']]
# the change in the previous state of the voter_dict. This is added to the voterdict if
# no state change occurs in the blockchain.
delta_state = {}
no_state_change = False
block_keys = sorted(list(blocks.keys()))
block_nr = 0
try:
for id in events:
# calculating poolbalances and updating shares
if events[id]['height'] > blocks[block_keys[block_nr]]['height']:
# if the state is the same for the votepool, the previous calculation can be reused.
block_nr += 1
if no_state_change:
for x in delta_state:
voter_dict[x]['share'] += delta_state[x] # depends on [control=['for'], data=['x']]
continue # depends on [control=['if'], data=[]]
# update pool balances
poolbalance = 0
delta_state = {}
for i in voter_dict:
# here we update the poolbalance
if compound_interest:
balance = voter_dict[i]['balance'] + voter_dict[i]['share'] # depends on [control=['if'], data=[]]
else:
balance = voter_dict[i]['balance']
if voter_dict[i]['status']:
# if not voter_dict[i]['balance'] < 0:
poolbalance += balance # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
# else:
# raise exceptions.NegativeBalanceError('balance lower than zero for: {0}. balance: {1}'.format(i, voter_dict[i]['balance']))
# here we calculate the share per voter
for i in voter_dict:
if compound_interest:
balance = voter_dict[i]['balance'] + voter_dict[i]['share'] # depends on [control=['if'], data=[]]
else:
balance = voter_dict[i]['balance']
if voter_dict[i]['status'] and voter_dict[i]['last_payout'] < blocks[block_keys[block_nr]]['height']:
if share_fees:
share = balance / poolbalance * (blocks[block_keys[block_nr]]['reward'] + blocks[block_keys[block_nr]]['totalFee']) # depends on [control=['if'], data=[]]
else:
share = balance / poolbalance * blocks[block_keys[block_nr]]['reward']
voter_dict[i]['share'] += share
delta_state.update({i: share}) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
no_state_change = True
continue # depends on [control=['if'], data=[]]
# parsing an event
no_state_change = False
if events[id]['event_type'] == 'transaction':
if events[id]['recipient_id'] == 'Acw2vAVA48TcV8EnoBmZKJdV8bxnW6Y4E9':
print(events[id]['amount']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# parsing a transaction
if events[id]['event_type'] == 'transaction':
if events[id]['recipient_id'] in voter_dict:
voter_dict[events[id]['recipient_id']]['balance'] += events[id]['amount'] # depends on [control=['if'], data=['voter_dict']]
if events[id]['sender_id'] in voter_dict:
voter_dict[events[id]['sender_id']]['balance'] -= events[id]['amount'] + events[id]['fee'] # depends on [control=['if'], data=['voter_dict']]
if events[id]['sender_id'] in voter_dict and events[id]['type'] == 3 and (plusvote in events[id]['rawasset']):
voter_dict[events[id]['sender_id']]['status'] = True # depends on [control=['if'], data=[]]
if events[id]['sender_id'] in voter_dict and events[id]['type'] == 3 and (minvote in events[id]['rawasset']):
voter_dict[events[id]['sender_id']]['status'] = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# parsing a forged block (if forged by a voter)
if events[id]['event_type'] == 'block':
voter_dict[events[id]['address']]['balance'] += events[id]['reward'] + events[id]['total_fee'] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['id']]
# the transaction for loop ends with the final transaction. However more blocks may be forged. This copies
# the final delta share and adds it to the share x the amount of blocks left.
remaining_blocks = len(block_keys) - block_nr - 1
for i in range(remaining_blocks):
for x in delta_state:
voter_dict[x]['share'] += delta_state[x] # depends on [control=['for'], data=['x']] # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]]
# and indexerror indicates that we have ran out of forged blocks, thus the calculation is done (blocks[block_nr]
# throw the error)
except IndexError:
raise # depends on [control=['except'], data=[]]
return (voter_dict, height_at_calculation) |
def get_site_amplification(self, C, sites):
        """
        Returns the site amplification term.

        The term sums a basin-depth contribution — linear in the difference
        between z1.0 and its Vs30-conditioned expectation, capped at
        ``dz1ref`` — and a Vs30 scaling term, logarithmic in Vs30 / vref and
        saturating above the reference velocity ``v1``.
        """
        # Depth difference w.r.t. the expected z1.0 for each site's Vs30
        dz1 = sites.z1pt0 - np.exp(self._get_lnmu_z1(sites.vs30))
        # Basin term: cap dz1 at the reference depth difference before scaling
        amp = C["c5"] * np.minimum(dz1, self.CONSTANTS["dz1ref"])
        # Vs30 term: saturate at v1, then take the log ratio against vref
        vs30_capped = np.minimum(sites.vs30, self.CONSTANTS["v1"])
        amp += C["c4"] * np.log(vs30_capped / C["vref"])
        return amp
constant[
Returns the site amplification term
]
variable[dz1] assign[=] binary_operation[name[sites].z1pt0 - call[name[np].exp, parameter[call[name[self]._get_lnmu_z1, parameter[name[sites].vs30]]]]]
variable[f_s] assign[=] binary_operation[call[name[C]][constant[c5]] * name[dz1]]
call[name[f_s]][compare[name[dz1] greater[>] call[name[self].CONSTANTS][constant[dz1ref]]]] assign[=] binary_operation[call[name[C]][constant[c5]] * call[name[self].CONSTANTS][constant[dz1ref]]]
variable[idx] assign[=] compare[name[sites].vs30 greater[>] call[name[self].CONSTANTS][constant[v1]]]
<ast.AugAssign object at 0x7da1b06517b0>
variable[idx] assign[=] call[name[np].logical_not, parameter[name[idx]]]
<ast.AugAssign object at 0x7da1b06534c0>
return[name[f_s]] | keyword[def] identifier[get_site_amplification] ( identifier[self] , identifier[C] , identifier[sites] ):
literal[string]
identifier[dz1] = identifier[sites] . identifier[z1pt0] - identifier[np] . identifier[exp] ( identifier[self] . identifier[_get_lnmu_z1] ( identifier[sites] . identifier[vs30] ))
identifier[f_s] = identifier[C] [ literal[string] ]* identifier[dz1]
identifier[f_s] [ identifier[dz1] > identifier[self] . identifier[CONSTANTS] [ literal[string] ]]=( identifier[C] [ literal[string] ]*
identifier[self] . identifier[CONSTANTS] [ literal[string] ])
identifier[idx] = identifier[sites] . identifier[vs30] > identifier[self] . identifier[CONSTANTS] [ literal[string] ]
identifier[f_s] [ identifier[idx] ]+=( identifier[C] [ literal[string] ]* identifier[np] . identifier[log] ( identifier[self] . identifier[CONSTANTS] [ literal[string] ]/ identifier[C] [ literal[string] ]))
identifier[idx] = identifier[np] . identifier[logical_not] ( identifier[idx] )
identifier[f_s] [ identifier[idx] ]+=( identifier[C] [ literal[string] ]* identifier[np] . identifier[log] ( identifier[sites] . identifier[vs30] [ identifier[idx] ]/ identifier[C] [ literal[string] ]))
keyword[return] identifier[f_s] | def get_site_amplification(self, C, sites):
"""
Returns the site amplification term
"""
# Gets delta normalised z1
dz1 = sites.z1pt0 - np.exp(self._get_lnmu_z1(sites.vs30))
f_s = C['c5'] * dz1
# Calculates site amplification term
f_s[dz1 > self.CONSTANTS['dz1ref']] = C['c5'] * self.CONSTANTS['dz1ref']
idx = sites.vs30 > self.CONSTANTS['v1']
f_s[idx] += C['c4'] * np.log(self.CONSTANTS['v1'] / C['vref'])
idx = np.logical_not(idx)
f_s[idx] += C['c4'] * np.log(sites.vs30[idx] / C['vref'])
return f_s |
async def add(self, key, value, ttl=SENTINEL, dumps_fn=None, namespace=None, _conn=None):
    """
    Store ``value`` under ``key``, failing if the key is already present.
    :param key: str
    :param value: obj
    :param ttl: int expiration time in seconds. Due to memcached
        restrictions if you want compatibility use int. In case you
        need miliseconds, redis and memory support float ttls
    :param dumps_fn: callable alternative to use as dumps function
    :param namespace: str alternative namespace to use
    :param timeout: int or float in seconds specifying maximum timeout
        for the operations to last
    :returns: True if key is inserted
    :raises:
        - ValueError if key already exists
        - :class:`asyncio.TimeoutError` if it lasts more than self.timeout
    """
    t0 = time.monotonic()
    serialize = dumps_fn or self._serializer.dumps
    namespaced_key = self.build_key(key, namespace=namespace)
    await self._add(namespaced_key, serialize(value), ttl=self._get_ttl(ttl), _conn=_conn)
    logger.debug("ADD %s %s (%.4f)s", namespaced_key, True, time.monotonic() - t0)
    return True
literal[string]
identifier[start] = identifier[time] . identifier[monotonic] ()
identifier[dumps] = identifier[dumps_fn] keyword[or] identifier[self] . identifier[_serializer] . identifier[dumps]
identifier[ns_key] = identifier[self] . identifier[build_key] ( identifier[key] , identifier[namespace] = identifier[namespace] )
keyword[await] identifier[self] . identifier[_add] ( identifier[ns_key] , identifier[dumps] ( identifier[value] ), identifier[ttl] = identifier[self] . identifier[_get_ttl] ( identifier[ttl] ), identifier[_conn] = identifier[_conn] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[ns_key] , keyword[True] , identifier[time] . identifier[monotonic] ()- identifier[start] )
keyword[return] keyword[True] | async def add(self, key, value, ttl=SENTINEL, dumps_fn=None, namespace=None, _conn=None):
"""
Stores the value in the given key with ttl if specified. Raises an error if the
key already exists.
:param key: str
:param value: obj
:param ttl: int the expiration time in seconds. Due to memcached
restrictions if you want compatibility use int. In case you
need miliseconds, redis and memory support float ttls
:param dumps_fn: callable alternative to use as dumps function
:param namespace: str alternative namespace to use
:param timeout: int or float in seconds specifying maximum timeout
for the operations to last
:returns: True if key is inserted
:raises:
- ValueError if key already exists
- :class:`asyncio.TimeoutError` if it lasts more than self.timeout
"""
start = time.monotonic()
dumps = dumps_fn or self._serializer.dumps
ns_key = self.build_key(key, namespace=namespace)
await self._add(ns_key, dumps(value), ttl=self._get_ttl(ttl), _conn=_conn)
logger.debug('ADD %s %s (%.4f)s', ns_key, True, time.monotonic() - start)
return True |
def parseArgsPy26():
    """ Argument parser for Python 2.6 """
    from gsmtermlib.posoptparse import PosOptionParser, Option
    parser = PosOptionParser(description='Simple script for sending SMS messages')
    # Declarative table of (short flag, long flag, keyword arguments)
    option_table = (
        ('-i', '--port', {'metavar': 'PORT', 'help': 'port to which the GSM modem is connected; a number or a device name.'}),
        ('-b', '--baud', {'metavar': 'BAUDRATE', 'default': 115200, 'help': 'set baud rate'}),
        ('-p', '--pin', {'metavar': 'PIN', 'default': None, 'help': 'SIM card PIN'}),
        ('-d', '--deliver', {'action': 'store_true', 'help': 'wait for SMS delivery report'}),
    )
    for short_flag, long_flag, extra in option_table:
        parser.add_option(short_flag, long_flag, **extra)
    parser.add_positional_argument(Option('--destination', metavar='DESTINATION', help='destination mobile number'))
    options, args = parser.parse_args()
    if len(args) == 1:
        # Exactly one positional argument: treat it as the destination number
        options.destination = args[0]
    else:
        parser.error('Incorrect number of arguments - please specify a DESTINATION to send to, e.g. {0} 012789456'.format(sys.argv[0]))
    return options
constant[ Argument parser for Python 2.6 ]
from relative_module[gsmtermlib.posoptparse] import module[PosOptionParser], module[Option]
variable[parser] assign[=] call[name[PosOptionParser], parameter[]]
call[name[parser].add_option, parameter[constant[-i], constant[--port]]]
call[name[parser].add_option, parameter[constant[-b], constant[--baud]]]
call[name[parser].add_option, parameter[constant[-p], constant[--pin]]]
call[name[parser].add_option, parameter[constant[-d], constant[--deliver]]]
call[name[parser].add_positional_argument, parameter[call[name[Option], parameter[constant[--destination]]]]]
<ast.Tuple object at 0x7da1b1529ba0> assign[=] call[name[parser].parse_args, parameter[]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[1]] begin[:]
call[name[parser].error, parameter[call[constant[Incorrect number of arguments - please specify a DESTINATION to send to, e.g. {0} 012789456].format, parameter[call[name[sys].argv][constant[0]]]]]] | keyword[def] identifier[parseArgsPy26] ():
literal[string]
keyword[from] identifier[gsmtermlib] . identifier[posoptparse] keyword[import] identifier[PosOptionParser] , identifier[Option]
identifier[parser] = identifier[PosOptionParser] ( identifier[description] = literal[string] )
identifier[parser] . identifier[add_option] ( literal[string] , literal[string] , identifier[metavar] = literal[string] , identifier[help] = literal[string] )
identifier[parser] . identifier[add_option] ( literal[string] , literal[string] , identifier[metavar] = literal[string] , identifier[default] = literal[int] , identifier[help] = literal[string] )
identifier[parser] . identifier[add_option] ( literal[string] , literal[string] , identifier[metavar] = literal[string] , identifier[default] = keyword[None] , identifier[help] = literal[string] )
identifier[parser] . identifier[add_option] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] )
identifier[parser] . identifier[add_positional_argument] ( identifier[Option] ( literal[string] , identifier[metavar] = literal[string] , identifier[help] = literal[string] ))
identifier[options] , identifier[args] = identifier[parser] . identifier[parse_args] ()
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[parser] . identifier[error] ( literal[string] . identifier[format] ( identifier[sys] . identifier[argv] [ literal[int] ]))
keyword[else] :
identifier[options] . identifier[destination] = identifier[args] [ literal[int] ]
keyword[return] identifier[options] | def parseArgsPy26():
""" Argument parser for Python 2.6 """
from gsmtermlib.posoptparse import PosOptionParser, Option
parser = PosOptionParser(description='Simple script for sending SMS messages')
parser.add_option('-i', '--port', metavar='PORT', help='port to which the GSM modem is connected; a number or a device name.')
parser.add_option('-b', '--baud', metavar='BAUDRATE', default=115200, help='set baud rate')
parser.add_option('-p', '--pin', metavar='PIN', default=None, help='SIM card PIN')
parser.add_option('-d', '--deliver', action='store_true', help='wait for SMS delivery report')
parser.add_positional_argument(Option('--destination', metavar='DESTINATION', help='destination mobile number'))
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error('Incorrect number of arguments - please specify a DESTINATION to send to, e.g. {0} 012789456'.format(sys.argv[0])) # depends on [control=['if'], data=[]]
else:
options.destination = args[0]
return options |
def send_command(self, *args, **kwargs):
    """
    If the ASA is in multi-context mode, then the base_prompt needs to be
    updated after each context change.
    """
    # The command may arrive positionally or as a keyword argument
    command_string = args[0] if len(args) >= 1 else kwargs["command_string"]
    is_context_change = "changeto" in command_string
    # For 'changeto', expect '#' (unless the caller supplied expect_string,
    # positionally or by keyword) to detect command completion
    if is_context_change and len(args) <= 1:
        kwargs.setdefault("expect_string", "#")
    output = super(CiscoAsaSSH, self).send_command(*args, **kwargs)
    if is_context_change:
        # The prompt changes with the context, so re-learn it
        self.set_base_prompt()
    return output
constant[
If the ASA is in multi-context mode, then the base_prompt needs to be
updated after each context change.
]
if compare[call[name[len], parameter[name[args]]] greater_or_equal[>=] constant[1]] begin[:]
variable[command_string] assign[=] call[name[args]][constant[0]]
if compare[constant[changeto] in name[command_string]] begin[:]
if compare[call[name[len], parameter[name[args]]] less_or_equal[<=] constant[1]] begin[:]
variable[expect_string] assign[=] call[name[kwargs].get, parameter[constant[expect_string], constant[#]]]
call[name[kwargs]][constant[expect_string]] assign[=] name[expect_string]
variable[output] assign[=] call[call[name[super], parameter[name[CiscoAsaSSH], name[self]]].send_command, parameter[<ast.Starred object at 0x7da1b1f96a70>]]
if compare[constant[changeto] in name[command_string]] begin[:]
call[name[self].set_base_prompt, parameter[]]
return[name[output]] | keyword[def] identifier[send_command] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[len] ( identifier[args] )>= literal[int] :
identifier[command_string] = identifier[args] [ literal[int] ]
keyword[else] :
identifier[command_string] = identifier[kwargs] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[command_string] :
keyword[if] identifier[len] ( identifier[args] )<= literal[int] :
identifier[expect_string] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[kwargs] [ literal[string] ]= identifier[expect_string]
identifier[output] = identifier[super] ( identifier[CiscoAsaSSH] , identifier[self] ). identifier[send_command] (* identifier[args] ,** identifier[kwargs] )
keyword[if] literal[string] keyword[in] identifier[command_string] :
identifier[self] . identifier[set_base_prompt] ()
keyword[return] identifier[output] | def send_command(self, *args, **kwargs):
"""
If the ASA is in multi-context mode, then the base_prompt needs to be
updated after each context change.
"""
if len(args) >= 1:
command_string = args[0] # depends on [control=['if'], data=[]]
else:
command_string = kwargs['command_string']
# If changeto in command, look for '#' to determine command is done
if 'changeto' in command_string:
if len(args) <= 1:
expect_string = kwargs.get('expect_string', '#')
kwargs['expect_string'] = expect_string # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
output = super(CiscoAsaSSH, self).send_command(*args, **kwargs)
if 'changeto' in command_string:
self.set_base_prompt() # depends on [control=['if'], data=[]]
return output |
def plot_predict(self, h=5, past_values=20, intervals=True, **kwargs):
    """ Plots forecast with the estimated model
    Parameters
    ----------
    h : int (default : 5)
        How many steps ahead would you like to forecast?
    past_values : int (default : 20)
        How many past observations to show on the forecast graph?
    intervals : Boolean
        Would you like to show 95% prediction intervals for the forecast?
    Returns
    ----------
    - Plot of the forecast
    Raises
    ----------
    - Exception if no latent variables have been estimated yet
    """
    import matplotlib.pyplot as plt
    import seaborn as sns  # noqa: F401 -- imported for its plot styling side effects
    figsize = kwargs.get('figsize', (10, 7))
    if self.latent_variables.estimated is False:
        raise Exception("No latent variables estimated!")
    # The variance from _construct_predict is not needed for plotting;
    # lower/upper are the 95% interval bounds (still normalised)
    predictions, _, lower, upper = self._construct_predict(
        self.latent_variables.get_z_values(), h)
    full_predictions = np.append(self.data, predictions)
    date_index = self.shift_dates(h)
    # De-normalise and restrict to the last `past_values` + `h` points
    plot_values = full_predictions[-h-past_values:]*self._norm_std + self._norm_mean
    plot_index = date_index[-h-past_values:]
    # Anchor the interval band at the last pre-forecast value so the shaded
    # region joins the forecast line without a gap
    lower = np.append(full_predictions[-h-1], lower)
    upper = np.append(full_predictions[-h-1], upper)
    plt.figure(figsize=figsize)
    if intervals:
        plt.fill_between(date_index[-h-1:],
                         lower*self._norm_std + self._norm_mean,
                         upper*self._norm_std + self._norm_mean,
                         alpha=0.2)
    plt.plot(plot_index, plot_values)
    plt.title("Forecast for " + self.data_name)
    plt.xlabel("Time")
    plt.ylabel(self.data_name)
    plt.show()
constant[ Plots forecast with the estimated model
Parameters
----------
h : int (default : 5)
How many steps ahead would you like to forecast?
past_values : int (default : 20)
How many past observations to show on the forecast graph?
intervals : Boolean
Would you like to show 95% prediction intervals for the forecast?
Returns
----------
- Plot of the forecast
- Error bars, forecasted_values, plot_values, plot_index
]
import module[matplotlib.pyplot] as alias[plt]
import module[seaborn] as alias[sns]
variable[figsize] assign[=] call[name[kwargs].get, parameter[constant[figsize], tuple[[<ast.Constant object at 0x7da1b194d960>, <ast.Constant object at 0x7da1b194e470>]]]]
if compare[name[self].latent_variables.estimated is constant[False]] begin[:]
<ast.Raise object at 0x7da1b194c2b0> | keyword[def] identifier[plot_predict] ( identifier[self] , identifier[h] = literal[int] , identifier[past_values] = literal[int] , identifier[intervals] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
keyword[import] identifier[seaborn] keyword[as] identifier[sns]
identifier[figsize] = identifier[kwargs] . identifier[get] ( literal[string] ,( literal[int] , literal[int] ))
keyword[if] identifier[self] . identifier[latent_variables] . identifier[estimated] keyword[is] keyword[False] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[else] :
identifier[predictions] , identifier[variance] , identifier[lower] , identifier[upper] = identifier[self] . identifier[_construct_predict] ( identifier[self] . identifier[latent_variables] . identifier[get_z_values] (), identifier[h] )
identifier[full_predictions] = identifier[np] . identifier[append] ( identifier[self] . identifier[data] , identifier[predictions] )
identifier[full_lower] = identifier[np] . identifier[append] ( identifier[self] . identifier[data] , identifier[lower] )
identifier[full_upper] = identifier[np] . identifier[append] ( identifier[self] . identifier[data] , identifier[upper] )
identifier[date_index] = identifier[self] . identifier[shift_dates] ( identifier[h] )
identifier[plot_values] = identifier[full_predictions] [- identifier[h] - identifier[past_values] :]* identifier[self] . identifier[_norm_std] + identifier[self] . identifier[_norm_mean]
identifier[plot_index] = identifier[date_index] [- identifier[h] - identifier[past_values] :]
identifier[lower] = identifier[np] . identifier[append] ( identifier[full_predictions] [- identifier[h] - literal[int] ], identifier[lower] )
identifier[upper] = identifier[np] . identifier[append] ( identifier[full_predictions] [- identifier[h] - literal[int] ], identifier[upper] )
identifier[plt] . identifier[figure] ( identifier[figsize] = identifier[figsize] )
keyword[if] identifier[intervals] == keyword[True] :
identifier[plt] . identifier[fill_between] ( identifier[date_index] [- identifier[h] - literal[int] :],
identifier[lower] * identifier[self] . identifier[_norm_std] + identifier[self] . identifier[_norm_mean] ,
identifier[upper] * identifier[self] . identifier[_norm_std] + identifier[self] . identifier[_norm_mean] ,
identifier[alpha] = literal[int] )
identifier[plt] . identifier[plot] ( identifier[plot_index] , identifier[plot_values] )
identifier[plt] . identifier[title] ( literal[string] + identifier[self] . identifier[data_name] )
identifier[plt] . identifier[xlabel] ( literal[string] )
identifier[plt] . identifier[ylabel] ( identifier[self] . identifier[data_name] )
identifier[plt] . identifier[show] () | def plot_predict(self, h=5, past_values=20, intervals=True, **kwargs):
""" Plots forecast with the estimated model
Parameters
----------
h : int (default : 5)
How many steps ahead would you like to forecast?
past_values : int (default : 20)
How many past observations to show on the forecast graph?
intervals : Boolean
Would you like to show 95% prediction intervals for the forecast?
Returns
----------
- Plot of the forecast
- Error bars, forecasted_values, plot_values, plot_index
"""
import matplotlib.pyplot as plt
import seaborn as sns
figsize = kwargs.get('figsize', (10, 7))
if self.latent_variables.estimated is False:
raise Exception('No latent variables estimated!') # depends on [control=['if'], data=[]]
else:
(predictions, variance, lower, upper) = self._construct_predict(self.latent_variables.get_z_values(), h)
full_predictions = np.append(self.data, predictions)
full_lower = np.append(self.data, lower)
full_upper = np.append(self.data, upper)
date_index = self.shift_dates(h)
# Plot values (how far to look back)
plot_values = full_predictions[-h - past_values:] * self._norm_std + self._norm_mean
plot_index = date_index[-h - past_values:]
# Lower and upper intervals
lower = np.append(full_predictions[-h - 1], lower)
upper = np.append(full_predictions[-h - 1], upper)
plt.figure(figsize=figsize)
if intervals == True:
plt.fill_between(date_index[-h - 1:], lower * self._norm_std + self._norm_mean, upper * self._norm_std + self._norm_mean, alpha=0.2) # depends on [control=['if'], data=[]]
plt.plot(plot_index, plot_values)
plt.title('Forecast for ' + self.data_name)
plt.xlabel('Time')
plt.ylabel(self.data_name)
plt.show() |
def subclass_from_type(cls, t):
    '''
    Given a PyTypeObjectPtr instance wrapping a gdb.Value that's a
    (PyTypeObject*), determine the corresponding subclass of PyObjectPtr
    to use
    Ideally, we would look up the symbols for the global types, but that
    isn't working yet:
      (gdb) python print gdb.lookup_symbol('PyList_Type')[0].value
      Traceback (most recent call last):
        File "<string>", line 1, in <module>
      NotImplementedError: Symbol type not yet supported in Python scripts.
      Error while executing Python code.
    For now, we use tp_flags, after doing some string comparisons on the
    tp_name for some special-cases that don't seem to be visible through
    flags
    '''
    try:
        tp_name = t.field('tp_name').string()
        tp_flags = int(t.field('tp_flags'))
    except RuntimeError:
        # NULL or otherwise unreadable type object: fall back to the base class
        return cls

    # Special cases dispatched by type name (not distinguishable via flags)
    by_name = {
        'bool': PyBoolObjectPtr,
        'classobj': PyClassObjectPtr,
        'instance': PyInstanceObjectPtr,
        'NoneType': PyNoneStructPtr,
        'frame': PyFrameObjectPtr,
        'set': PySetObjectPtr,
        'frozenset': PySetObjectPtr,
        'builtin_function_or_method': PyCFunctionObjectPtr,
    }
    named = by_name.get(tp_name)
    if named is not None:
        return named

    # Flag-based dispatch. Order matters: Py_TPFLAGS_HEAPTYPE must be
    # tested before the *_SUBCLASS flags, mirroring the original chain.
    flag_table = (
        (Py_TPFLAGS_HEAPTYPE, HeapTypeObjectPtr),
        (Py_TPFLAGS_INT_SUBCLASS, PyIntObjectPtr),
        (Py_TPFLAGS_LONG_SUBCLASS, PyLongObjectPtr),
        (Py_TPFLAGS_LIST_SUBCLASS, PyListObjectPtr),
        (Py_TPFLAGS_TUPLE_SUBCLASS, PyTupleObjectPtr),
        (Py_TPFLAGS_STRING_SUBCLASS, PyStringObjectPtr),
        (Py_TPFLAGS_UNICODE_SUBCLASS, PyUnicodeObjectPtr),
        (Py_TPFLAGS_DICT_SUBCLASS, PyDictObjectPtr),
        (Py_TPFLAGS_BASE_EXC_SUBCLASS, PyBaseExceptionObjectPtr),
    )
    for flag, subclass in flag_table:
        if tp_flags & flag:
            return subclass

    # Use the base class:
    return cls
constant[
Given a PyTypeObjectPtr instance wrapping a gdb.Value that's a
(PyTypeObject*), determine the corresponding subclass of PyObjectPtr
to use
Ideally, we would look up the symbols for the global types, but that
isn't working yet:
(gdb) python print gdb.lookup_symbol('PyList_Type')[0].value
Traceback (most recent call last):
File "<string>", line 1, in <module>
NotImplementedError: Symbol type not yet supported in Python scripts.
Error while executing Python code.
For now, we use tp_flags, after doing some string comparisons on the
tp_name for some special-cases that don't seem to be visible through
flags
]
<ast.Try object at 0x7da18eb54940>
variable[name_map] assign[=] dictionary[[<ast.Constant object at 0x7da1b11494b0>, <ast.Constant object at 0x7da1b1148760>, <ast.Constant object at 0x7da1b11483a0>, <ast.Constant object at 0x7da1b1148610>, <ast.Constant object at 0x7da1b114aaa0>, <ast.Constant object at 0x7da1b114b850>, <ast.Constant object at 0x7da1b114ad70>, <ast.Constant object at 0x7da1b114bd30>], [<ast.Name object at 0x7da1b1148190>, <ast.Name object at 0x7da1b114be80>, <ast.Name object at 0x7da1b114bbb0>, <ast.Name object at 0x7da1b114b880>, <ast.Name object at 0x7da1b1148fa0>, <ast.Name object at 0x7da1b114b460>, <ast.Name object at 0x7da1b11497b0>, <ast.Name object at 0x7da1b114ad10>]]
if compare[name[tp_name] in name[name_map]] begin[:]
return[call[name[name_map]][name[tp_name]]]
if binary_operation[name[tp_flags] <ast.BitAnd object at 0x7da2590d6b60> name[Py_TPFLAGS_HEAPTYPE]] begin[:]
return[name[HeapTypeObjectPtr]]
if binary_operation[name[tp_flags] <ast.BitAnd object at 0x7da2590d6b60> name[Py_TPFLAGS_INT_SUBCLASS]] begin[:]
return[name[PyIntObjectPtr]]
if binary_operation[name[tp_flags] <ast.BitAnd object at 0x7da2590d6b60> name[Py_TPFLAGS_LONG_SUBCLASS]] begin[:]
return[name[PyLongObjectPtr]]
if binary_operation[name[tp_flags] <ast.BitAnd object at 0x7da2590d6b60> name[Py_TPFLAGS_LIST_SUBCLASS]] begin[:]
return[name[PyListObjectPtr]]
if binary_operation[name[tp_flags] <ast.BitAnd object at 0x7da2590d6b60> name[Py_TPFLAGS_TUPLE_SUBCLASS]] begin[:]
return[name[PyTupleObjectPtr]]
if binary_operation[name[tp_flags] <ast.BitAnd object at 0x7da2590d6b60> name[Py_TPFLAGS_STRING_SUBCLASS]] begin[:]
return[name[PyStringObjectPtr]]
if binary_operation[name[tp_flags] <ast.BitAnd object at 0x7da2590d6b60> name[Py_TPFLAGS_UNICODE_SUBCLASS]] begin[:]
return[name[PyUnicodeObjectPtr]]
if binary_operation[name[tp_flags] <ast.BitAnd object at 0x7da2590d6b60> name[Py_TPFLAGS_DICT_SUBCLASS]] begin[:]
return[name[PyDictObjectPtr]]
if binary_operation[name[tp_flags] <ast.BitAnd object at 0x7da2590d6b60> name[Py_TPFLAGS_BASE_EXC_SUBCLASS]] begin[:]
return[name[PyBaseExceptionObjectPtr]]
return[name[cls]] | keyword[def] identifier[subclass_from_type] ( identifier[cls] , identifier[t] ):
literal[string]
keyword[try] :
identifier[tp_name] = identifier[t] . identifier[field] ( literal[string] ). identifier[string] ()
identifier[tp_flags] = identifier[int] ( identifier[t] . identifier[field] ( literal[string] ))
keyword[except] identifier[RuntimeError] :
keyword[return] identifier[cls]
identifier[name_map] ={ literal[string] : identifier[PyBoolObjectPtr] ,
literal[string] : identifier[PyClassObjectPtr] ,
literal[string] : identifier[PyInstanceObjectPtr] ,
literal[string] : identifier[PyNoneStructPtr] ,
literal[string] : identifier[PyFrameObjectPtr] ,
literal[string] : identifier[PySetObjectPtr] ,
literal[string] : identifier[PySetObjectPtr] ,
literal[string] : identifier[PyCFunctionObjectPtr] ,
}
keyword[if] identifier[tp_name] keyword[in] identifier[name_map] :
keyword[return] identifier[name_map] [ identifier[tp_name] ]
keyword[if] identifier[tp_flags] & identifier[Py_TPFLAGS_HEAPTYPE] :
keyword[return] identifier[HeapTypeObjectPtr]
keyword[if] identifier[tp_flags] & identifier[Py_TPFLAGS_INT_SUBCLASS] :
keyword[return] identifier[PyIntObjectPtr]
keyword[if] identifier[tp_flags] & identifier[Py_TPFLAGS_LONG_SUBCLASS] :
keyword[return] identifier[PyLongObjectPtr]
keyword[if] identifier[tp_flags] & identifier[Py_TPFLAGS_LIST_SUBCLASS] :
keyword[return] identifier[PyListObjectPtr]
keyword[if] identifier[tp_flags] & identifier[Py_TPFLAGS_TUPLE_SUBCLASS] :
keyword[return] identifier[PyTupleObjectPtr]
keyword[if] identifier[tp_flags] & identifier[Py_TPFLAGS_STRING_SUBCLASS] :
keyword[return] identifier[PyStringObjectPtr]
keyword[if] identifier[tp_flags] & identifier[Py_TPFLAGS_UNICODE_SUBCLASS] :
keyword[return] identifier[PyUnicodeObjectPtr]
keyword[if] identifier[tp_flags] & identifier[Py_TPFLAGS_DICT_SUBCLASS] :
keyword[return] identifier[PyDictObjectPtr]
keyword[if] identifier[tp_flags] & identifier[Py_TPFLAGS_BASE_EXC_SUBCLASS] :
keyword[return] identifier[PyBaseExceptionObjectPtr]
keyword[return] identifier[cls] | def subclass_from_type(cls, t):
"""
Given a PyTypeObjectPtr instance wrapping a gdb.Value that's a
(PyTypeObject*), determine the corresponding subclass of PyObjectPtr
to use
Ideally, we would look up the symbols for the global types, but that
isn't working yet:
(gdb) python print gdb.lookup_symbol('PyList_Type')[0].value
Traceback (most recent call last):
File "<string>", line 1, in <module>
NotImplementedError: Symbol type not yet supported in Python scripts.
Error while executing Python code.
For now, we use tp_flags, after doing some string comparisons on the
tp_name for some special-cases that don't seem to be visible through
flags
"""
try:
tp_name = t.field('tp_name').string()
tp_flags = int(t.field('tp_flags')) # depends on [control=['try'], data=[]]
except RuntimeError:
# Handle any kind of error e.g. NULL ptrs by simply using the base
# class
return cls # depends on [control=['except'], data=[]]
#print 'tp_flags = 0x%08x' % tp_flags
#print 'tp_name = %r' % tp_name
name_map = {'bool': PyBoolObjectPtr, 'classobj': PyClassObjectPtr, 'instance': PyInstanceObjectPtr, 'NoneType': PyNoneStructPtr, 'frame': PyFrameObjectPtr, 'set': PySetObjectPtr, 'frozenset': PySetObjectPtr, 'builtin_function_or_method': PyCFunctionObjectPtr}
if tp_name in name_map:
return name_map[tp_name] # depends on [control=['if'], data=['tp_name', 'name_map']]
if tp_flags & Py_TPFLAGS_HEAPTYPE:
return HeapTypeObjectPtr # depends on [control=['if'], data=[]]
if tp_flags & Py_TPFLAGS_INT_SUBCLASS:
return PyIntObjectPtr # depends on [control=['if'], data=[]]
if tp_flags & Py_TPFLAGS_LONG_SUBCLASS:
return PyLongObjectPtr # depends on [control=['if'], data=[]]
if tp_flags & Py_TPFLAGS_LIST_SUBCLASS:
return PyListObjectPtr # depends on [control=['if'], data=[]]
if tp_flags & Py_TPFLAGS_TUPLE_SUBCLASS:
return PyTupleObjectPtr # depends on [control=['if'], data=[]]
if tp_flags & Py_TPFLAGS_STRING_SUBCLASS:
return PyStringObjectPtr # depends on [control=['if'], data=[]]
if tp_flags & Py_TPFLAGS_UNICODE_SUBCLASS:
return PyUnicodeObjectPtr # depends on [control=['if'], data=[]]
if tp_flags & Py_TPFLAGS_DICT_SUBCLASS:
return PyDictObjectPtr # depends on [control=['if'], data=[]]
if tp_flags & Py_TPFLAGS_BASE_EXC_SUBCLASS:
return PyBaseExceptionObjectPtr # depends on [control=['if'], data=[]]
#if tp_flags & Py_TPFLAGS_TYPE_SUBCLASS:
# return PyTypeObjectPtr
# Use the base class:
return cls |
def interpolate_sysenv(line, defaults=None):
    '''
    Format ``line`` with system environment variables, falling back to
    ``defaults`` for keys missing from the environment.
    :param line: format string whose ``{NAME}`` fields are substituted
    :param defaults: optional mapping of fallback values (environment wins)
    :return: the formatted string
    '''
    # Use None instead of a shared mutable default argument; also avoid
    # shadowing the builtin ``map``
    lookup = ChainMap(os.environ, defaults if defaults is not None else {})
    return line.format(**lookup)
constant[
Format line system environment variables + defaults
]
variable[map] assign[=] call[name[ChainMap], parameter[name[os].environ, name[defaults]]]
return[call[name[line].format, parameter[]]] | keyword[def] identifier[interpolate_sysenv] ( identifier[line] , identifier[defaults] ={}):
literal[string]
identifier[map] = identifier[ChainMap] ( identifier[os] . identifier[environ] , identifier[defaults] )
keyword[return] identifier[line] . identifier[format] (** identifier[map] ) | def interpolate_sysenv(line, defaults={}):
"""
Format line system environment variables + defaults
"""
map = ChainMap(os.environ, defaults)
return line.format(**map) |
def get_bytesize(self, key, default=None, minimum=None, maximum=None, default_unit=None, base=DEFAULT_BASE):
    """Size in bytes expressed by value configured under 'key'
    Args:
        key (str | unicode): Key to lookup
        default (int | str | unicode | None): Default to use if key is not configured
        minimum (int | str | unicode | None): If specified, result can't be below this minimum
        maximum (int | str | unicode | None): If specified, result can't be above this maximum
        default_unit (str | unicode | None): Default unit for unqualified values (see UNITS)
        base (int): Base to use (usually 1024)
    Returns:
        (int): Size in bytes
    """
    size = to_bytesize(self.get_str(key), default_unit, base)
    if size is None:
        # Key not configured: fall back to the provided default
        return to_bytesize(default, default_unit, base)
    floor = to_bytesize(minimum, default_unit, base)
    ceiling = to_bytesize(maximum, default_unit, base)
    return capped(size, floor, ceiling)
constant[Size in bytes expressed by value configured under 'key'
Args:
key (str | unicode): Key to lookup
default (int | str | unicode | None): Default to use if key is not configured
minimum (int | str | unicode | None): If specified, result can't be below this minimum
maximum (int | str | unicode | None): If specified, result can't be above this maximum
default_unit (str | unicode | None): Default unit for unqualified values (see UNITS)
base (int): Base to use (usually 1024)
Returns:
(int): Size in bytes
]
variable[value] assign[=] call[name[to_bytesize], parameter[call[name[self].get_str, parameter[name[key]]], name[default_unit], name[base]]]
if compare[name[value] is constant[None]] begin[:]
return[call[name[to_bytesize], parameter[name[default], name[default_unit], name[base]]]]
return[call[name[capped], parameter[name[value], call[name[to_bytesize], parameter[name[minimum], name[default_unit], name[base]]], call[name[to_bytesize], parameter[name[maximum], name[default_unit], name[base]]]]]] | keyword[def] identifier[get_bytesize] ( identifier[self] , identifier[key] , identifier[default] = keyword[None] , identifier[minimum] = keyword[None] , identifier[maximum] = keyword[None] , identifier[default_unit] = keyword[None] , identifier[base] = identifier[DEFAULT_BASE] ):
literal[string]
identifier[value] = identifier[to_bytesize] ( identifier[self] . identifier[get_str] ( identifier[key] ), identifier[default_unit] , identifier[base] )
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return] identifier[to_bytesize] ( identifier[default] , identifier[default_unit] , identifier[base] )
keyword[return] identifier[capped] ( identifier[value] , identifier[to_bytesize] ( identifier[minimum] , identifier[default_unit] , identifier[base] ), identifier[to_bytesize] ( identifier[maximum] , identifier[default_unit] , identifier[base] )) | def get_bytesize(self, key, default=None, minimum=None, maximum=None, default_unit=None, base=DEFAULT_BASE):
"""Size in bytes expressed by value configured under 'key'
Args:
key (str | unicode): Key to lookup
default (int | str | unicode | None): Default to use if key is not configured
minimum (int | str | unicode | None): If specified, result can't be below this minimum
maximum (int | str | unicode | None): If specified, result can't be above this maximum
default_unit (str | unicode | None): Default unit for unqualified values (see UNITS)
base (int): Base to use (usually 1024)
Returns:
(int): Size in bytes
"""
value = to_bytesize(self.get_str(key), default_unit, base)
if value is None:
return to_bytesize(default, default_unit, base) # depends on [control=['if'], data=[]]
return capped(value, to_bytesize(minimum, default_unit, base), to_bytesize(maximum, default_unit, base)) |
def _get_file_md5(filename):
"""Compute the md5 checksum of a file"""
md5_data = md5()
with open(filename, 'rb') as f:
for chunk in iter(lambda: f.read(128*md5_data.block_size), b''):
md5_data.update(chunk)
return md5_data.hexdigest() | def function[_get_file_md5, parameter[filename]]:
constant[Compute the md5 checksum of a file]
variable[md5_data] assign[=] call[name[md5], parameter[]]
with call[name[open], parameter[name[filename], constant[rb]]] begin[:]
for taget[name[chunk]] in starred[call[name[iter], parameter[<ast.Lambda object at 0x7da18fe93520>, constant[b'']]]] begin[:]
call[name[md5_data].update, parameter[name[chunk]]]
return[call[name[md5_data].hexdigest, parameter[]]] | keyword[def] identifier[_get_file_md5] ( identifier[filename] ):
literal[string]
identifier[md5_data] = identifier[md5] ()
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] :
keyword[for] identifier[chunk] keyword[in] identifier[iter] ( keyword[lambda] : identifier[f] . identifier[read] ( literal[int] * identifier[md5_data] . identifier[block_size] ), literal[string] ):
identifier[md5_data] . identifier[update] ( identifier[chunk] )
keyword[return] identifier[md5_data] . identifier[hexdigest] () | def _get_file_md5(filename):
"""Compute the md5 checksum of a file"""
md5_data = md5()
with open(filename, 'rb') as f:
for chunk in iter(lambda : f.read(128 * md5_data.block_size), b''):
md5_data.update(chunk) # depends on [control=['for'], data=['chunk']] # depends on [control=['with'], data=['f']]
return md5_data.hexdigest() |
def queue_exists(name, region, opts=None, user=None):
'''
Returns True or False on whether the queue exists in the region
name
Name of the SQS queue to search for
region
Name of the region to search for the queue in
opts : None
Any additional options to add to the command line
user : None
Run hg as a user other than what the minion runs as
CLI Example:
salt '*' aws_sqs.queue_exists <sqs queue> <region>
'''
output = list_queues(region, opts, user)
return name in _parse_queue_list(output) | def function[queue_exists, parameter[name, region, opts, user]]:
constant[
Returns True or False on whether the queue exists in the region
name
Name of the SQS queue to search for
region
Name of the region to search for the queue in
opts : None
Any additional options to add to the command line
user : None
Run hg as a user other than what the minion runs as
CLI Example:
salt '*' aws_sqs.queue_exists <sqs queue> <region>
]
variable[output] assign[=] call[name[list_queues], parameter[name[region], name[opts], name[user]]]
return[compare[name[name] in call[name[_parse_queue_list], parameter[name[output]]]]] | keyword[def] identifier[queue_exists] ( identifier[name] , identifier[region] , identifier[opts] = keyword[None] , identifier[user] = keyword[None] ):
literal[string]
identifier[output] = identifier[list_queues] ( identifier[region] , identifier[opts] , identifier[user] )
keyword[return] identifier[name] keyword[in] identifier[_parse_queue_list] ( identifier[output] ) | def queue_exists(name, region, opts=None, user=None):
"""
Returns True or False on whether the queue exists in the region
name
Name of the SQS queue to search for
region
Name of the region to search for the queue in
opts : None
Any additional options to add to the command line
user : None
Run hg as a user other than what the minion runs as
CLI Example:
salt '*' aws_sqs.queue_exists <sqs queue> <region>
"""
output = list_queues(region, opts, user)
return name in _parse_queue_list(output) |
def get_signature(func):
"""
Gathers information about the call signature of `func`.
"""
code = func.__code__
# Names of regular parameters
parameters = tuple(code.co_varnames[:code.co_argcount])
# Flags
has_varargs = bool(code.co_flags & inspect.CO_VARARGS)
has_varkw = bool(code.co_flags & inspect.CO_VARKEYWORDS)
has_kwonly = bool(code.co_kwonlyargcount)
# A mapping of parameter names to default values
default_values = func.__defaults__ or ()
defaults = dict(zip(parameters[-len(default_values):], default_values))
# Type annotations for all parameters
type_hints = typing.get_type_hints(func) if typing else func.__annotations__
types = tuple(normalize_type(type_hints.get(param, AnyType)) for param in parameters)
# Type annotations for required parameters
required = types[:-len(defaults)] if defaults else types
# Complexity
complexity = tuple(map(type_complexity, types)) if typing else None
return Signature(parameters, types, complexity, defaults, required,
has_varargs, has_varkw, has_kwonly) | def function[get_signature, parameter[func]]:
constant[
Gathers information about the call signature of `func`.
]
variable[code] assign[=] name[func].__code__
variable[parameters] assign[=] call[name[tuple], parameter[call[name[code].co_varnames][<ast.Slice object at 0x7da1b255d810>]]]
variable[has_varargs] assign[=] call[name[bool], parameter[binary_operation[name[code].co_flags <ast.BitAnd object at 0x7da2590d6b60> name[inspect].CO_VARARGS]]]
variable[has_varkw] assign[=] call[name[bool], parameter[binary_operation[name[code].co_flags <ast.BitAnd object at 0x7da2590d6b60> name[inspect].CO_VARKEYWORDS]]]
variable[has_kwonly] assign[=] call[name[bool], parameter[name[code].co_kwonlyargcount]]
variable[default_values] assign[=] <ast.BoolOp object at 0x7da1b2563790>
variable[defaults] assign[=] call[name[dict], parameter[call[name[zip], parameter[call[name[parameters]][<ast.Slice object at 0x7da1b25636a0>], name[default_values]]]]]
variable[type_hints] assign[=] <ast.IfExp object at 0x7da1b2563550>
variable[types] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b25631c0>]]
variable[required] assign[=] <ast.IfExp object at 0x7da1b2563010>
variable[complexity] assign[=] <ast.IfExp object at 0x7da1b2562a10>
return[call[name[Signature], parameter[name[parameters], name[types], name[complexity], name[defaults], name[required], name[has_varargs], name[has_varkw], name[has_kwonly]]]] | keyword[def] identifier[get_signature] ( identifier[func] ):
literal[string]
identifier[code] = identifier[func] . identifier[__code__]
identifier[parameters] = identifier[tuple] ( identifier[code] . identifier[co_varnames] [: identifier[code] . identifier[co_argcount] ])
identifier[has_varargs] = identifier[bool] ( identifier[code] . identifier[co_flags] & identifier[inspect] . identifier[CO_VARARGS] )
identifier[has_varkw] = identifier[bool] ( identifier[code] . identifier[co_flags] & identifier[inspect] . identifier[CO_VARKEYWORDS] )
identifier[has_kwonly] = identifier[bool] ( identifier[code] . identifier[co_kwonlyargcount] )
identifier[default_values] = identifier[func] . identifier[__defaults__] keyword[or] ()
identifier[defaults] = identifier[dict] ( identifier[zip] ( identifier[parameters] [- identifier[len] ( identifier[default_values] ):], identifier[default_values] ))
identifier[type_hints] = identifier[typing] . identifier[get_type_hints] ( identifier[func] ) keyword[if] identifier[typing] keyword[else] identifier[func] . identifier[__annotations__]
identifier[types] = identifier[tuple] ( identifier[normalize_type] ( identifier[type_hints] . identifier[get] ( identifier[param] , identifier[AnyType] )) keyword[for] identifier[param] keyword[in] identifier[parameters] )
identifier[required] = identifier[types] [:- identifier[len] ( identifier[defaults] )] keyword[if] identifier[defaults] keyword[else] identifier[types]
identifier[complexity] = identifier[tuple] ( identifier[map] ( identifier[type_complexity] , identifier[types] )) keyword[if] identifier[typing] keyword[else] keyword[None]
keyword[return] identifier[Signature] ( identifier[parameters] , identifier[types] , identifier[complexity] , identifier[defaults] , identifier[required] ,
identifier[has_varargs] , identifier[has_varkw] , identifier[has_kwonly] ) | def get_signature(func):
"""
Gathers information about the call signature of `func`.
"""
code = func.__code__
# Names of regular parameters
parameters = tuple(code.co_varnames[:code.co_argcount])
# Flags
has_varargs = bool(code.co_flags & inspect.CO_VARARGS)
has_varkw = bool(code.co_flags & inspect.CO_VARKEYWORDS)
has_kwonly = bool(code.co_kwonlyargcount)
# A mapping of parameter names to default values
default_values = func.__defaults__ or ()
defaults = dict(zip(parameters[-len(default_values):], default_values))
# Type annotations for all parameters
type_hints = typing.get_type_hints(func) if typing else func.__annotations__
types = tuple((normalize_type(type_hints.get(param, AnyType)) for param in parameters))
# Type annotations for required parameters
required = types[:-len(defaults)] if defaults else types
# Complexity
complexity = tuple(map(type_complexity, types)) if typing else None
return Signature(parameters, types, complexity, defaults, required, has_varargs, has_varkw, has_kwonly) |
def from_api_repr(cls, resource, client):
"""Factory: construct a job given its API representation
:type resource: dict
:param resource: dataset job representation returned from the API
:type client: :class:`google.cloud.bigquery.client.Client`
:param client: Client which holds credentials and project
configuration for the dataset.
:rtype: :class:`google.cloud.bigquery.job.QueryJob`
:returns: Job parsed from ``resource``.
"""
job_id, config = cls._get_resource_config(resource)
query = config["query"]["query"]
job = cls(job_id, query, client=client)
job._set_properties(resource)
return job | def function[from_api_repr, parameter[cls, resource, client]]:
constant[Factory: construct a job given its API representation
:type resource: dict
:param resource: dataset job representation returned from the API
:type client: :class:`google.cloud.bigquery.client.Client`
:param client: Client which holds credentials and project
configuration for the dataset.
:rtype: :class:`google.cloud.bigquery.job.QueryJob`
:returns: Job parsed from ``resource``.
]
<ast.Tuple object at 0x7da207f03c70> assign[=] call[name[cls]._get_resource_config, parameter[name[resource]]]
variable[query] assign[=] call[call[name[config]][constant[query]]][constant[query]]
variable[job] assign[=] call[name[cls], parameter[name[job_id], name[query]]]
call[name[job]._set_properties, parameter[name[resource]]]
return[name[job]] | keyword[def] identifier[from_api_repr] ( identifier[cls] , identifier[resource] , identifier[client] ):
literal[string]
identifier[job_id] , identifier[config] = identifier[cls] . identifier[_get_resource_config] ( identifier[resource] )
identifier[query] = identifier[config] [ literal[string] ][ literal[string] ]
identifier[job] = identifier[cls] ( identifier[job_id] , identifier[query] , identifier[client] = identifier[client] )
identifier[job] . identifier[_set_properties] ( identifier[resource] )
keyword[return] identifier[job] | def from_api_repr(cls, resource, client):
"""Factory: construct a job given its API representation
:type resource: dict
:param resource: dataset job representation returned from the API
:type client: :class:`google.cloud.bigquery.client.Client`
:param client: Client which holds credentials and project
configuration for the dataset.
:rtype: :class:`google.cloud.bigquery.job.QueryJob`
:returns: Job parsed from ``resource``.
"""
(job_id, config) = cls._get_resource_config(resource)
query = config['query']['query']
job = cls(job_id, query, client=client)
job._set_properties(resource)
return job |
def user_active_directory_deactivate(user, attributes, created, updated):
"""
Deactivate user accounts based on Active Directory's
userAccountControl flags. Requires 'userAccountControl'
to be included in LDAP_SYNC_USER_EXTRA_ATTRIBUTES.
"""
try:
user_account_control = int(attributes['userAccountControl'][0])
if user_account_control & 2:
user.is_active = False
except KeyError:
pass | def function[user_active_directory_deactivate, parameter[user, attributes, created, updated]]:
constant[
Deactivate user accounts based on Active Directory's
userAccountControl flags. Requires 'userAccountControl'
to be included in LDAP_SYNC_USER_EXTRA_ATTRIBUTES.
]
<ast.Try object at 0x7da204620ee0> | keyword[def] identifier[user_active_directory_deactivate] ( identifier[user] , identifier[attributes] , identifier[created] , identifier[updated] ):
literal[string]
keyword[try] :
identifier[user_account_control] = identifier[int] ( identifier[attributes] [ literal[string] ][ literal[int] ])
keyword[if] identifier[user_account_control] & literal[int] :
identifier[user] . identifier[is_active] = keyword[False]
keyword[except] identifier[KeyError] :
keyword[pass] | def user_active_directory_deactivate(user, attributes, created, updated):
"""
Deactivate user accounts based on Active Directory's
userAccountControl flags. Requires 'userAccountControl'
to be included in LDAP_SYNC_USER_EXTRA_ATTRIBUTES.
"""
try:
user_account_control = int(attributes['userAccountControl'][0])
if user_account_control & 2:
user.is_active = False # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] |
def plot(self, freq=None, figsize=(15, 5), title=None,
logy=False, **kwargs):
"""
Helper function for plotting the series.
Args:
* freq (str): Data frequency used for display purposes.
Refer to pandas docs for valid freq strings.
* figsize ((x,y)): figure size
* title (str): Title if default not appropriate
* logy (bool): log-scale for y axis
* kwargs: passed to pandas' plot method
"""
if title is None:
title = self._get_default_plot_title(
freq, 'Equity Progression')
ser = self._get_series(freq).rebase()
return ser.plot(figsize=figsize, logy=logy,
title=title, **kwargs) | def function[plot, parameter[self, freq, figsize, title, logy]]:
constant[
Helper function for plotting the series.
Args:
* freq (str): Data frequency used for display purposes.
Refer to pandas docs for valid freq strings.
* figsize ((x,y)): figure size
* title (str): Title if default not appropriate
* logy (bool): log-scale for y axis
* kwargs: passed to pandas' plot method
]
if compare[name[title] is constant[None]] begin[:]
variable[title] assign[=] call[name[self]._get_default_plot_title, parameter[name[freq], constant[Equity Progression]]]
variable[ser] assign[=] call[call[name[self]._get_series, parameter[name[freq]]].rebase, parameter[]]
return[call[name[ser].plot, parameter[]]] | keyword[def] identifier[plot] ( identifier[self] , identifier[freq] = keyword[None] , identifier[figsize] =( literal[int] , literal[int] ), identifier[title] = keyword[None] ,
identifier[logy] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[title] keyword[is] keyword[None] :
identifier[title] = identifier[self] . identifier[_get_default_plot_title] (
identifier[freq] , literal[string] )
identifier[ser] = identifier[self] . identifier[_get_series] ( identifier[freq] ). identifier[rebase] ()
keyword[return] identifier[ser] . identifier[plot] ( identifier[figsize] = identifier[figsize] , identifier[logy] = identifier[logy] ,
identifier[title] = identifier[title] ,** identifier[kwargs] ) | def plot(self, freq=None, figsize=(15, 5), title=None, logy=False, **kwargs):
"""
Helper function for plotting the series.
Args:
* freq (str): Data frequency used for display purposes.
Refer to pandas docs for valid freq strings.
* figsize ((x,y)): figure size
* title (str): Title if default not appropriate
* logy (bool): log-scale for y axis
* kwargs: passed to pandas' plot method
"""
if title is None:
title = self._get_default_plot_title(freq, 'Equity Progression') # depends on [control=['if'], data=['title']]
ser = self._get_series(freq).rebase()
return ser.plot(figsize=figsize, logy=logy, title=title, **kwargs) |
def until(self, method, message=''):
"""Calls the method provided with the driver as an argument until the \
return value does not evaluate to ``False``.
:param method: callable(WebDriver)
:param message: optional message for :exc:`TimeoutException`
:returns: the result of the last call to `method`
:raises: :exc:`selenium.common.exceptions.TimeoutException` if timeout occurs
"""
screen = None
stacktrace = None
end_time = time.time() + self._timeout
while True:
try:
value = method(self._driver)
if value:
return value
except self._ignored_exceptions as exc:
screen = getattr(exc, 'screen', None)
stacktrace = getattr(exc, 'stacktrace', None)
time.sleep(self._poll)
if time.time() > end_time:
break
raise TimeoutException(message, screen, stacktrace) | def function[until, parameter[self, method, message]]:
constant[Calls the method provided with the driver as an argument until the return value does not evaluate to ``False``.
:param method: callable(WebDriver)
:param message: optional message for :exc:`TimeoutException`
:returns: the result of the last call to `method`
:raises: :exc:`selenium.common.exceptions.TimeoutException` if timeout occurs
]
variable[screen] assign[=] constant[None]
variable[stacktrace] assign[=] constant[None]
variable[end_time] assign[=] binary_operation[call[name[time].time, parameter[]] + name[self]._timeout]
while constant[True] begin[:]
<ast.Try object at 0x7da1b1eb4610>
call[name[time].sleep, parameter[name[self]._poll]]
if compare[call[name[time].time, parameter[]] greater[>] name[end_time]] begin[:]
break
<ast.Raise object at 0x7da20cabfbe0> | keyword[def] identifier[until] ( identifier[self] , identifier[method] , identifier[message] = literal[string] ):
literal[string]
identifier[screen] = keyword[None]
identifier[stacktrace] = keyword[None]
identifier[end_time] = identifier[time] . identifier[time] ()+ identifier[self] . identifier[_timeout]
keyword[while] keyword[True] :
keyword[try] :
identifier[value] = identifier[method] ( identifier[self] . identifier[_driver] )
keyword[if] identifier[value] :
keyword[return] identifier[value]
keyword[except] identifier[self] . identifier[_ignored_exceptions] keyword[as] identifier[exc] :
identifier[screen] = identifier[getattr] ( identifier[exc] , literal[string] , keyword[None] )
identifier[stacktrace] = identifier[getattr] ( identifier[exc] , literal[string] , keyword[None] )
identifier[time] . identifier[sleep] ( identifier[self] . identifier[_poll] )
keyword[if] identifier[time] . identifier[time] ()> identifier[end_time] :
keyword[break]
keyword[raise] identifier[TimeoutException] ( identifier[message] , identifier[screen] , identifier[stacktrace] ) | def until(self, method, message=''):
"""Calls the method provided with the driver as an argument until the return value does not evaluate to ``False``.
:param method: callable(WebDriver)
:param message: optional message for :exc:`TimeoutException`
:returns: the result of the last call to `method`
:raises: :exc:`selenium.common.exceptions.TimeoutException` if timeout occurs
"""
screen = None
stacktrace = None
end_time = time.time() + self._timeout
while True:
try:
value = method(self._driver)
if value:
return value # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except self._ignored_exceptions as exc:
screen = getattr(exc, 'screen', None)
stacktrace = getattr(exc, 'stacktrace', None) # depends on [control=['except'], data=['exc']]
time.sleep(self._poll)
if time.time() > end_time:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
raise TimeoutException(message, screen, stacktrace) |
def write_downloadable(self, organism, export_type='FASTA',
seq_type='peptide', export_format='text',
export_gff3_fasta=False, sequences=[], region=None):
"""
Prepare a download for an organism
:type organism: str
:param organism: organism common name
:type sequences: str
:param sequences: Names of references sequences to add (default is all)
:type export_type: str
:param export_type: Export type. Choices: FASTA, GFF3, VCF
:type seq_type: str
:param seq_type: Export selection. Choices: peptide, cds, cdna, genomic
:type export_format: str
:param export_format: Export format, either gzip or text
:type export_gff3_fasta: bool
:param export_gff3_fasta: Export reference sequence when exporting GFF3 annotations.
:type region: str
:param region: Region to export in form sequence:min..max e.g., chr3:1001..1034
:rtype: dict
:return: a dictionary containing download information
"""
if export_format.lower() not in ('gzip', 'text'):
raise Exception("export_format must be one of file, text")
if export_type.lower() not in ('fasta', 'gff3', 'vcf'):
raise Exception("export_type must be one of FASTA, GFF3, VCF")
data = {
'type': export_type,
'seq_type': seq_type,
'format': export_format,
'sequences': sequences,
'organism': organism,
'output': 'file',
'exportAllSequences': True if not sequences else len(sequences) == 0,
'exportGff3Fasta': export_gff3_fasta,
}
if region:
data['region'] = region
return self.post('write', data) | def function[write_downloadable, parameter[self, organism, export_type, seq_type, export_format, export_gff3_fasta, sequences, region]]:
constant[
Prepare a download for an organism
:type organism: str
:param organism: organism common name
:type sequences: str
:param sequences: Names of references sequences to add (default is all)
:type export_type: str
:param export_type: Export type. Choices: FASTA, GFF3, VCF
:type seq_type: str
:param seq_type: Export selection. Choices: peptide, cds, cdna, genomic
:type export_format: str
:param export_format: Export format, either gzip or text
:type export_gff3_fasta: bool
:param export_gff3_fasta: Export reference sequence when exporting GFF3 annotations.
:type region: str
:param region: Region to export in form sequence:min..max e.g., chr3:1001..1034
:rtype: dict
:return: a dictionary containing download information
]
if compare[call[name[export_format].lower, parameter[]] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b254d540>, <ast.Constant object at 0x7da1b254e230>]]] begin[:]
<ast.Raise object at 0x7da1b254ec50>
if compare[call[name[export_type].lower, parameter[]] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b254d690>, <ast.Constant object at 0x7da1b254da50>, <ast.Constant object at 0x7da1b254c940>]]] begin[:]
<ast.Raise object at 0x7da1b254d4e0>
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b254c460>, <ast.Constant object at 0x7da1b254f730>, <ast.Constant object at 0x7da1b254d360>, <ast.Constant object at 0x7da1b254ded0>, <ast.Constant object at 0x7da1b254d600>, <ast.Constant object at 0x7da1b254e3b0>, <ast.Constant object at 0x7da1b254ca90>, <ast.Constant object at 0x7da1b254d3c0>], [<ast.Name object at 0x7da1b254cc40>, <ast.Name object at 0x7da1b254fa30>, <ast.Name object at 0x7da1b254e590>, <ast.Name object at 0x7da1b254e8c0>, <ast.Name object at 0x7da1b254cc70>, <ast.Constant object at 0x7da1b254eb90>, <ast.IfExp object at 0x7da1b254cd30>, <ast.Name object at 0x7da1b254f4f0>]]
if name[region] begin[:]
call[name[data]][constant[region]] assign[=] name[region]
return[call[name[self].post, parameter[constant[write], name[data]]]] | keyword[def] identifier[write_downloadable] ( identifier[self] , identifier[organism] , identifier[export_type] = literal[string] ,
identifier[seq_type] = literal[string] , identifier[export_format] = literal[string] ,
identifier[export_gff3_fasta] = keyword[False] , identifier[sequences] =[], identifier[region] = keyword[None] ):
literal[string]
keyword[if] identifier[export_format] . identifier[lower] () keyword[not] keyword[in] ( literal[string] , literal[string] ):
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] identifier[export_type] . identifier[lower] () keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] ):
keyword[raise] identifier[Exception] ( literal[string] )
identifier[data] ={
literal[string] : identifier[export_type] ,
literal[string] : identifier[seq_type] ,
literal[string] : identifier[export_format] ,
literal[string] : identifier[sequences] ,
literal[string] : identifier[organism] ,
literal[string] : literal[string] ,
literal[string] : keyword[True] keyword[if] keyword[not] identifier[sequences] keyword[else] identifier[len] ( identifier[sequences] )== literal[int] ,
literal[string] : identifier[export_gff3_fasta] ,
}
keyword[if] identifier[region] :
identifier[data] [ literal[string] ]= identifier[region]
keyword[return] identifier[self] . identifier[post] ( literal[string] , identifier[data] ) | def write_downloadable(self, organism, export_type='FASTA', seq_type='peptide', export_format='text', export_gff3_fasta=False, sequences=[], region=None):
"""
Prepare a download for an organism
:type organism: str
:param organism: organism common name
:type sequences: str
:param sequences: Names of references sequences to add (default is all)
:type export_type: str
:param export_type: Export type. Choices: FASTA, GFF3, VCF
:type seq_type: str
:param seq_type: Export selection. Choices: peptide, cds, cdna, genomic
:type export_format: str
:param export_format: Export format, either gzip or text
:type export_gff3_fasta: bool
:param export_gff3_fasta: Export reference sequence when exporting GFF3 annotations.
:type region: str
:param region: Region to export in form sequence:min..max e.g., chr3:1001..1034
:rtype: dict
:return: a dictionary containing download information
"""
if export_format.lower() not in ('gzip', 'text'):
raise Exception('export_format must be one of file, text') # depends on [control=['if'], data=[]]
if export_type.lower() not in ('fasta', 'gff3', 'vcf'):
raise Exception('export_type must be one of FASTA, GFF3, VCF') # depends on [control=['if'], data=[]]
data = {'type': export_type, 'seq_type': seq_type, 'format': export_format, 'sequences': sequences, 'organism': organism, 'output': 'file', 'exportAllSequences': True if not sequences else len(sequences) == 0, 'exportGff3Fasta': export_gff3_fasta}
if region:
data['region'] = region # depends on [control=['if'], data=[]]
return self.post('write', data) |
def deploy_raiden_contracts(
self,
max_num_of_token_networks: Optional[int],
) -> DeployedContracts:
""" Deploy all required raiden contracts and return a dict of contract_name:address
Args:
max_num_of_token_networks (Optional[int]): The max number of tokens that can be
registered to the TokenNetworkRegistry. If None, the argument is omitted from
the call to the constructor of TokenNetworkRegistry.
"""
deployed_contracts: DeployedContracts = {
'contracts_version': self.contract_version_string(),
'chain_id': int(self.web3.version.network),
'contracts': {},
}
self._deploy_and_remember(CONTRACT_ENDPOINT_REGISTRY, [], deployed_contracts)
secret_registry = self._deploy_and_remember(
contract_name=CONTRACT_SECRET_REGISTRY,
arguments=[],
deployed_contracts=deployed_contracts,
)
token_network_registry_args = [
secret_registry.address,
deployed_contracts['chain_id'],
DEPLOY_SETTLE_TIMEOUT_MIN,
DEPLOY_SETTLE_TIMEOUT_MAX,
]
if max_num_of_token_networks:
token_network_registry_args.append(max_num_of_token_networks)
self._deploy_and_remember(
contract_name=CONTRACT_TOKEN_NETWORK_REGISTRY,
arguments=token_network_registry_args,
deployed_contracts=deployed_contracts,
)
return deployed_contracts | def function[deploy_raiden_contracts, parameter[self, max_num_of_token_networks]]:
constant[ Deploy all required raiden contracts and return a dict of contract_name:address
Args:
max_num_of_token_networks (Optional[int]): The max number of tokens that can be
registered to the TokenNetworkRegistry. If None, the argument is omitted from
the call to the constructor of TokenNetworkRegistry.
]
<ast.AnnAssign object at 0x7da20e9b0820>
call[name[self]._deploy_and_remember, parameter[name[CONTRACT_ENDPOINT_REGISTRY], list[[]], name[deployed_contracts]]]
variable[secret_registry] assign[=] call[name[self]._deploy_and_remember, parameter[]]
variable[token_network_registry_args] assign[=] list[[<ast.Attribute object at 0x7da2054a40d0>, <ast.Subscript object at 0x7da2054a6920>, <ast.Name object at 0x7da2054a4f40>, <ast.Name object at 0x7da2054a6a70>]]
if name[max_num_of_token_networks] begin[:]
call[name[token_network_registry_args].append, parameter[name[max_num_of_token_networks]]]
call[name[self]._deploy_and_remember, parameter[]]
return[name[deployed_contracts]] | keyword[def] identifier[deploy_raiden_contracts] (
identifier[self] ,
identifier[max_num_of_token_networks] : identifier[Optional] [ identifier[int] ],
)-> identifier[DeployedContracts] :
literal[string]
identifier[deployed_contracts] : identifier[DeployedContracts] ={
literal[string] : identifier[self] . identifier[contract_version_string] (),
literal[string] : identifier[int] ( identifier[self] . identifier[web3] . identifier[version] . identifier[network] ),
literal[string] :{},
}
identifier[self] . identifier[_deploy_and_remember] ( identifier[CONTRACT_ENDPOINT_REGISTRY] ,[], identifier[deployed_contracts] )
identifier[secret_registry] = identifier[self] . identifier[_deploy_and_remember] (
identifier[contract_name] = identifier[CONTRACT_SECRET_REGISTRY] ,
identifier[arguments] =[],
identifier[deployed_contracts] = identifier[deployed_contracts] ,
)
identifier[token_network_registry_args] =[
identifier[secret_registry] . identifier[address] ,
identifier[deployed_contracts] [ literal[string] ],
identifier[DEPLOY_SETTLE_TIMEOUT_MIN] ,
identifier[DEPLOY_SETTLE_TIMEOUT_MAX] ,
]
keyword[if] identifier[max_num_of_token_networks] :
identifier[token_network_registry_args] . identifier[append] ( identifier[max_num_of_token_networks] )
identifier[self] . identifier[_deploy_and_remember] (
identifier[contract_name] = identifier[CONTRACT_TOKEN_NETWORK_REGISTRY] ,
identifier[arguments] = identifier[token_network_registry_args] ,
identifier[deployed_contracts] = identifier[deployed_contracts] ,
)
keyword[return] identifier[deployed_contracts] | def deploy_raiden_contracts(self, max_num_of_token_networks: Optional[int]) -> DeployedContracts:
""" Deploy all required raiden contracts and return a dict of contract_name:address
Args:
max_num_of_token_networks (Optional[int]): The max number of tokens that can be
registered to the TokenNetworkRegistry. If None, the argument is omitted from
the call to the constructor of TokenNetworkRegistry.
"""
deployed_contracts: DeployedContracts = {'contracts_version': self.contract_version_string(), 'chain_id': int(self.web3.version.network), 'contracts': {}}
self._deploy_and_remember(CONTRACT_ENDPOINT_REGISTRY, [], deployed_contracts)
secret_registry = self._deploy_and_remember(contract_name=CONTRACT_SECRET_REGISTRY, arguments=[], deployed_contracts=deployed_contracts)
token_network_registry_args = [secret_registry.address, deployed_contracts['chain_id'], DEPLOY_SETTLE_TIMEOUT_MIN, DEPLOY_SETTLE_TIMEOUT_MAX]
if max_num_of_token_networks:
token_network_registry_args.append(max_num_of_token_networks) # depends on [control=['if'], data=[]]
self._deploy_and_remember(contract_name=CONTRACT_TOKEN_NETWORK_REGISTRY, arguments=token_network_registry_args, deployed_contracts=deployed_contracts)
return deployed_contracts |
def sys_dup2(self, fd, newfd):
"""
Duplicates an open fd to newfd. If newfd is open, it is first closed
:rtype: int
:param fd: the open file descriptor to duplicate.
:param newfd: the file descriptor to alias the file described by fd.
:return: newfd.
"""
try:
file = self._get_fd(fd)
except FdError as e:
logger.info("DUP2: Passed fd is not open. Returning EBADF")
return -e.err
soft_max, hard_max = self._rlimits[self.RLIMIT_NOFILE]
if newfd >= soft_max:
logger.info("DUP2: newfd is above max descriptor table size")
return -errno.EBADF
if self._is_fd_open(newfd):
self._close(newfd)
if newfd >= len(self.files):
self.files.extend([None] * (newfd + 1 - len(self.files)))
self.files[newfd] = self.files[fd]
logger.debug('sys_dup2(%d,%d) -> %d', fd, newfd, newfd)
return newfd | def function[sys_dup2, parameter[self, fd, newfd]]:
constant[
Duplicates an open fd to newfd. If newfd is open, it is first closed
:rtype: int
:param fd: the open file descriptor to duplicate.
:param newfd: the file descriptor to alias the file described by fd.
:return: newfd.
]
<ast.Try object at 0x7da1b000e7d0>
<ast.Tuple object at 0x7da1b000d9c0> assign[=] call[name[self]._rlimits][name[self].RLIMIT_NOFILE]
if compare[name[newfd] greater_or_equal[>=] name[soft_max]] begin[:]
call[name[logger].info, parameter[constant[DUP2: newfd is above max descriptor table size]]]
return[<ast.UnaryOp object at 0x7da1b000f700>]
if call[name[self]._is_fd_open, parameter[name[newfd]]] begin[:]
call[name[self]._close, parameter[name[newfd]]]
if compare[name[newfd] greater_or_equal[>=] call[name[len], parameter[name[self].files]]] begin[:]
call[name[self].files.extend, parameter[binary_operation[list[[<ast.Constant object at 0x7da1b000cca0>]] * binary_operation[binary_operation[name[newfd] + constant[1]] - call[name[len], parameter[name[self].files]]]]]]
call[name[self].files][name[newfd]] assign[=] call[name[self].files][name[fd]]
call[name[logger].debug, parameter[constant[sys_dup2(%d,%d) -> %d], name[fd], name[newfd], name[newfd]]]
return[name[newfd]] | keyword[def] identifier[sys_dup2] ( identifier[self] , identifier[fd] , identifier[newfd] ):
literal[string]
keyword[try] :
identifier[file] = identifier[self] . identifier[_get_fd] ( identifier[fd] )
keyword[except] identifier[FdError] keyword[as] identifier[e] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[return] - identifier[e] . identifier[err]
identifier[soft_max] , identifier[hard_max] = identifier[self] . identifier[_rlimits] [ identifier[self] . identifier[RLIMIT_NOFILE] ]
keyword[if] identifier[newfd] >= identifier[soft_max] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[return] - identifier[errno] . identifier[EBADF]
keyword[if] identifier[self] . identifier[_is_fd_open] ( identifier[newfd] ):
identifier[self] . identifier[_close] ( identifier[newfd] )
keyword[if] identifier[newfd] >= identifier[len] ( identifier[self] . identifier[files] ):
identifier[self] . identifier[files] . identifier[extend] ([ keyword[None] ]*( identifier[newfd] + literal[int] - identifier[len] ( identifier[self] . identifier[files] )))
identifier[self] . identifier[files] [ identifier[newfd] ]= identifier[self] . identifier[files] [ identifier[fd] ]
identifier[logger] . identifier[debug] ( literal[string] , identifier[fd] , identifier[newfd] , identifier[newfd] )
keyword[return] identifier[newfd] | def sys_dup2(self, fd, newfd):
"""
Duplicates an open fd to newfd. If newfd is open, it is first closed
:rtype: int
:param fd: the open file descriptor to duplicate.
:param newfd: the file descriptor to alias the file described by fd.
:return: newfd.
"""
try:
file = self._get_fd(fd) # depends on [control=['try'], data=[]]
except FdError as e:
logger.info('DUP2: Passed fd is not open. Returning EBADF')
return -e.err # depends on [control=['except'], data=['e']]
(soft_max, hard_max) = self._rlimits[self.RLIMIT_NOFILE]
if newfd >= soft_max:
logger.info('DUP2: newfd is above max descriptor table size')
return -errno.EBADF # depends on [control=['if'], data=[]]
if self._is_fd_open(newfd):
self._close(newfd) # depends on [control=['if'], data=[]]
if newfd >= len(self.files):
self.files.extend([None] * (newfd + 1 - len(self.files))) # depends on [control=['if'], data=['newfd']]
self.files[newfd] = self.files[fd]
logger.debug('sys_dup2(%d,%d) -> %d', fd, newfd, newfd)
return newfd |
def put_shebang(f, version):
"""
Writes a shebang to the first line of the file according to the specified
version. (2 | 3 | default)
"""
if not contains_shebang(f):
f.seek(0)
original_text = f.read()
f.seek(0)
f.write(shebangs[version] + original_text) | def function[put_shebang, parameter[f, version]]:
constant[
Writes a shebang to the first line of the file according to the specified
version. (2 | 3 | default)
]
if <ast.UnaryOp object at 0x7da20c76c940> begin[:]
call[name[f].seek, parameter[constant[0]]]
variable[original_text] assign[=] call[name[f].read, parameter[]]
call[name[f].seek, parameter[constant[0]]]
call[name[f].write, parameter[binary_operation[call[name[shebangs]][name[version]] + name[original_text]]]] | keyword[def] identifier[put_shebang] ( identifier[f] , identifier[version] ):
literal[string]
keyword[if] keyword[not] identifier[contains_shebang] ( identifier[f] ):
identifier[f] . identifier[seek] ( literal[int] )
identifier[original_text] = identifier[f] . identifier[read] ()
identifier[f] . identifier[seek] ( literal[int] )
identifier[f] . identifier[write] ( identifier[shebangs] [ identifier[version] ]+ identifier[original_text] ) | def put_shebang(f, version):
"""
Writes a shebang to the first line of the file according to the specified
version. (2 | 3 | default)
"""
if not contains_shebang(f):
f.seek(0)
original_text = f.read()
f.seek(0)
f.write(shebangs[version] + original_text) # depends on [control=['if'], data=[]] |
def minimize(value_and_gradients_function,
initial_position,
tolerance=1e-8,
x_tolerance=0,
f_relative_tolerance=0,
initial_inverse_hessian_estimate=None,
max_iterations=50,
parallel_iterations=1,
stopping_condition=None,
name=None):
"""Applies the BFGS algorithm to minimize a differentiable function.
Performs unconstrained minimization of a differentiable function using the
BFGS scheme. For details of the algorithm, see [Nocedal and Wright(2006)][1].
### Usage:
The following example demonstrates the BFGS optimizer attempting to find the
minimum for a simple two dimensional quadratic objective function.
```python
minimum = np.array([1.0, 1.0]) # The center of the quadratic bowl.
scales = np.array([2.0, 3.0]) # The scales along the two axes.
# The objective function and the gradient.
def quadratic(x):
value = tf.reduce_sum(scales * (x - minimum) ** 2)
return value, tf.gradients(value, x)[0]
start = tf.constant([0.6, 0.8]) # Starting point for the search.
optim_results = tfp.optimizer.bfgs_minimize(
quadratic, initial_position=start, tolerance=1e-8)
with tf.Session() as session:
results = session.run(optim_results)
# Check that the search converged
assert(results.converged)
# Check that the argmin is close to the actual value.
np.testing.assert_allclose(results.position, minimum)
# Print out the total number of function evaluations it took. Should be 6.
print ("Function evaluations: %d" % results.num_objective_evaluations)
```
### References:
[1]: Jorge Nocedal, Stephen Wright. Numerical Optimization. Springer Series in
Operations Research. pp 136-140. 2006
http://pages.mtu.edu/~struther/Courses/OLD/Sp2013/5630/Jorge_Nocedal_Numerical_optimization_267490.pdf
Args:
value_and_gradients_function: A Python callable that accepts a point as a
real `Tensor` and returns a tuple of `Tensor`s of real dtype containing
the value of the function and its gradient at that point. The function
to be minimized. The input should be of shape `[..., n]`, where `n` is
the size of the domain of input points, and all others are batching
dimensions. The first component of the return value should be a real
`Tensor` of matching shape `[...]`. The second component (the gradient)
should also be of shape `[..., n]` like the input value to the function.
initial_position: real `Tensor` of shape `[..., n]`. The starting point, or
points when using batching dimensions, of the search procedure. At these
points the function value and the gradient norm should be finite.
tolerance: Scalar `Tensor` of real dtype. Specifies the gradient tolerance
for the procedure. If the supremum norm of the gradient vector is below
this number, the algorithm is stopped.
x_tolerance: Scalar `Tensor` of real dtype. If the absolute change in the
position between one iteration and the next is smaller than this number,
the algorithm is stopped.
f_relative_tolerance: Scalar `Tensor` of real dtype. If the relative change
in the objective value between one iteration and the next is smaller
than this value, the algorithm is stopped.
initial_inverse_hessian_estimate: Optional `Tensor` of the same dtype
as the components of the output of the `value_and_gradients_function`.
If specified, the shape should broadcastable to shape `[..., n, n]`; e.g.
if a single `[n, n]` matrix is provided, it will be automatically
broadcasted to all batches. Alternatively, one can also specify a
different hessian estimate for each batch member.
For the correctness of the algorithm, it is required that this parameter
be symmetric and positive definite. Specifies the starting estimate for
the inverse of the Hessian at the initial point. If not specified,
the identity matrix is used as the starting estimate for the
inverse Hessian.
max_iterations: Scalar positive int32 `Tensor`. The maximum number of
iterations for BFGS updates.
parallel_iterations: Positive integer. The number of iterations allowed to
run in parallel.
stopping_condition: (Optional) A Python function that takes as input two
Boolean tensors of shape `[...]`, and returns a Boolean scalar tensor.
The input tensors are `converged` and `failed`, indicating the current
status of each respective batch member; the return value states whether
the algorithm should stop. The default is tfp.optimizer.converged_all
which only stops when all batch members have either converged or failed.
An alternative is tfp.optimizer.converged_any which stops as soon as one
batch member has converged, or when all have failed.
name: (Optional) Python str. The name prefixed to the ops created by this
function. If not supplied, the default name 'minimize' is used.
Returns:
optimizer_results: A namedtuple containing the following items:
converged: boolean tensor of shape `[...]` indicating for each batch
member whether the minimum was found within tolerance.
failed: boolean tensor of shape `[...]` indicating for each batch
member whether a line search step failed to find a suitable step size
satisfying Wolfe conditions. In the absence of any constraints on the
number of objective evaluations permitted, this value will
be the complement of `converged`. However, if there is
a constraint and the search stopped due to available
evaluations being exhausted, both `failed` and `converged`
will be simultaneously False.
num_objective_evaluations: The total number of objective
evaluations performed.
position: A tensor of shape `[..., n]` containing the last argument value
found during the search from each starting point. If the search
converged, then this value is the argmin of the objective function.
objective_value: A tensor of shape `[...]` with the value of the
objective function at the `position`. If the search converged, then
this is the (local) minimum of the objective function.
objective_gradient: A tensor of shape `[..., n]` containing the gradient
of the objective function at the `position`. If the search converged
the max-norm of this tensor should be below the tolerance.
inverse_hessian_estimate: A tensor of shape `[..., n, n]` containing the
inverse of the estimated Hessian.
"""
with tf.compat.v1.name_scope(
name, 'minimize',
[initial_position, tolerance, initial_inverse_hessian_estimate]):
initial_position = tf.convert_to_tensor(
value=initial_position, name='initial_position')
dtype = initial_position.dtype.base_dtype
tolerance = tf.convert_to_tensor(
value=tolerance, dtype=dtype, name='grad_tolerance')
f_relative_tolerance = tf.convert_to_tensor(
value=f_relative_tolerance, dtype=dtype, name='f_relative_tolerance')
x_tolerance = tf.convert_to_tensor(
value=x_tolerance, dtype=dtype, name='x_tolerance')
max_iterations = tf.convert_to_tensor(
value=max_iterations, name='max_iterations')
input_shape = distribution_util.prefer_static_shape(initial_position)
batch_shape, domain_size = input_shape[:-1], input_shape[-1]
if stopping_condition is None:
stopping_condition = bfgs_utils.converged_all
# Control inputs are an optional list of tensors to evaluate before
# the start of the search procedure. These can be used to assert the
# validity of inputs to the search procedure.
control_inputs = None
if initial_inverse_hessian_estimate is None:
# Create a default initial inverse Hessian.
initial_inv_hessian = tf.eye(domain_size,
batch_shape=batch_shape,
dtype=dtype,
name='initial_inv_hessian')
else:
# If an initial inverse Hessian is supplied, compute some control inputs
# to ensure that it is positive definite and symmetric.
initial_inv_hessian = tf.convert_to_tensor(
value=initial_inverse_hessian_estimate,
dtype=dtype,
name='initial_inv_hessian')
control_inputs = _inv_hessian_control_inputs(initial_inv_hessian)
hessian_shape = tf.concat([batch_shape, [domain_size, domain_size]], 0)
initial_inv_hessian = tf.broadcast_to(initial_inv_hessian, hessian_shape)
# The `state` here is a `BfgsOptimizerResults` tuple with values for the
# current state of the algorithm computation.
def _cond(state):
"""Continue if iterations remain and stopping condition is not met."""
return ((state.num_iterations < max_iterations) &
tf.logical_not(stopping_condition(state.converged, state.failed)))
def _body(state):
"""Main optimization loop."""
search_direction = _get_search_direction(state.inverse_hessian_estimate,
state.objective_gradient)
derivative_at_start_pt = tf.reduce_sum(
input_tensor=state.objective_gradient * search_direction, axis=-1)
# If the derivative at the start point is not negative, recompute the
# search direction with the initial inverse Hessian.
needs_reset = (~state.failed & ~state.converged &
(derivative_at_start_pt >= 0))
search_direction_reset = _get_search_direction(
initial_inv_hessian, state.objective_gradient)
actual_serch_direction = tf.where(
needs_reset, search_direction_reset, search_direction)
actual_inv_hessian = tf.where(
needs_reset, initial_inv_hessian, state.inverse_hessian_estimate)
# Replace the hessian estimate in the state, in case it had to be reset.
current_state = bfgs_utils.update_fields(
state, inverse_hessian_estimate=actual_inv_hessian)
next_state = bfgs_utils.line_search_step(
current_state,
value_and_gradients_function, actual_serch_direction,
tolerance, f_relative_tolerance, x_tolerance, stopping_condition)
# Update the inverse Hessian if needed and continue.
return [_update_inv_hessian(current_state, next_state)]
kwargs = bfgs_utils.get_initial_state_args(
value_and_gradients_function,
initial_position,
tolerance,
control_inputs)
kwargs['inverse_hessian_estimate'] = initial_inv_hessian
initial_state = BfgsOptimizerResults(**kwargs)
return tf.while_loop(
cond=_cond,
body=_body,
loop_vars=[initial_state],
parallel_iterations=parallel_iterations)[0] | def function[minimize, parameter[value_and_gradients_function, initial_position, tolerance, x_tolerance, f_relative_tolerance, initial_inverse_hessian_estimate, max_iterations, parallel_iterations, stopping_condition, name]]:
constant[Applies the BFGS algorithm to minimize a differentiable function.
Performs unconstrained minimization of a differentiable function using the
BFGS scheme. For details of the algorithm, see [Nocedal and Wright(2006)][1].
### Usage:
The following example demonstrates the BFGS optimizer attempting to find the
minimum for a simple two dimensional quadratic objective function.
```python
minimum = np.array([1.0, 1.0]) # The center of the quadratic bowl.
scales = np.array([2.0, 3.0]) # The scales along the two axes.
# The objective function and the gradient.
def quadratic(x):
value = tf.reduce_sum(scales * (x - minimum) ** 2)
return value, tf.gradients(value, x)[0]
start = tf.constant([0.6, 0.8]) # Starting point for the search.
optim_results = tfp.optimizer.bfgs_minimize(
quadratic, initial_position=start, tolerance=1e-8)
with tf.Session() as session:
results = session.run(optim_results)
# Check that the search converged
assert(results.converged)
# Check that the argmin is close to the actual value.
np.testing.assert_allclose(results.position, minimum)
# Print out the total number of function evaluations it took. Should be 6.
print ("Function evaluations: %d" % results.num_objective_evaluations)
```
### References:
[1]: Jorge Nocedal, Stephen Wright. Numerical Optimization. Springer Series in
Operations Research. pp 136-140. 2006
http://pages.mtu.edu/~struther/Courses/OLD/Sp2013/5630/Jorge_Nocedal_Numerical_optimization_267490.pdf
Args:
value_and_gradients_function: A Python callable that accepts a point as a
real `Tensor` and returns a tuple of `Tensor`s of real dtype containing
the value of the function and its gradient at that point. The function
to be minimized. The input should be of shape `[..., n]`, where `n` is
the size of the domain of input points, and all others are batching
dimensions. The first component of the return value should be a real
`Tensor` of matching shape `[...]`. The second component (the gradient)
should also be of shape `[..., n]` like the input value to the function.
initial_position: real `Tensor` of shape `[..., n]`. The starting point, or
points when using batching dimensions, of the search procedure. At these
points the function value and the gradient norm should be finite.
tolerance: Scalar `Tensor` of real dtype. Specifies the gradient tolerance
for the procedure. If the supremum norm of the gradient vector is below
this number, the algorithm is stopped.
x_tolerance: Scalar `Tensor` of real dtype. If the absolute change in the
position between one iteration and the next is smaller than this number,
the algorithm is stopped.
f_relative_tolerance: Scalar `Tensor` of real dtype. If the relative change
in the objective value between one iteration and the next is smaller
than this value, the algorithm is stopped.
initial_inverse_hessian_estimate: Optional `Tensor` of the same dtype
as the components of the output of the `value_and_gradients_function`.
If specified, the shape should broadcastable to shape `[..., n, n]`; e.g.
if a single `[n, n]` matrix is provided, it will be automatically
broadcasted to all batches. Alternatively, one can also specify a
different hessian estimate for each batch member.
For the correctness of the algorithm, it is required that this parameter
be symmetric and positive definite. Specifies the starting estimate for
the inverse of the Hessian at the initial point. If not specified,
the identity matrix is used as the starting estimate for the
inverse Hessian.
max_iterations: Scalar positive int32 `Tensor`. The maximum number of
iterations for BFGS updates.
parallel_iterations: Positive integer. The number of iterations allowed to
run in parallel.
stopping_condition: (Optional) A Python function that takes as input two
Boolean tensors of shape `[...]`, and returns a Boolean scalar tensor.
The input tensors are `converged` and `failed`, indicating the current
status of each respective batch member; the return value states whether
the algorithm should stop. The default is tfp.optimizer.converged_all
which only stops when all batch members have either converged or failed.
An alternative is tfp.optimizer.converged_any which stops as soon as one
batch member has converged, or when all have failed.
name: (Optional) Python str. The name prefixed to the ops created by this
function. If not supplied, the default name 'minimize' is used.
Returns:
optimizer_results: A namedtuple containing the following items:
converged: boolean tensor of shape `[...]` indicating for each batch
member whether the minimum was found within tolerance.
failed: boolean tensor of shape `[...]` indicating for each batch
member whether a line search step failed to find a suitable step size
satisfying Wolfe conditions. In the absence of any constraints on the
number of objective evaluations permitted, this value will
be the complement of `converged`. However, if there is
a constraint and the search stopped due to available
evaluations being exhausted, both `failed` and `converged`
will be simultaneously False.
num_objective_evaluations: The total number of objective
evaluations performed.
position: A tensor of shape `[..., n]` containing the last argument value
found during the search from each starting point. If the search
converged, then this value is the argmin of the objective function.
objective_value: A tensor of shape `[...]` with the value of the
objective function at the `position`. If the search converged, then
this is the (local) minimum of the objective function.
objective_gradient: A tensor of shape `[..., n]` containing the gradient
of the objective function at the `position`. If the search converged
the max-norm of this tensor should be below the tolerance.
inverse_hessian_estimate: A tensor of shape `[..., n, n]` containing the
inverse of the estimated Hessian.
]
with call[name[tf].compat.v1.name_scope, parameter[name[name], constant[minimize], list[[<ast.Name object at 0x7da1b023fb80>, <ast.Name object at 0x7da1b023fb50>, <ast.Name object at 0x7da1b023fb20>]]]] begin[:]
variable[initial_position] assign[=] call[name[tf].convert_to_tensor, parameter[]]
variable[dtype] assign[=] name[initial_position].dtype.base_dtype
variable[tolerance] assign[=] call[name[tf].convert_to_tensor, parameter[]]
variable[f_relative_tolerance] assign[=] call[name[tf].convert_to_tensor, parameter[]]
variable[x_tolerance] assign[=] call[name[tf].convert_to_tensor, parameter[]]
variable[max_iterations] assign[=] call[name[tf].convert_to_tensor, parameter[]]
variable[input_shape] assign[=] call[name[distribution_util].prefer_static_shape, parameter[name[initial_position]]]
<ast.Tuple object at 0x7da1b023e260> assign[=] tuple[[<ast.Subscript object at 0x7da1b023e1a0>, <ast.Subscript object at 0x7da1b023e0b0>]]
if compare[name[stopping_condition] is constant[None]] begin[:]
variable[stopping_condition] assign[=] name[bfgs_utils].converged_all
variable[control_inputs] assign[=] constant[None]
if compare[name[initial_inverse_hessian_estimate] is constant[None]] begin[:]
variable[initial_inv_hessian] assign[=] call[name[tf].eye, parameter[name[domain_size]]]
def function[_cond, parameter[state]]:
constant[Continue if iterations remain and stopping condition is not met.]
return[binary_operation[compare[name[state].num_iterations less[<] name[max_iterations]] <ast.BitAnd object at 0x7da2590d6b60> call[name[tf].logical_not, parameter[call[name[stopping_condition], parameter[name[state].converged, name[state].failed]]]]]]
def function[_body, parameter[state]]:
constant[Main optimization loop.]
variable[search_direction] assign[=] call[name[_get_search_direction], parameter[name[state].inverse_hessian_estimate, name[state].objective_gradient]]
variable[derivative_at_start_pt] assign[=] call[name[tf].reduce_sum, parameter[]]
variable[needs_reset] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b023ca90> <ast.BitAnd object at 0x7da2590d6b60> <ast.UnaryOp object at 0x7da1b023ca00>] <ast.BitAnd object at 0x7da2590d6b60> compare[name[derivative_at_start_pt] greater_or_equal[>=] constant[0]]]
variable[search_direction_reset] assign[=] call[name[_get_search_direction], parameter[name[initial_inv_hessian], name[state].objective_gradient]]
variable[actual_serch_direction] assign[=] call[name[tf].where, parameter[name[needs_reset], name[search_direction_reset], name[search_direction]]]
variable[actual_inv_hessian] assign[=] call[name[tf].where, parameter[name[needs_reset], name[initial_inv_hessian], name[state].inverse_hessian_estimate]]
variable[current_state] assign[=] call[name[bfgs_utils].update_fields, parameter[name[state]]]
variable[next_state] assign[=] call[name[bfgs_utils].line_search_step, parameter[name[current_state], name[value_and_gradients_function], name[actual_serch_direction], name[tolerance], name[f_relative_tolerance], name[x_tolerance], name[stopping_condition]]]
return[list[[<ast.Call object at 0x7da1b023c040>]]]
variable[kwargs] assign[=] call[name[bfgs_utils].get_initial_state_args, parameter[name[value_and_gradients_function], name[initial_position], name[tolerance], name[control_inputs]]]
call[name[kwargs]][constant[inverse_hessian_estimate]] assign[=] name[initial_inv_hessian]
variable[initial_state] assign[=] call[name[BfgsOptimizerResults], parameter[]]
return[call[call[name[tf].while_loop, parameter[]]][constant[0]]] | keyword[def] identifier[minimize] ( identifier[value_and_gradients_function] ,
identifier[initial_position] ,
identifier[tolerance] = literal[int] ,
identifier[x_tolerance] = literal[int] ,
identifier[f_relative_tolerance] = literal[int] ,
identifier[initial_inverse_hessian_estimate] = keyword[None] ,
identifier[max_iterations] = literal[int] ,
identifier[parallel_iterations] = literal[int] ,
identifier[stopping_condition] = keyword[None] ,
identifier[name] = keyword[None] ):
literal[string]
keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[name_scope] (
identifier[name] , literal[string] ,
[ identifier[initial_position] , identifier[tolerance] , identifier[initial_inverse_hessian_estimate] ]):
identifier[initial_position] = identifier[tf] . identifier[convert_to_tensor] (
identifier[value] = identifier[initial_position] , identifier[name] = literal[string] )
identifier[dtype] = identifier[initial_position] . identifier[dtype] . identifier[base_dtype]
identifier[tolerance] = identifier[tf] . identifier[convert_to_tensor] (
identifier[value] = identifier[tolerance] , identifier[dtype] = identifier[dtype] , identifier[name] = literal[string] )
identifier[f_relative_tolerance] = identifier[tf] . identifier[convert_to_tensor] (
identifier[value] = identifier[f_relative_tolerance] , identifier[dtype] = identifier[dtype] , identifier[name] = literal[string] )
identifier[x_tolerance] = identifier[tf] . identifier[convert_to_tensor] (
identifier[value] = identifier[x_tolerance] , identifier[dtype] = identifier[dtype] , identifier[name] = literal[string] )
identifier[max_iterations] = identifier[tf] . identifier[convert_to_tensor] (
identifier[value] = identifier[max_iterations] , identifier[name] = literal[string] )
identifier[input_shape] = identifier[distribution_util] . identifier[prefer_static_shape] ( identifier[initial_position] )
identifier[batch_shape] , identifier[domain_size] = identifier[input_shape] [:- literal[int] ], identifier[input_shape] [- literal[int] ]
keyword[if] identifier[stopping_condition] keyword[is] keyword[None] :
identifier[stopping_condition] = identifier[bfgs_utils] . identifier[converged_all]
identifier[control_inputs] = keyword[None]
keyword[if] identifier[initial_inverse_hessian_estimate] keyword[is] keyword[None] :
identifier[initial_inv_hessian] = identifier[tf] . identifier[eye] ( identifier[domain_size] ,
identifier[batch_shape] = identifier[batch_shape] ,
identifier[dtype] = identifier[dtype] ,
identifier[name] = literal[string] )
keyword[else] :
identifier[initial_inv_hessian] = identifier[tf] . identifier[convert_to_tensor] (
identifier[value] = identifier[initial_inverse_hessian_estimate] ,
identifier[dtype] = identifier[dtype] ,
identifier[name] = literal[string] )
identifier[control_inputs] = identifier[_inv_hessian_control_inputs] ( identifier[initial_inv_hessian] )
identifier[hessian_shape] = identifier[tf] . identifier[concat] ([ identifier[batch_shape] ,[ identifier[domain_size] , identifier[domain_size] ]], literal[int] )
identifier[initial_inv_hessian] = identifier[tf] . identifier[broadcast_to] ( identifier[initial_inv_hessian] , identifier[hessian_shape] )
keyword[def] identifier[_cond] ( identifier[state] ):
literal[string]
keyword[return] (( identifier[state] . identifier[num_iterations] < identifier[max_iterations] )&
identifier[tf] . identifier[logical_not] ( identifier[stopping_condition] ( identifier[state] . identifier[converged] , identifier[state] . identifier[failed] )))
keyword[def] identifier[_body] ( identifier[state] ):
literal[string]
identifier[search_direction] = identifier[_get_search_direction] ( identifier[state] . identifier[inverse_hessian_estimate] ,
identifier[state] . identifier[objective_gradient] )
identifier[derivative_at_start_pt] = identifier[tf] . identifier[reduce_sum] (
identifier[input_tensor] = identifier[state] . identifier[objective_gradient] * identifier[search_direction] , identifier[axis] =- literal[int] )
identifier[needs_reset] =(~ identifier[state] . identifier[failed] &~ identifier[state] . identifier[converged] &
( identifier[derivative_at_start_pt] >= literal[int] ))
identifier[search_direction_reset] = identifier[_get_search_direction] (
identifier[initial_inv_hessian] , identifier[state] . identifier[objective_gradient] )
identifier[actual_serch_direction] = identifier[tf] . identifier[where] (
identifier[needs_reset] , identifier[search_direction_reset] , identifier[search_direction] )
identifier[actual_inv_hessian] = identifier[tf] . identifier[where] (
identifier[needs_reset] , identifier[initial_inv_hessian] , identifier[state] . identifier[inverse_hessian_estimate] )
identifier[current_state] = identifier[bfgs_utils] . identifier[update_fields] (
identifier[state] , identifier[inverse_hessian_estimate] = identifier[actual_inv_hessian] )
identifier[next_state] = identifier[bfgs_utils] . identifier[line_search_step] (
identifier[current_state] ,
identifier[value_and_gradients_function] , identifier[actual_serch_direction] ,
identifier[tolerance] , identifier[f_relative_tolerance] , identifier[x_tolerance] , identifier[stopping_condition] )
keyword[return] [ identifier[_update_inv_hessian] ( identifier[current_state] , identifier[next_state] )]
identifier[kwargs] = identifier[bfgs_utils] . identifier[get_initial_state_args] (
identifier[value_and_gradients_function] ,
identifier[initial_position] ,
identifier[tolerance] ,
identifier[control_inputs] )
identifier[kwargs] [ literal[string] ]= identifier[initial_inv_hessian]
identifier[initial_state] = identifier[BfgsOptimizerResults] (** identifier[kwargs] )
keyword[return] identifier[tf] . identifier[while_loop] (
identifier[cond] = identifier[_cond] ,
identifier[body] = identifier[_body] ,
identifier[loop_vars] =[ identifier[initial_state] ],
identifier[parallel_iterations] = identifier[parallel_iterations] )[ literal[int] ] | def minimize(value_and_gradients_function, initial_position, tolerance=1e-08, x_tolerance=0, f_relative_tolerance=0, initial_inverse_hessian_estimate=None, max_iterations=50, parallel_iterations=1, stopping_condition=None, name=None):
"""Applies the BFGS algorithm to minimize a differentiable function.
Performs unconstrained minimization of a differentiable function using the
BFGS scheme. For details of the algorithm, see [Nocedal and Wright(2006)][1].
### Usage:
The following example demonstrates the BFGS optimizer attempting to find the
minimum for a simple two dimensional quadratic objective function.
```python
minimum = np.array([1.0, 1.0]) # The center of the quadratic bowl.
scales = np.array([2.0, 3.0]) # The scales along the two axes.
# The objective function and the gradient.
def quadratic(x):
value = tf.reduce_sum(scales * (x - minimum) ** 2)
return value, tf.gradients(value, x)[0]
start = tf.constant([0.6, 0.8]) # Starting point for the search.
optim_results = tfp.optimizer.bfgs_minimize(
quadratic, initial_position=start, tolerance=1e-8)
with tf.Session() as session:
results = session.run(optim_results)
# Check that the search converged
assert(results.converged)
# Check that the argmin is close to the actual value.
np.testing.assert_allclose(results.position, minimum)
# Print out the total number of function evaluations it took. Should be 6.
print ("Function evaluations: %d" % results.num_objective_evaluations)
```
### References:
[1]: Jorge Nocedal, Stephen Wright. Numerical Optimization. Springer Series in
Operations Research. pp 136-140. 2006
http://pages.mtu.edu/~struther/Courses/OLD/Sp2013/5630/Jorge_Nocedal_Numerical_optimization_267490.pdf
Args:
value_and_gradients_function: A Python callable that accepts a point as a
real `Tensor` and returns a tuple of `Tensor`s of real dtype containing
the value of the function and its gradient at that point. The function
to be minimized. The input should be of shape `[..., n]`, where `n` is
the size of the domain of input points, and all others are batching
dimensions. The first component of the return value should be a real
`Tensor` of matching shape `[...]`. The second component (the gradient)
should also be of shape `[..., n]` like the input value to the function.
initial_position: real `Tensor` of shape `[..., n]`. The starting point, or
points when using batching dimensions, of the search procedure. At these
points the function value and the gradient norm should be finite.
tolerance: Scalar `Tensor` of real dtype. Specifies the gradient tolerance
for the procedure. If the supremum norm of the gradient vector is below
this number, the algorithm is stopped.
x_tolerance: Scalar `Tensor` of real dtype. If the absolute change in the
position between one iteration and the next is smaller than this number,
the algorithm is stopped.
f_relative_tolerance: Scalar `Tensor` of real dtype. If the relative change
in the objective value between one iteration and the next is smaller
than this value, the algorithm is stopped.
initial_inverse_hessian_estimate: Optional `Tensor` of the same dtype
as the components of the output of the `value_and_gradients_function`.
If specified, the shape should broadcastable to shape `[..., n, n]`; e.g.
if a single `[n, n]` matrix is provided, it will be automatically
broadcasted to all batches. Alternatively, one can also specify a
different hessian estimate for each batch member.
For the correctness of the algorithm, it is required that this parameter
be symmetric and positive definite. Specifies the starting estimate for
the inverse of the Hessian at the initial point. If not specified,
the identity matrix is used as the starting estimate for the
inverse Hessian.
max_iterations: Scalar positive int32 `Tensor`. The maximum number of
iterations for BFGS updates.
parallel_iterations: Positive integer. The number of iterations allowed to
run in parallel.
stopping_condition: (Optional) A Python function that takes as input two
Boolean tensors of shape `[...]`, and returns a Boolean scalar tensor.
The input tensors are `converged` and `failed`, indicating the current
status of each respective batch member; the return value states whether
the algorithm should stop. The default is tfp.optimizer.converged_all
which only stops when all batch members have either converged or failed.
An alternative is tfp.optimizer.converged_any which stops as soon as one
batch member has converged, or when all have failed.
name: (Optional) Python str. The name prefixed to the ops created by this
function. If not supplied, the default name 'minimize' is used.
Returns:
optimizer_results: A namedtuple containing the following items:
converged: boolean tensor of shape `[...]` indicating for each batch
member whether the minimum was found within tolerance.
failed: boolean tensor of shape `[...]` indicating for each batch
member whether a line search step failed to find a suitable step size
satisfying Wolfe conditions. In the absence of any constraints on the
number of objective evaluations permitted, this value will
be the complement of `converged`. However, if there is
a constraint and the search stopped due to available
evaluations being exhausted, both `failed` and `converged`
will be simultaneously False.
num_objective_evaluations: The total number of objective
evaluations performed.
position: A tensor of shape `[..., n]` containing the last argument value
found during the search from each starting point. If the search
converged, then this value is the argmin of the objective function.
objective_value: A tensor of shape `[...]` with the value of the
objective function at the `position`. If the search converged, then
this is the (local) minimum of the objective function.
objective_gradient: A tensor of shape `[..., n]` containing the gradient
of the objective function at the `position`. If the search converged
the max-norm of this tensor should be below the tolerance.
inverse_hessian_estimate: A tensor of shape `[..., n, n]` containing the
inverse of the estimated Hessian.
"""
with tf.compat.v1.name_scope(name, 'minimize', [initial_position, tolerance, initial_inverse_hessian_estimate]):
initial_position = tf.convert_to_tensor(value=initial_position, name='initial_position')
dtype = initial_position.dtype.base_dtype
tolerance = tf.convert_to_tensor(value=tolerance, dtype=dtype, name='grad_tolerance')
f_relative_tolerance = tf.convert_to_tensor(value=f_relative_tolerance, dtype=dtype, name='f_relative_tolerance')
x_tolerance = tf.convert_to_tensor(value=x_tolerance, dtype=dtype, name='x_tolerance')
max_iterations = tf.convert_to_tensor(value=max_iterations, name='max_iterations')
input_shape = distribution_util.prefer_static_shape(initial_position)
(batch_shape, domain_size) = (input_shape[:-1], input_shape[-1])
if stopping_condition is None:
stopping_condition = bfgs_utils.converged_all # depends on [control=['if'], data=['stopping_condition']]
# Control inputs are an optional list of tensors to evaluate before
# the start of the search procedure. These can be used to assert the
# validity of inputs to the search procedure.
control_inputs = None
if initial_inverse_hessian_estimate is None:
# Create a default initial inverse Hessian.
initial_inv_hessian = tf.eye(domain_size, batch_shape=batch_shape, dtype=dtype, name='initial_inv_hessian') # depends on [control=['if'], data=[]]
else:
# If an initial inverse Hessian is supplied, compute some control inputs
# to ensure that it is positive definite and symmetric.
initial_inv_hessian = tf.convert_to_tensor(value=initial_inverse_hessian_estimate, dtype=dtype, name='initial_inv_hessian')
control_inputs = _inv_hessian_control_inputs(initial_inv_hessian)
hessian_shape = tf.concat([batch_shape, [domain_size, domain_size]], 0)
initial_inv_hessian = tf.broadcast_to(initial_inv_hessian, hessian_shape)
# The `state` here is a `BfgsOptimizerResults` tuple with values for the
# current state of the algorithm computation.
def _cond(state):
"""Continue if iterations remain and stopping condition is not met."""
return (state.num_iterations < max_iterations) & tf.logical_not(stopping_condition(state.converged, state.failed))
def _body(state):
"""Main optimization loop."""
search_direction = _get_search_direction(state.inverse_hessian_estimate, state.objective_gradient)
derivative_at_start_pt = tf.reduce_sum(input_tensor=state.objective_gradient * search_direction, axis=-1)
# If the derivative at the start point is not negative, recompute the
# search direction with the initial inverse Hessian.
needs_reset = ~state.failed & ~state.converged & (derivative_at_start_pt >= 0)
search_direction_reset = _get_search_direction(initial_inv_hessian, state.objective_gradient)
actual_serch_direction = tf.where(needs_reset, search_direction_reset, search_direction)
actual_inv_hessian = tf.where(needs_reset, initial_inv_hessian, state.inverse_hessian_estimate)
# Replace the hessian estimate in the state, in case it had to be reset.
current_state = bfgs_utils.update_fields(state, inverse_hessian_estimate=actual_inv_hessian)
next_state = bfgs_utils.line_search_step(current_state, value_and_gradients_function, actual_serch_direction, tolerance, f_relative_tolerance, x_tolerance, stopping_condition)
# Update the inverse Hessian if needed and continue.
return [_update_inv_hessian(current_state, next_state)]
kwargs = bfgs_utils.get_initial_state_args(value_and_gradients_function, initial_position, tolerance, control_inputs)
kwargs['inverse_hessian_estimate'] = initial_inv_hessian
initial_state = BfgsOptimizerResults(**kwargs)
return tf.while_loop(cond=_cond, body=_body, loop_vars=[initial_state], parallel_iterations=parallel_iterations)[0] # depends on [control=['with'], data=[]] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.