code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def get_new_project_name(self, project_name):
    """
    Return a unique project name for the copy.

    The current UTC time (minute resolution) is appended so repeated
    copies of the same project receive distinct names.

    :param project_name: str: name of project we will copy
    :return: str: "<project_name> YYYY-MM-DD HH:MM"
    """
    # datetime.utcnow() is deprecated; an explicitly UTC-aware "now"
    # produces the identical wall-clock timestamp string.
    now_utc = datetime.datetime.now(datetime.timezone.utc)
    timestamp_str = now_utc.strftime('%Y-%m-%d %H:%M')
    return "{} {}".format(project_name, timestamp_str)
constant[
Return a unique project name for the copy.
:param project_name: str: name of project we will copy
:return: str
]
variable[timestamp_str] assign[=] call[call[name[datetime].datetime.utcnow, parameter[]].strftime, parameter[constant[%Y-%m-%d %H:%M]]]
return[call[constant[{} {}].format, parameter[name[project_name], name[timestamp_str]]]] | keyword[def] identifier[get_new_project_name] ( identifier[self] , identifier[project_name] ):
literal[string]
identifier[timestamp_str] = identifier[datetime] . identifier[datetime] . identifier[utcnow] (). identifier[strftime] ( literal[string] )
keyword[return] literal[string] . identifier[format] ( identifier[project_name] , identifier[timestamp_str] ) | def get_new_project_name(self, project_name):
"""
Return a unique project name for the copy.
:param project_name: str: name of project we will copy
:return: str
"""
timestamp_str = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M')
return '{} {}'.format(project_name, timestamp_str) |
def execute_ccm_remotely(remote_options, ccm_args):
    """
    Execute CCM operation(s) remotely

    :return A tuple defining the execution of the command
                * output - The output of the execution if the output was not displayed
                * exit_status - The exit status of remotely executed script
    :raises Exception if invalid options are passed for `--dse-credentials`, `--ssl`, or
                      `--node-ssl` when initiating a remote execution; also if
                      error occurred during ssh connection
    """
    if not PARAMIKO_IS_AVAILABLE:
        # logging.warn is a deprecated alias; logging.warning is the supported API.
        logging.warning("Paramiko is not Available: Skipping remote execution of CCM command")
        return None, None

    # Create the SSH client
    ssh_client = SSHClient(remote_options.ssh_host, remote_options.ssh_port,
                           remote_options.ssh_username, remote_options.ssh_password,
                           remote_options.ssh_private_key)

    # Handle CCM arguments that require SFTP
    for index, argument in enumerate(ccm_args):
        # Determine if DSE credentials argument is being used
        if "--dse-credentials" in argument:
            # Get the filename being used for the DSE credentials
            tokens = argument.split("=")
            credentials_path = os.path.join(os.path.expanduser("~"), ".ccm", ".dse.ini")
            if len(tokens) == 2:
                credentials_path = tokens[1]

            # Ensure the credential file exists locally and copy to remote host
            if not os.path.isfile(credentials_path):
                raise Exception("DSE Credentials File Does not Exist: %s"
                                % credentials_path)
            ssh_client.put(credentials_path, ssh_client.ccm_config_dir)

            # Update the DSE credentials argument; the remote side falls back
            # to its default credentials location after the upload above.
            ccm_args[index] = "--dse-credentials"

        # Determine if SSL or node SSL path argument is being used
        if "--ssl" in argument or "--node-ssl" in argument:
            # Get the directory being used for the path
            tokens = argument.split("=")
            if len(tokens) != 2:
                raise Exception("Path is not Specified: %s" % argument)
            ssl_path = tokens[1]

            # Ensure the path exists locally and copy to remote host
            if not os.path.isdir(ssl_path):
                raise Exception("Path Does not Exist: %s" % ssl_path)
            # NOTE(review): assumes ssh_client.temp already ends with a path
            # separator -- confirm against SSHClient before changing this.
            remote_ssl_path = ssh_client.temp + os.path.basename(ssl_path)
            ssh_client.put(ssl_path, remote_ssl_path)

            # Update the argument to point at the uploaded remote copy
            ccm_args[index] = tokens[0] + "=" + remote_ssl_path

    # Execute the CCM request, return output and exit status
    return ssh_client.execute_ccm_command(ccm_args)
constant[
Execute CCM operation(s) remotely
:return A tuple defining the execution of the command
* output - The output of the execution if the output was not displayed
* exit_status - The exit status of remotely executed script
:raises Exception if invalid options are passed for `--dse-credentials`, `--ssl`, or
`--node-ssl` when initiating a remote execution; also if
error occured during ssh connection
]
if <ast.UnaryOp object at 0x7da1b12b57e0> begin[:]
call[name[logging].warn, parameter[constant[Paramiko is not Availble: Skipping remote execution of CCM command]]]
return[tuple[[<ast.Constant object at 0x7da1b12b4c40>, <ast.Constant object at 0x7da1b12b55d0>]]]
variable[ssh_client] assign[=] call[name[SSHClient], parameter[name[remote_options].ssh_host, name[remote_options].ssh_port, name[remote_options].ssh_username, name[remote_options].ssh_password, name[remote_options].ssh_private_key]]
for taget[tuple[[<ast.Name object at 0x7da1b12b4a00>, <ast.Name object at 0x7da1b12b5090>]]] in starred[call[name[enumerate], parameter[name[ccm_args]]]] begin[:]
if compare[constant[--dse-credentials] in name[argument]] begin[:]
variable[tokens] assign[=] call[name[argument].split, parameter[constant[=]]]
variable[credentials_path] assign[=] call[name[os].path.join, parameter[call[name[os].path.expanduser, parameter[constant[~]]], constant[.ccm], constant[.dse.ini]]]
if compare[call[name[len], parameter[name[tokens]]] equal[==] constant[2]] begin[:]
variable[credentials_path] assign[=] call[name[tokens]][constant[1]]
if <ast.UnaryOp object at 0x7da1b12b48e0> begin[:]
<ast.Raise object at 0x7da1b12b5540>
call[name[ssh_client].put, parameter[name[credentials_path], name[ssh_client].ccm_config_dir]]
call[name[ccm_args]][name[index]] assign[=] constant[--dse-credentials]
if <ast.BoolOp object at 0x7da1b12b5d50> begin[:]
variable[tokens] assign[=] call[name[argument].split, parameter[constant[=]]]
if compare[call[name[len], parameter[name[tokens]]] not_equal[!=] constant[2]] begin[:]
<ast.Raise object at 0x7da1b12b41c0>
variable[ssl_path] assign[=] call[name[tokens]][constant[1]]
if <ast.UnaryOp object at 0x7da1b1197700> begin[:]
<ast.Raise object at 0x7da1b1196620>
variable[remote_ssl_path] assign[=] binary_operation[name[ssh_client].temp + call[name[os].path.basename, parameter[name[ssl_path]]]]
call[name[ssh_client].put, parameter[name[ssl_path], name[remote_ssl_path]]]
call[name[ccm_args]][name[index]] assign[=] binary_operation[binary_operation[call[name[tokens]][constant[0]] + constant[=]] + name[remote_ssl_path]]
return[call[name[ssh_client].execute_ccm_command, parameter[name[ccm_args]]]] | keyword[def] identifier[execute_ccm_remotely] ( identifier[remote_options] , identifier[ccm_args] ):
literal[string]
keyword[if] keyword[not] identifier[PARAMIKO_IS_AVAILABLE] :
identifier[logging] . identifier[warn] ( literal[string] )
keyword[return] keyword[None] , keyword[None]
identifier[ssh_client] = identifier[SSHClient] ( identifier[remote_options] . identifier[ssh_host] , identifier[remote_options] . identifier[ssh_port] ,
identifier[remote_options] . identifier[ssh_username] , identifier[remote_options] . identifier[ssh_password] ,
identifier[remote_options] . identifier[ssh_private_key] )
keyword[for] identifier[index] , identifier[argument] keyword[in] identifier[enumerate] ( identifier[ccm_args] ):
keyword[if] literal[string] keyword[in] identifier[argument] :
identifier[tokens] = identifier[argument] . identifier[split] ( literal[string] )
identifier[credentials_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] ), literal[string] , literal[string] )
keyword[if] identifier[len] ( identifier[tokens] )== literal[int] :
identifier[credentials_path] = identifier[tokens] [ literal[int] ]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[credentials_path] ):
keyword[raise] identifier[Exception] ( literal[string]
% identifier[credentials_path] )
identifier[ssh_client] . identifier[put] ( identifier[credentials_path] , identifier[ssh_client] . identifier[ccm_config_dir] )
identifier[ccm_args] [ identifier[index] ]= literal[string]
keyword[if] literal[string] keyword[in] identifier[argument] keyword[or] literal[string] keyword[in] identifier[argument] :
identifier[tokens] = identifier[argument] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[tokens] )!= literal[int] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[argument] )
identifier[ssl_path] = identifier[tokens] [ literal[int] ]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[ssl_path] ):
keyword[raise] identifier[Exception] ( literal[string] % identifier[ssl_path] )
identifier[remote_ssl_path] = identifier[ssh_client] . identifier[temp] + identifier[os] . identifier[path] . identifier[basename] ( identifier[ssl_path] )
identifier[ssh_client] . identifier[put] ( identifier[ssl_path] , identifier[remote_ssl_path] )
identifier[ccm_args] [ identifier[index] ]= identifier[tokens] [ literal[int] ]+ literal[string] + identifier[remote_ssl_path]
keyword[return] identifier[ssh_client] . identifier[execute_ccm_command] ( identifier[ccm_args] ) | def execute_ccm_remotely(remote_options, ccm_args):
"""
Execute CCM operation(s) remotely
:return A tuple defining the execution of the command
* output - The output of the execution if the output was not displayed
* exit_status - The exit status of remotely executed script
:raises Exception if invalid options are passed for `--dse-credentials`, `--ssl`, or
`--node-ssl` when initiating a remote execution; also if
error occured during ssh connection
"""
if not PARAMIKO_IS_AVAILABLE:
logging.warn('Paramiko is not Availble: Skipping remote execution of CCM command')
return (None, None) # depends on [control=['if'], data=[]]
# Create the SSH client
ssh_client = SSHClient(remote_options.ssh_host, remote_options.ssh_port, remote_options.ssh_username, remote_options.ssh_password, remote_options.ssh_private_key)
# Handle CCM arguments that require SFTP
for (index, argument) in enumerate(ccm_args):
# Determine if DSE credentials argument is being used
if '--dse-credentials' in argument:
# Get the filename being used for the DSE credentials
tokens = argument.split('=')
credentials_path = os.path.join(os.path.expanduser('~'), '.ccm', '.dse.ini')
if len(tokens) == 2:
credentials_path = tokens[1] # depends on [control=['if'], data=[]]
# Ensure the credential file exists locally and copy to remote host
if not os.path.isfile(credentials_path):
raise Exception('DSE Credentials File Does not Exist: %s' % credentials_path) # depends on [control=['if'], data=[]]
ssh_client.put(credentials_path, ssh_client.ccm_config_dir)
# Update the DSE credentials argument
ccm_args[index] = '--dse-credentials' # depends on [control=['if'], data=['argument']]
# Determine if SSL or node SSL path argument is being used
if '--ssl' in argument or '--node-ssl' in argument:
# Get the directory being used for the path
tokens = argument.split('=')
if len(tokens) != 2:
raise Exception('Path is not Specified: %s' % argument) # depends on [control=['if'], data=[]]
ssl_path = tokens[1]
# Ensure the path exists locally and copy to remote host
if not os.path.isdir(ssl_path):
raise Exception('Path Does not Exist: %s' % ssl_path) # depends on [control=['if'], data=[]]
remote_ssl_path = ssh_client.temp + os.path.basename(ssl_path)
ssh_client.put(ssl_path, remote_ssl_path)
# Update the argument
ccm_args[index] = tokens[0] + '=' + remote_ssl_path # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# Execute the CCM request, return output and exit status
return ssh_client.execute_ccm_command(ccm_args) |
def _remove_closest_particle(self, p):
"""removes the closest particle in self.pos to ``p``"""
#1. find closest pos:
dp = self.pos - p
dist2 = (dp*dp).sum(axis=1)
ind = dist2.argmin()
rp = self.pos[ind].copy()
#2. delete
self.pos = np.delete(self.pos, ind, axis=0)
return rp | def function[_remove_closest_particle, parameter[self, p]]:
constant[removes the closest particle in self.pos to ``p``]
variable[dp] assign[=] binary_operation[name[self].pos - name[p]]
variable[dist2] assign[=] call[binary_operation[name[dp] * name[dp]].sum, parameter[]]
variable[ind] assign[=] call[name[dist2].argmin, parameter[]]
variable[rp] assign[=] call[call[name[self].pos][name[ind]].copy, parameter[]]
name[self].pos assign[=] call[name[np].delete, parameter[name[self].pos, name[ind]]]
return[name[rp]] | keyword[def] identifier[_remove_closest_particle] ( identifier[self] , identifier[p] ):
literal[string]
identifier[dp] = identifier[self] . identifier[pos] - identifier[p]
identifier[dist2] =( identifier[dp] * identifier[dp] ). identifier[sum] ( identifier[axis] = literal[int] )
identifier[ind] = identifier[dist2] . identifier[argmin] ()
identifier[rp] = identifier[self] . identifier[pos] [ identifier[ind] ]. identifier[copy] ()
identifier[self] . identifier[pos] = identifier[np] . identifier[delete] ( identifier[self] . identifier[pos] , identifier[ind] , identifier[axis] = literal[int] )
keyword[return] identifier[rp] | def _remove_closest_particle(self, p):
"""removes the closest particle in self.pos to ``p``"""
#1. find closest pos:
dp = self.pos - p
dist2 = (dp * dp).sum(axis=1)
ind = dist2.argmin()
rp = self.pos[ind].copy()
#2. delete
self.pos = np.delete(self.pos, ind, axis=0)
return rp |
def open(self, filename, mode='r', **kwargs):
    '''
    Open the file and return a file-like object.

    :param str filename: The storage root-relative filename
    :param str mode: The open mode (``(r|w)b?``)
    :raises FileNotFound: If trying to read a file that does not exists
    '''
    # Reading a missing file is an explicit error; writes may create it.
    reading = 'r' in mode
    if reading and not self.backend.exists(filename):
        raise FileNotFound(filename)
    return self.backend.open(filename, mode, **kwargs)
constant[
Open the file and return a file-like object.
:param str filename: The storage root-relative filename
:param str mode: The open mode (``(r|w)b?``)
:raises FileNotFound: If trying to read a file that does not exists
]
if <ast.BoolOp object at 0x7da1b0b517b0> begin[:]
<ast.Raise object at 0x7da1b0b52830>
return[call[name[self].backend.open, parameter[name[filename], name[mode]]]] | keyword[def] identifier[open] ( identifier[self] , identifier[filename] , identifier[mode] = literal[string] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[mode] keyword[and] keyword[not] identifier[self] . identifier[backend] . identifier[exists] ( identifier[filename] ):
keyword[raise] identifier[FileNotFound] ( identifier[filename] )
keyword[return] identifier[self] . identifier[backend] . identifier[open] ( identifier[filename] , identifier[mode] ,** identifier[kwargs] ) | def open(self, filename, mode='r', **kwargs):
"""
Open the file and return a file-like object.
:param str filename: The storage root-relative filename
:param str mode: The open mode (``(r|w)b?``)
:raises FileNotFound: If trying to read a file that does not exists
"""
if 'r' in mode and (not self.backend.exists(filename)):
raise FileNotFound(filename) # depends on [control=['if'], data=[]]
return self.backend.open(filename, mode, **kwargs) |
def _read_files(files):
    """
    Read the contents of a list of files
    """
    # Pair each file's parsed title with its raw contents.
    contents = []
    for path in files:
        contents.append((_parse_title(path), _read_file(path)))
    return contents
constant[
Read the contents of a list of files
]
variable[file_contents] assign[=] <ast.ListComp object at 0x7da1b1bc0df0>
return[name[file_contents]] | keyword[def] identifier[_read_files] ( identifier[files] ):
literal[string]
identifier[file_contents] =[
(
identifier[_parse_title] ( identifier[file_path] ),
identifier[_read_file] ( identifier[file_path] ),
)
keyword[for] identifier[file_path] keyword[in] identifier[files]
]
keyword[return] identifier[file_contents] | def _read_files(files):
"""
Read the contents of a list of files
"""
file_contents = [(_parse_title(file_path), _read_file(file_path)) for file_path in files]
return file_contents |
def broadcast_structure(to_structure, from_structure):
  """Broadcasts `from_structure` to `to_structure`.

  This is useful for downstream usage of `zip` or `tf.nest.map_structure`.

  If `from_structure` is a singleton, it is tiled to match the structure of
  `to_structure`. Note that the elements in `from_structure` are not copied if
  this tiling occurs.

  Args:
    to_structure: A structure.
    from_structure: A structure.

  Returns:
    new_from_structure: Same structure as `to_structure`.

  #### Example:

  ```python
  a_structure = ['a', 'b', 'c']
  b_structure = broadcast_structure(a_structure, 'd')
  # -> ['d', 'd', 'd']
  c_structure = tf.nest.map_structure(
      lambda a, b: a + b, a_structure, b_structure)
  # -> ['ad', 'bd', 'cd']
  ```
  """
  flat_from = tf.nest.flatten(from_structure)
  if len(flat_from) == 1:
    # Tile the lone element over every leaf of the target structure.
    singleton = flat_from[0]
    from_structure = tf.nest.map_structure(
        lambda _: singleton, to_structure)
  return from_structure
constant[Broadcasts `from_structure` to `to_structure`.
This is useful for downstream usage of `zip` or `tf.nest.map_structure`.
If `from_structure` is a singleton, it is tiled to match the structure of
`to_structure`. Note that the elements in `from_structure` are not copied if
this tiling occurs.
Args:
to_structure: A structure.
from_structure: A structure.
Returns:
new_from_structure: Same structure as `to_structure`.
#### Example:
```python
a_structure = ['a', 'b', 'c']
b_structure = broadcast_structure(a_structure, 'd')
# -> ['d', 'd', 'd']
c_structure = tf.nest.map_structure(
lambda a, b: a + b, a_structure, b_structure)
# -> ['ad', 'bd', 'cd']
```
]
variable[from_parts] assign[=] call[name[tf].nest.flatten, parameter[name[from_structure]]]
if compare[call[name[len], parameter[name[from_parts]]] equal[==] constant[1]] begin[:]
variable[from_structure] assign[=] call[name[tf].nest.map_structure, parameter[<ast.Lambda object at 0x7da1b022a7d0>, name[to_structure]]]
return[name[from_structure]] | keyword[def] identifier[broadcast_structure] ( identifier[to_structure] , identifier[from_structure] ):
literal[string]
identifier[from_parts] = identifier[tf] . identifier[nest] . identifier[flatten] ( identifier[from_structure] )
keyword[if] identifier[len] ( identifier[from_parts] )== literal[int] :
identifier[from_structure] = identifier[tf] . identifier[nest] . identifier[map_structure] ( keyword[lambda] identifier[_] : identifier[from_parts] [ literal[int] ],
identifier[to_structure] )
keyword[return] identifier[from_structure] | def broadcast_structure(to_structure, from_structure):
"""Broadcasts `from_structure` to `to_structure`.
This is useful for downstream usage of `zip` or `tf.nest.map_structure`.
If `from_structure` is a singleton, it is tiled to match the structure of
`to_structure`. Note that the elements in `from_structure` are not copied if
this tiling occurs.
Args:
to_structure: A structure.
from_structure: A structure.
Returns:
new_from_structure: Same structure as `to_structure`.
#### Example:
```python
a_structure = ['a', 'b', 'c']
b_structure = broadcast_structure(a_structure, 'd')
# -> ['d', 'd', 'd']
c_structure = tf.nest.map_structure(
lambda a, b: a + b, a_structure, b_structure)
# -> ['ad', 'bd', 'cd']
```
"""
from_parts = tf.nest.flatten(from_structure)
if len(from_parts) == 1:
from_structure = tf.nest.map_structure(lambda _: from_parts[0], to_structure) # depends on [control=['if'], data=[]]
return from_structure |
def rollback(self, label, plane):
    """Rollback config."""
    # Tag this rollback operation with a timestamped condoor label.
    cm_label = 'condoor-{}'.format(int(time.time()))
    command = self.rollback_cmd.format(label)
    self.device.send(command, timeout=120)
    return cm_label
constant[Rollback config.]
variable[cm_label] assign[=] call[constant[condoor-{}].format, parameter[call[name[int], parameter[call[name[time].time, parameter[]]]]]]
call[name[self].device.send, parameter[call[name[self].rollback_cmd.format, parameter[name[label]]]]]
return[name[cm_label]] | keyword[def] identifier[rollback] ( identifier[self] , identifier[label] , identifier[plane] ):
literal[string]
identifier[cm_label] = literal[string] . identifier[format] ( identifier[int] ( identifier[time] . identifier[time] ()))
identifier[self] . identifier[device] . identifier[send] ( identifier[self] . identifier[rollback_cmd] . identifier[format] ( identifier[label] ), identifier[timeout] = literal[int] )
keyword[return] identifier[cm_label] | def rollback(self, label, plane):
"""Rollback config."""
cm_label = 'condoor-{}'.format(int(time.time()))
self.device.send(self.rollback_cmd.format(label), timeout=120)
return cm_label |
def search(self, filters=None, fields=None, limit=None, page=1):
    """
    Retrieve order list by options using search api. Using this result can
    be paginated

    :param filters: `{<attribute>:{<operator>:<value>}}`
    :param fields: [<String: magento field names>, ...]
    :param limit: `page limit` (defaults to 1000)
    :param page: `current page`
    :return: `list` of `dict`
    """
    # Fall back to empty filter/field sets and a 1000-row page by default.
    search_filters = filters or {}
    field_list = fields or []
    page_size = limit or 1000
    options = dict(
        imported=False,
        filters=search_filters,
        fields=field_list,
        limit=page_size,
        page=page,
    )
    return self.call('sales_order.search', [options])
constant[
Retrieve order list by options using search api. Using this result can
be paginated
:param options: Dictionary of options.
:param filters: `{<attribute>:{<operator>:<value>}}`
:param fields: [<String: magento field names>, ...]
:param limit: `page limit`
:param page: `current page`
:return: `list` of `dict`
]
variable[options] assign[=] dictionary[[<ast.Constant object at 0x7da1b0494760>, <ast.Constant object at 0x7da1b04946a0>, <ast.Constant object at 0x7da1b04945e0>, <ast.Constant object at 0x7da1b0494430>, <ast.Constant object at 0x7da1b0494160>], [<ast.Constant object at 0x7da1b0494790>, <ast.BoolOp object at 0x7da1b0494220>, <ast.BoolOp object at 0x7da1b0494280>, <ast.BoolOp object at 0x7da1b0494070>, <ast.Name object at 0x7da1b0494640>]]
return[call[name[self].call, parameter[constant[sales_order.search], list[[<ast.Name object at 0x7da1b04942b0>]]]]] | keyword[def] identifier[search] ( identifier[self] , identifier[filters] = keyword[None] , identifier[fields] = keyword[None] , identifier[limit] = keyword[None] , identifier[page] = literal[int] ):
literal[string]
identifier[options] ={
literal[string] : keyword[False] ,
literal[string] : identifier[filters] keyword[or] {},
literal[string] : identifier[fields] keyword[or] [],
literal[string] : identifier[limit] keyword[or] literal[int] ,
literal[string] : identifier[page] ,
}
keyword[return] identifier[self] . identifier[call] ( literal[string] ,[ identifier[options] ]) | def search(self, filters=None, fields=None, limit=None, page=1):
"""
Retrieve order list by options using search api. Using this result can
be paginated
:param options: Dictionary of options.
:param filters: `{<attribute>:{<operator>:<value>}}`
:param fields: [<String: magento field names>, ...]
:param limit: `page limit`
:param page: `current page`
:return: `list` of `dict`
"""
options = {'imported': False, 'filters': filters or {}, 'fields': fields or [], 'limit': limit or 1000, 'page': page}
return self.call('sales_order.search', [options]) |
def _dasd_conversion(cls, val, **kwargs):
'''
converts 0/1/2 for dasd reg key
'''
if val is not None:
if val == '0' or val == 0 or val == '':
return 'Administrators'
elif val == '1' or val == 1:
return 'Administrators and Power Users'
elif val == '2' or val == 2:
return 'Administrators and Interactive Users'
else:
return 'Not Defined'
else:
return 'Not Defined' | def function[_dasd_conversion, parameter[cls, val]]:
constant[
converts 0/1/2 for dasd reg key
]
if compare[name[val] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da1b1cb1ed0> begin[:]
return[constant[Administrators]] | keyword[def] identifier[_dasd_conversion] ( identifier[cls] , identifier[val] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[val] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[val] == literal[string] keyword[or] identifier[val] == literal[int] keyword[or] identifier[val] == literal[string] :
keyword[return] literal[string]
keyword[elif] identifier[val] == literal[string] keyword[or] identifier[val] == literal[int] :
keyword[return] literal[string]
keyword[elif] identifier[val] == literal[string] keyword[or] identifier[val] == literal[int] :
keyword[return] literal[string]
keyword[else] :
keyword[return] literal[string]
keyword[else] :
keyword[return] literal[string] | def _dasd_conversion(cls, val, **kwargs):
"""
converts 0/1/2 for dasd reg key
"""
if val is not None:
if val == '0' or val == 0 or val == '':
return 'Administrators' # depends on [control=['if'], data=[]]
elif val == '1' or val == 1:
return 'Administrators and Power Users' # depends on [control=['if'], data=[]]
elif val == '2' or val == 2:
return 'Administrators and Interactive Users' # depends on [control=['if'], data=[]]
else:
return 'Not Defined' # depends on [control=['if'], data=['val']]
else:
return 'Not Defined' |
def link(**kwargs):
    ''' Synchronize remote/local directory. '''
    # Prompt for confirmation unless the user pre-confirmed with --yes;
    # short-circuit keeps the prompt from appearing when 'yes' is set.
    # Collapsing the two identical adapter-call/echo branches removes the
    # duplicated code of the original.
    confirmed = kwargs['yes'] or click.confirm(
        "Are you sure you want to link %s?" % kwargs['path'])
    if confirmed:
        output, err = cli_syncthing_adapter.link(**kwargs)
        click.echo("%s" % output, err=err)
constant[ Synchronize remote/local directory. ]
if call[name[kwargs]][constant[yes]] begin[:]
<ast.Tuple object at 0x7da1b236bd00> assign[=] call[name[cli_syncthing_adapter].link, parameter[]]
call[name[click].echo, parameter[binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> name[output]]]] | keyword[def] identifier[link] (** identifier[kwargs] ):
literal[string]
keyword[if] identifier[kwargs] [ literal[string] ]:
identifier[output] , identifier[err] = identifier[cli_syncthing_adapter] . identifier[link] (** identifier[kwargs] )
identifier[click] . identifier[echo] ( literal[string] % identifier[output] , identifier[err] = identifier[err] )
keyword[else] :
keyword[if] identifier[click] . identifier[confirm] ( literal[string] % identifier[kwargs] [ literal[string] ]):
identifier[output] , identifier[err] = identifier[cli_syncthing_adapter] . identifier[link] (** identifier[kwargs] )
identifier[click] . identifier[echo] ( literal[string] % identifier[output] , identifier[err] = identifier[err] ) | def link(**kwargs):
""" Synchronize remote/local directory. """
if kwargs['yes']:
(output, err) = cli_syncthing_adapter.link(**kwargs)
click.echo('%s' % output, err=err) # depends on [control=['if'], data=[]]
elif click.confirm('Are you sure you want to link %s?' % kwargs['path']):
(output, err) = cli_syncthing_adapter.link(**kwargs)
click.echo('%s' % output, err=err) # depends on [control=['if'], data=[]] |
def gmtoffset_from_tm(tm, local):
    # type: (float, time.struct_time) -> int
    '''
    A function to compute the GMT offset from the time in seconds since the epoch
    and the local time object.

    Parameters:
     tm - The time in seconds since the epoch.
     local - The struct_time object representing the local time.
    Returns:
     The gmtoffset.
    '''
    utc = time.gmtime(tm)
    delta_year = utc.tm_year - local.tm_year
    delta_yday = utc.tm_yday - local.tm_yday
    delta_hour = utc.tm_hour - local.tm_hour
    delta_min = utc.tm_min - local.tm_min
    # Collapse any date difference to a single +/-1 day correction.
    if delta_yday < 0:
        delta_yday = -1
    elif delta_year > 0:
        delta_yday = 1
    # Offset is expressed in 15-minute intervals, negated relative to UTC.
    total_minutes = delta_min + 60 * (delta_hour + 24 * delta_yday)
    return -total_minutes // 15
constant[
A function to compute the GMT offset from the time in seconds since the epoch
and the local time object.
Parameters:
tm - The time in seconds since the epoch.
local - The struct_time object representing the local time.
Returns:
The gmtoffset.
]
variable[gmtime] assign[=] call[name[time].gmtime, parameter[name[tm]]]
variable[tmpyear] assign[=] binary_operation[name[gmtime].tm_year - name[local].tm_year]
variable[tmpyday] assign[=] binary_operation[name[gmtime].tm_yday - name[local].tm_yday]
variable[tmphour] assign[=] binary_operation[name[gmtime].tm_hour - name[local].tm_hour]
variable[tmpmin] assign[=] binary_operation[name[gmtime].tm_min - name[local].tm_min]
if compare[name[tmpyday] less[<] constant[0]] begin[:]
variable[tmpyday] assign[=] <ast.UnaryOp object at 0x7da1b0f0cc70>
return[binary_operation[<ast.UnaryOp object at 0x7da1b0f0f460> <ast.FloorDiv object at 0x7da2590d6bc0> constant[15]]] | keyword[def] identifier[gmtoffset_from_tm] ( identifier[tm] , identifier[local] ):
literal[string]
identifier[gmtime] = identifier[time] . identifier[gmtime] ( identifier[tm] )
identifier[tmpyear] = identifier[gmtime] . identifier[tm_year] - identifier[local] . identifier[tm_year]
identifier[tmpyday] = identifier[gmtime] . identifier[tm_yday] - identifier[local] . identifier[tm_yday]
identifier[tmphour] = identifier[gmtime] . identifier[tm_hour] - identifier[local] . identifier[tm_hour]
identifier[tmpmin] = identifier[gmtime] . identifier[tm_min] - identifier[local] . identifier[tm_min]
keyword[if] identifier[tmpyday] < literal[int] :
identifier[tmpyday] =- literal[int]
keyword[else] :
keyword[if] identifier[tmpyear] > literal[int] :
identifier[tmpyday] = literal[int]
keyword[return] -( identifier[tmpmin] + literal[int] *( identifier[tmphour] + literal[int] * identifier[tmpyday] ))// literal[int] | def gmtoffset_from_tm(tm, local):
# type: (float, time.struct_time) -> int
'\n A function to compute the GMT offset from the time in seconds since the epoch\n and the local time object.\n\n Parameters:\n tm - The time in seconds since the epoch.\n local - The struct_time object representing the local time.\n Returns:\n The gmtoffset.\n '
gmtime = time.gmtime(tm)
tmpyear = gmtime.tm_year - local.tm_year
tmpyday = gmtime.tm_yday - local.tm_yday
tmphour = gmtime.tm_hour - local.tm_hour
tmpmin = gmtime.tm_min - local.tm_min
if tmpyday < 0:
tmpyday = -1 # depends on [control=['if'], data=['tmpyday']]
elif tmpyear > 0:
tmpyday = 1 # depends on [control=['if'], data=[]]
return -(tmpmin + 60 * (tmphour + 24 * tmpyday)) // 15 |
def strip_optional_suffix(string, suffix, log=None):
    """
    Strip ``suffix`` from the end of ``string`` if present; otherwise
    return ``string`` unchanged, optionally warning through ``log``.

    >>> strip_optional_suffix('abcdef', 'def')
    'abc'
    >>> strip_optional_suffix('abcdef', '123')
    'abcdef'
    >>> strip_optional_suffix('abcdef', '123', PrintingLogger())
    String ends with 'def', not '123'
    'abcdef'
    """
    if string.endswith(suffix):
        # Slice by length rather than a negative index: ``string[:-0]``
        # would wrongly return '' when ``suffix`` is the empty string.
        return string[:len(string) - len(suffix)]
    if log:
        log.warn('String ends with %r, not %r', string[-len(suffix):], suffix)
    return string
constant[
>>> strip_optional_suffix('abcdef', 'def')
'abc'
>>> strip_optional_suffix('abcdef', '123')
'abcdef'
>>> strip_optional_suffix('abcdef', '123', PrintingLogger())
String ends with 'def', not '123'
'abcdef'
]
if call[name[string].endswith, parameter[name[suffix]]] begin[:]
return[call[name[string]][<ast.Slice object at 0x7da1b15a3a30>]]
if name[log] begin[:]
call[name[log].warn, parameter[constant[String ends with %r, not %r], call[name[string]][<ast.Slice object at 0x7da1b15a0940>], name[suffix]]]
return[name[string]] | keyword[def] identifier[strip_optional_suffix] ( identifier[string] , identifier[suffix] , identifier[log] = keyword[None] ):
literal[string]
keyword[if] identifier[string] . identifier[endswith] ( identifier[suffix] ):
keyword[return] identifier[string] [:- identifier[len] ( identifier[suffix] )]
keyword[if] identifier[log] :
identifier[log] . identifier[warn] ( literal[string] , identifier[string] [- identifier[len] ( identifier[suffix] ):], identifier[suffix] )
keyword[return] identifier[string] | def strip_optional_suffix(string, suffix, log=None):
"""
>>> strip_optional_suffix('abcdef', 'def')
'abc'
>>> strip_optional_suffix('abcdef', '123')
'abcdef'
>>> strip_optional_suffix('abcdef', '123', PrintingLogger())
String ends with 'def', not '123'
'abcdef'
"""
if string.endswith(suffix):
return string[:-len(suffix)] # depends on [control=['if'], data=[]]
if log:
log.warn('String ends with %r, not %r', string[-len(suffix):], suffix) # depends on [control=['if'], data=[]]
return string |
def _broker_shutdown(self):
    """
    Ask every active stream to shut down, then wait for them to go away.

    Each registered reader/writer stream has its :meth:`Stream.on_shutdown`
    invoked; the loop then keeps running for at most
    :attr:`shutdown_timeout` seconds so the streams can unregister
    themselves. Streams still alive once the grace period ends are
    reported with an error log message.
    """
    for _, (side, _) in self.poller.readers + self.poller.writers:
        self._call(side.stream, side.stream.on_shutdown)

    cutoff = time.time() + self.shutdown_timeout
    while self.keep_alive():
        budget = cutoff - time.time()
        if budget <= 0:
            break
        self._loop_once(budget)

    if self.keep_alive():
        LOG.error('%r: some streams did not close gracefully. '
                  'The most likely cause for this is one or '
                  'more child processes still connected to '
                  'our stdout/stderr pipes.', self)
constant[
Invoke :meth:`Stream.on_shutdown` for every active stream, then allow
up to :attr:`shutdown_timeout` seconds for the streams to unregister
themselves, logging an error if any did not unregister during the grace
period.
]
for taget[tuple[[<ast.Name object at 0x7da1b1dd1b40>, <ast.Tuple object at 0x7da1b1dd1630>]]] in starred[binary_operation[name[self].poller.readers + name[self].poller.writers]] begin[:]
call[name[self]._call, parameter[name[side].stream, name[side].stream.on_shutdown]]
variable[deadline] assign[=] binary_operation[call[name[time].time, parameter[]] + name[self].shutdown_timeout]
while <ast.BoolOp object at 0x7da1b1d0d360> begin[:]
call[name[self]._loop_once, parameter[call[name[max], parameter[constant[0], binary_operation[name[deadline] - call[name[time].time, parameter[]]]]]]]
if call[name[self].keep_alive, parameter[]] begin[:]
call[name[LOG].error, parameter[constant[%r: some streams did not close gracefully. The most likely cause for this is one or more child processes still connected to our stdout/stderr pipes.], name[self]]] | keyword[def] identifier[_broker_shutdown] ( identifier[self] ):
literal[string]
keyword[for] identifier[_] ,( identifier[side] , identifier[_] ) keyword[in] identifier[self] . identifier[poller] . identifier[readers] + identifier[self] . identifier[poller] . identifier[writers] :
identifier[self] . identifier[_call] ( identifier[side] . identifier[stream] , identifier[side] . identifier[stream] . identifier[on_shutdown] )
identifier[deadline] = identifier[time] . identifier[time] ()+ identifier[self] . identifier[shutdown_timeout]
keyword[while] identifier[self] . identifier[keep_alive] () keyword[and] identifier[time] . identifier[time] ()< identifier[deadline] :
identifier[self] . identifier[_loop_once] ( identifier[max] ( literal[int] , identifier[deadline] - identifier[time] . identifier[time] ()))
keyword[if] identifier[self] . identifier[keep_alive] ():
identifier[LOG] . identifier[error] ( literal[string]
literal[string]
literal[string]
literal[string] , identifier[self] ) | def _broker_shutdown(self):
"""
Invoke :meth:`Stream.on_shutdown` for every active stream, then allow
up to :attr:`shutdown_timeout` seconds for the streams to unregister
themselves, logging an error if any did not unregister during the grace
period.
"""
for (_, (side, _)) in self.poller.readers + self.poller.writers:
self._call(side.stream, side.stream.on_shutdown) # depends on [control=['for'], data=[]]
deadline = time.time() + self.shutdown_timeout
while self.keep_alive() and time.time() < deadline:
self._loop_once(max(0, deadline - time.time())) # depends on [control=['while'], data=[]]
if self.keep_alive():
LOG.error('%r: some streams did not close gracefully. The most likely cause for this is one or more child processes still connected to our stdout/stderr pipes.', self) # depends on [control=['if'], data=[]] |
def __update_mouse(self, milliseconds):
        """Drive button selection with the mouse.

        Updates every button, promotes a newly-hovered button to the active
        selection, and fires the hovered button when the configured select
        mouse button is clicked.
        """
        for btn in self.gui_buttons:
            hovered_before = btn.is_mouse_hovering
            btn.update(milliseconds)
            # Let the mouse steal the selection when it is the input focus.
            if hovered_before == False and btn.is_mouse_hovering:
                # The cursor just entered this button; make it the active one.
                previous_index = self.current_index
                self.current_index = self.gui_buttons.index(btn)
                self.__handle_selections(previous_index, self.current_index)
            elif Ragnarok.get_world().Mouse.is_clicked(self.mouse_select_button) and btn.is_mouse_hovering:
                # The select button was just pressed while hovering: click it.
                btn.clicked_action()
constant[
Use the mouse to control selection of the buttons.
]
for taget[name[button]] in starred[name[self].gui_buttons] begin[:]
variable[was_hovering] assign[=] name[button].is_mouse_hovering
call[name[button].update, parameter[name[milliseconds]]]
if <ast.BoolOp object at 0x7da207f03190> begin[:]
variable[old_index] assign[=] name[self].current_index
name[self].current_index assign[=] call[name[self].gui_buttons.index, parameter[name[button]]]
call[name[self].__handle_selections, parameter[name[old_index], name[self].current_index]] | keyword[def] identifier[__update_mouse] ( identifier[self] , identifier[milliseconds] ):
literal[string]
keyword[for] identifier[button] keyword[in] identifier[self] . identifier[gui_buttons] :
identifier[was_hovering] = identifier[button] . identifier[is_mouse_hovering]
identifier[button] . identifier[update] ( identifier[milliseconds] )
keyword[if] identifier[was_hovering] == keyword[False] keyword[and] identifier[button] . identifier[is_mouse_hovering] :
identifier[old_index] = identifier[self] . identifier[current_index]
identifier[self] . identifier[current_index] = identifier[self] . identifier[gui_buttons] . identifier[index] ( identifier[button] )
identifier[self] . identifier[__handle_selections] ( identifier[old_index] , identifier[self] . identifier[current_index] )
keyword[elif] identifier[Ragnarok] . identifier[get_world] (). identifier[Mouse] . identifier[is_clicked] ( identifier[self] . identifier[mouse_select_button] ) keyword[and] identifier[button] . identifier[is_mouse_hovering] :
identifier[button] . identifier[clicked_action] () | def __update_mouse(self, milliseconds):
"""
Use the mouse to control selection of the buttons.
"""
for button in self.gui_buttons:
was_hovering = button.is_mouse_hovering
button.update(milliseconds)
#Provides capibilities for the mouse to select a button if the mouse is the focus of input.
if was_hovering == False and button.is_mouse_hovering:
#The user has just moved the mouse over the button. Set it as active.
old_index = self.current_index
self.current_index = self.gui_buttons.index(button)
self.__handle_selections(old_index, self.current_index) # depends on [control=['if'], data=[]]
elif Ragnarok.get_world().Mouse.is_clicked(self.mouse_select_button) and button.is_mouse_hovering:
#The main mouse button has just depressed, click the current button.
button.clicked_action() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['button']] |
def _do(self, nodes):
        """Instantiate services in dependency order (recursively).

        ``nodes`` maps each service name to the set of service names it
        still depends on. On every pass, all services whose dependency set
        is empty are created and removed from ``nodes``, the remaining
        dependency sets are pruned accordingly, and the method recurses on
        whatever is left.
        """
        if not isinstance(nodes, dict):
            raise TypeError('"nodes" must be a dictionary')
        if not nodes:
            # Nothing left to instantiate: done.
            return
        count_before = len(nodes)
        created = set()

        # Create every service that no longer waits on anything.
        for name, remaining_deps in six.iteritems(nodes):
            if remaining_deps:
                # Still has unmet dependencies; handled in a later pass.
                continue
            config = self._config[name]
            service = self._factory.create_from_dict(config)
            self._factory.add_instantiated_service(name, service)
            created.add(name)

        # A pass that creates nothing would recurse forever.
        if not created:
            raise Exception('No newly instantiated services')

        for name in created:
            del nodes[name]

        # Sanity check against infinite recursion: something must be gone.
        if count_before == len(nodes):
            raise Exception('No nodes removed!')

        # Drop the freshly created services from the other dependency sets.
        for name, remaining_deps in six.iteritems(nodes):
            nodes[name] = remaining_deps.difference(created)

        self._do(nodes)
constant[ Recursive method to instantiate services ]
if <ast.UnaryOp object at 0x7da1b149d870> begin[:]
<ast.Raise object at 0x7da1b149e6b0>
if <ast.UnaryOp object at 0x7da1b149e200> begin[:]
return[None]
variable[starting_num_nodes] assign[=] call[name[len], parameter[name[nodes]]]
variable[newly_instantiated] assign[=] call[name[set], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b149fe80>, <ast.Name object at 0x7da1b149c820>]]] in starred[call[name[six].iteritems, parameter[name[nodes]]]] begin[:]
if name[dependency_set] begin[:]
continue
variable[config] assign[=] call[name[self]._config][name[name]]
variable[service] assign[=] call[name[self]._factory.create_from_dict, parameter[name[config]]]
call[name[self]._factory.add_instantiated_service, parameter[name[name], name[service]]]
call[name[newly_instantiated].add, parameter[name[name]]]
if <ast.UnaryOp object at 0x7da1b149cf10> begin[:]
<ast.Raise object at 0x7da1b149cca0>
for taget[name[name]] in starred[name[newly_instantiated]] begin[:]
<ast.Delete object at 0x7da1b149ee90>
if compare[name[starting_num_nodes] equal[==] call[name[len], parameter[name[nodes]]]] begin[:]
<ast.Raise object at 0x7da1b149e770>
for taget[tuple[[<ast.Name object at 0x7da1b149d720>, <ast.Name object at 0x7da1b149e950>]]] in starred[call[name[six].iteritems, parameter[name[nodes]]]] begin[:]
call[name[nodes]][name[name]] assign[=] call[name[dependency_set].difference, parameter[name[newly_instantiated]]]
call[name[self]._do, parameter[name[nodes]]] | keyword[def] identifier[_do] ( identifier[self] , identifier[nodes] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[nodes] , identifier[dict] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[nodes] :
keyword[return]
identifier[starting_num_nodes] = identifier[len] ( identifier[nodes] )
identifier[newly_instantiated] = identifier[set] ()
keyword[for] ( identifier[name] , identifier[dependency_set] ) keyword[in] identifier[six] . identifier[iteritems] ( identifier[nodes] ):
keyword[if] identifier[dependency_set] :
keyword[continue]
identifier[config] = identifier[self] . identifier[_config] [ identifier[name] ]
identifier[service] = identifier[self] . identifier[_factory] . identifier[create_from_dict] ( identifier[config] )
identifier[self] . identifier[_factory] . identifier[add_instantiated_service] ( identifier[name] , identifier[service] )
identifier[newly_instantiated] . identifier[add] ( identifier[name] )
keyword[if] keyword[not] identifier[newly_instantiated] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[for] identifier[name] keyword[in] identifier[newly_instantiated] :
keyword[del] identifier[nodes] [ identifier[name] ]
keyword[if] identifier[starting_num_nodes] == identifier[len] ( identifier[nodes] ):
keyword[raise] identifier[Exception] ( literal[string] )
keyword[for] ( identifier[name] , identifier[dependency_set] ) keyword[in] identifier[six] . identifier[iteritems] ( identifier[nodes] ):
identifier[nodes] [ identifier[name] ]= identifier[dependency_set] . identifier[difference] ( identifier[newly_instantiated] )
identifier[self] . identifier[_do] ( identifier[nodes] ) | def _do(self, nodes):
""" Recursive method to instantiate services """
if not isinstance(nodes, dict):
raise TypeError('"nodes" must be a dictionary') # depends on [control=['if'], data=[]]
if not nodes:
# we're done!
return # depends on [control=['if'], data=[]]
starting_num_nodes = len(nodes)
newly_instantiated = set()
# Instantiate services with an empty dependency set
for (name, dependency_set) in six.iteritems(nodes):
if dependency_set:
# Skip non-empty dependency sets
continue # depends on [control=['if'], data=[]]
# Instantiate
config = self._config[name]
service = self._factory.create_from_dict(config)
self._factory.add_instantiated_service(name, service)
newly_instantiated.add(name) # depends on [control=['for'], data=[]]
# We ALWAYS should have instantiated a new service
# or we'll end up in an infinite loop.
if not newly_instantiated:
raise Exception('No newly instantiated services') # depends on [control=['if'], data=[]]
# Remove from Nodes
for name in newly_instantiated:
del nodes[name] # depends on [control=['for'], data=['name']]
# Check if the number of nodes have changed
# to prevent infinite loops.
# We should ALWAYS have removed a node.
if starting_num_nodes == len(nodes):
raise Exception('No nodes removed!') # depends on [control=['if'], data=[]]
# Remove newly instantiated services from dependency sets
for (name, dependency_set) in six.iteritems(nodes):
nodes[name] = dependency_set.difference(newly_instantiated) # depends on [control=['for'], data=[]]
# Recursion is recursion is ...
self._do(nodes) |
def clear(self, color=True, depth=True, stencil=True):
        """Clear the screen buffers (thin wrapper around ``gl.glClear``).

        Parameters
        ----------
        color : bool | str | tuple | instance of Color
            Clear the color buffer bit. Any non-bool value is first passed
            to ``set_clear_color`` to set the color clear value.
        depth : bool | float
            Clear the depth buffer bit. A float is first passed to
            ``set_clear_depth`` to set the depth clear value.
        stencil : bool | int
            Clear the stencil buffer bit. An int is first passed to
            ``set_clear_stencil`` to set the stencil clear index.
        """
        mask = 0
        # ndarray is checked explicitly because bool() on an array is
        # ambiguous/raises for multi-element arrays.
        if isinstance(color, np.ndarray) or bool(color):
            if not isinstance(color, bool):
                self.set_clear_color(color)
            mask = mask | gl.GL_COLOR_BUFFER_BIT
        if depth:
            if not isinstance(depth, bool):
                self.set_clear_depth(depth)
            mask = mask | gl.GL_DEPTH_BUFFER_BIT
        if stencil:
            if not isinstance(stencil, bool):
                self.set_clear_stencil(stencil)
            mask = mask | gl.GL_STENCIL_BUFFER_BIT
        self.glir.command('FUNC', 'glClear', mask)
constant[Clear the screen buffers
This is a wrapper for gl.glClear.
Parameters
----------
color : bool | str | tuple | instance of Color
Clear the color buffer bit. If not bool, ``set_clear_color`` will
be used to set the color clear value.
depth : bool | float
Clear the depth buffer bit. If float, ``set_clear_depth`` will
be used to set the depth clear value.
stencil : bool | int
Clear the stencil buffer bit. If int, ``set_clear_stencil`` will
be used to set the stencil clear index.
]
variable[bits] assign[=] constant[0]
if <ast.BoolOp object at 0x7da1b0f2ab60> begin[:]
if <ast.UnaryOp object at 0x7da1b0f2acb0> begin[:]
call[name[self].set_clear_color, parameter[name[color]]]
<ast.AugAssign object at 0x7da1b0f2b490>
if name[depth] begin[:]
if <ast.UnaryOp object at 0x7da1b0f2b310> begin[:]
call[name[self].set_clear_depth, parameter[name[depth]]]
<ast.AugAssign object at 0x7da1b0f2a530>
if name[stencil] begin[:]
if <ast.UnaryOp object at 0x7da1b0f2b8b0> begin[:]
call[name[self].set_clear_stencil, parameter[name[stencil]]]
<ast.AugAssign object at 0x7da1b0f2a4d0>
call[name[self].glir.command, parameter[constant[FUNC], constant[glClear], name[bits]]] | keyword[def] identifier[clear] ( identifier[self] , identifier[color] = keyword[True] , identifier[depth] = keyword[True] , identifier[stencil] = keyword[True] ):
literal[string]
identifier[bits] = literal[int]
keyword[if] identifier[isinstance] ( identifier[color] , identifier[np] . identifier[ndarray] ) keyword[or] identifier[bool] ( identifier[color] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[color] , identifier[bool] ):
identifier[self] . identifier[set_clear_color] ( identifier[color] )
identifier[bits] |= identifier[gl] . identifier[GL_COLOR_BUFFER_BIT]
keyword[if] identifier[depth] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[depth] , identifier[bool] ):
identifier[self] . identifier[set_clear_depth] ( identifier[depth] )
identifier[bits] |= identifier[gl] . identifier[GL_DEPTH_BUFFER_BIT]
keyword[if] identifier[stencil] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[stencil] , identifier[bool] ):
identifier[self] . identifier[set_clear_stencil] ( identifier[stencil] )
identifier[bits] |= identifier[gl] . identifier[GL_STENCIL_BUFFER_BIT]
identifier[self] . identifier[glir] . identifier[command] ( literal[string] , literal[string] , identifier[bits] ) | def clear(self, color=True, depth=True, stencil=True):
"""Clear the screen buffers
This is a wrapper for gl.glClear.
Parameters
----------
color : bool | str | tuple | instance of Color
Clear the color buffer bit. If not bool, ``set_clear_color`` will
be used to set the color clear value.
depth : bool | float
Clear the depth buffer bit. If float, ``set_clear_depth`` will
be used to set the depth clear value.
stencil : bool | int
Clear the stencil buffer bit. If int, ``set_clear_stencil`` will
be used to set the stencil clear index.
"""
bits = 0
if isinstance(color, np.ndarray) or bool(color):
if not isinstance(color, bool):
self.set_clear_color(color) # depends on [control=['if'], data=[]]
bits |= gl.GL_COLOR_BUFFER_BIT # depends on [control=['if'], data=[]]
if depth:
if not isinstance(depth, bool):
self.set_clear_depth(depth) # depends on [control=['if'], data=[]]
bits |= gl.GL_DEPTH_BUFFER_BIT # depends on [control=['if'], data=[]]
if stencil:
if not isinstance(stencil, bool):
self.set_clear_stencil(stencil) # depends on [control=['if'], data=[]]
bits |= gl.GL_STENCIL_BUFFER_BIT # depends on [control=['if'], data=[]]
self.glir.command('FUNC', 'glClear', bits) |
def _bind_topics(self, topics):
        """Subscribe to every MQTT topic needed to talk to this device.

        Args:
            topics (MQTTTopicValidator): The topic validator for this device
                that we are connecting to.
        """
        # FIXME: Allow for these subscriptions to fail and clean up the
        # previous ones so that this function is atomic
        bindings = (
            (topics.status, self._on_status_message),
            (topics.tracing, self._on_trace),
            (topics.streaming, self._on_report),
            (topics.response, self._on_response_message),
        )
        for topic, handler in bindings:
            self.client.subscribe(topic, handler)
constant[Subscribe to all the topics we need to communication with this device
Args:
topics (MQTTTopicValidator): The topic validator for this device that
we are connecting to.
]
call[name[self].client.subscribe, parameter[name[topics].status, name[self]._on_status_message]]
call[name[self].client.subscribe, parameter[name[topics].tracing, name[self]._on_trace]]
call[name[self].client.subscribe, parameter[name[topics].streaming, name[self]._on_report]]
call[name[self].client.subscribe, parameter[name[topics].response, name[self]._on_response_message]] | keyword[def] identifier[_bind_topics] ( identifier[self] , identifier[topics] ):
literal[string]
identifier[self] . identifier[client] . identifier[subscribe] ( identifier[topics] . identifier[status] , identifier[self] . identifier[_on_status_message] )
identifier[self] . identifier[client] . identifier[subscribe] ( identifier[topics] . identifier[tracing] , identifier[self] . identifier[_on_trace] )
identifier[self] . identifier[client] . identifier[subscribe] ( identifier[topics] . identifier[streaming] , identifier[self] . identifier[_on_report] )
identifier[self] . identifier[client] . identifier[subscribe] ( identifier[topics] . identifier[response] , identifier[self] . identifier[_on_response_message] ) | def _bind_topics(self, topics):
"""Subscribe to all the topics we need to communication with this device
Args:
topics (MQTTTopicValidator): The topic validator for this device that
we are connecting to.
"""
# FIXME: Allow for these subscriptions to fail and clean up the previous ones
# so that this function is atomic
self.client.subscribe(topics.status, self._on_status_message)
self.client.subscribe(topics.tracing, self._on_trace)
self.client.subscribe(topics.streaming, self._on_report)
self.client.subscribe(topics.response, self._on_response_message) |
def _cancel_grpc(operations_stub, operation_name):
    """Cancel a long-running operation through a gRPC client.

    Args:
        operations_stub (google.longrunning.operations_pb2.OperationsStub):
            The gRPC operations stub.
        operation_name (str): The name of the operation to cancel.
    """
    cancel_request = operations_pb2.CancelOperationRequest(name=operation_name)
    operations_stub.CancelOperation(cancel_request)
constant[Cancel an operation using a gRPC client.
Args:
operations_stub (google.longrunning.operations_pb2.OperationsStub):
The gRPC operations stub.
operation_name (str): The name of the operation.
]
variable[request_pb] assign[=] call[name[operations_pb2].CancelOperationRequest, parameter[]]
call[name[operations_stub].CancelOperation, parameter[name[request_pb]]] | keyword[def] identifier[_cancel_grpc] ( identifier[operations_stub] , identifier[operation_name] ):
literal[string]
identifier[request_pb] = identifier[operations_pb2] . identifier[CancelOperationRequest] ( identifier[name] = identifier[operation_name] )
identifier[operations_stub] . identifier[CancelOperation] ( identifier[request_pb] ) | def _cancel_grpc(operations_stub, operation_name):
"""Cancel an operation using a gRPC client.
Args:
operations_stub (google.longrunning.operations_pb2.OperationsStub):
The gRPC operations stub.
operation_name (str): The name of the operation.
"""
request_pb = operations_pb2.CancelOperationRequest(name=operation_name)
operations_stub.CancelOperation(request_pb) |
def activate_state_tab(self, state_m):
        """Open or focus the editor tab for the given state model.

        If a tab for ``state_m`` already exists it is brought to the
        foreground; otherwise a new editor tab is created for it.

        :param state_m: The desired state model (the selected state)
        """
        # Only touch the notebook when the shown state differs from the
        # requested one.
        if self.get_current_state_m() is not state_m:
            state_identifier = self.get_state_identifier(state_m)
            if state_identifier in self.tabs:
                # Tab already open: look up its page number.
                page = self.tabs[state_identifier]['page']
                page_id = self.view.notebook.page_num(page)
            else:
                # Not open yet: create an editor tab for the state.
                page_id = self.add_state_editor(state_m)
            self.view.notebook.set_current_page(page_id)
        self.keep_only_sticked_and_selected_tabs()
constant[Opens the tab for the specified state model
The tab with the given state model is opened or set to foreground.
:param state_m: The desired state model (the selected state)
]
variable[current_state_m] assign[=] call[name[self].get_current_state_m, parameter[]]
if compare[name[current_state_m] is_not name[state_m]] begin[:]
variable[state_identifier] assign[=] call[name[self].get_state_identifier, parameter[name[state_m]]]
if compare[name[state_identifier] <ast.NotIn object at 0x7da2590d7190> name[self].tabs] begin[:]
variable[page_id] assign[=] call[name[self].add_state_editor, parameter[name[state_m]]]
call[name[self].view.notebook.set_current_page, parameter[name[page_id]]]
call[name[self].keep_only_sticked_and_selected_tabs, parameter[]] | keyword[def] identifier[activate_state_tab] ( identifier[self] , identifier[state_m] ):
literal[string]
identifier[current_state_m] = identifier[self] . identifier[get_current_state_m] ()
keyword[if] identifier[current_state_m] keyword[is] keyword[not] identifier[state_m] :
identifier[state_identifier] = identifier[self] . identifier[get_state_identifier] ( identifier[state_m] )
keyword[if] identifier[state_identifier] keyword[not] keyword[in] identifier[self] . identifier[tabs] :
identifier[page_id] = identifier[self] . identifier[add_state_editor] ( identifier[state_m] )
identifier[self] . identifier[view] . identifier[notebook] . identifier[set_current_page] ( identifier[page_id] )
keyword[else] :
identifier[page] = identifier[self] . identifier[tabs] [ identifier[state_identifier] ][ literal[string] ]
identifier[page_id] = identifier[self] . identifier[view] . identifier[notebook] . identifier[page_num] ( identifier[page] )
identifier[self] . identifier[view] . identifier[notebook] . identifier[set_current_page] ( identifier[page_id] )
identifier[self] . identifier[keep_only_sticked_and_selected_tabs] () | def activate_state_tab(self, state_m):
"""Opens the tab for the specified state model
The tab with the given state model is opened or set to foreground.
:param state_m: The desired state model (the selected state)
"""
# The current shown state differs from the desired one
current_state_m = self.get_current_state_m()
if current_state_m is not state_m:
state_identifier = self.get_state_identifier(state_m)
# The desired state is not open, yet
if state_identifier not in self.tabs:
# add tab for desired state
page_id = self.add_state_editor(state_m)
self.view.notebook.set_current_page(page_id) # depends on [control=['if'], data=[]]
else:
# bring tab for desired state into foreground
page = self.tabs[state_identifier]['page']
page_id = self.view.notebook.page_num(page)
self.view.notebook.set_current_page(page_id) # depends on [control=['if'], data=['state_m']]
self.keep_only_sticked_and_selected_tabs() |
def function(self, x, y, sigma0, Rs, e1, e2, center_x=0, center_y=0):
        """
        Evaluate the elliptical NFW profile (double integral).

        The coordinates are shifted to the profile centre, rotated into the
        frame of the ellipse and rescaled by the ellipticity before being
        handed to the underlying spherical profile.
        """
        phi_G, q = param_util.ellipticity2phi_q(e1, e2)
        # Recentre on the profile position.
        dx = x - center_x
        dy = y - center_y
        # Rotate into the ellipse frame and stretch/squeeze the axes.
        c = np.cos(phi_G)
        s = np.sin(phi_G)
        ellip = abs(1 - q)
        x_rot = (c * dx + s * dy) * np.sqrt(1 - ellip)
        y_rot = (-s * dx + c * dy) * np.sqrt(1 + ellip)
        return self.spherical.function(x_rot, y_rot, sigma0, Rs)
constant[
returns double integral of NFW profile
]
<ast.Tuple object at 0x7da2054a7cd0> assign[=] call[name[param_util].ellipticity2phi_q, parameter[name[e1], name[e2]]]
variable[x_shift] assign[=] binary_operation[name[x] - name[center_x]]
variable[y_shift] assign[=] binary_operation[name[y] - name[center_y]]
variable[cos_phi] assign[=] call[name[np].cos, parameter[name[phi_G]]]
variable[sin_phi] assign[=] call[name[np].sin, parameter[name[phi_G]]]
variable[e] assign[=] call[name[abs], parameter[binary_operation[constant[1] - name[q]]]]
variable[x_] assign[=] binary_operation[binary_operation[binary_operation[name[cos_phi] * name[x_shift]] + binary_operation[name[sin_phi] * name[y_shift]]] * call[name[np].sqrt, parameter[binary_operation[constant[1] - name[e]]]]]
variable[y_] assign[=] binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da2054a69b0> * name[x_shift]] + binary_operation[name[cos_phi] * name[y_shift]]] * call[name[np].sqrt, parameter[binary_operation[constant[1] + name[e]]]]]
variable[f_] assign[=] call[name[self].spherical.function, parameter[name[x_], name[y_], name[sigma0], name[Rs]]]
return[name[f_]] | keyword[def] identifier[function] ( identifier[self] , identifier[x] , identifier[y] , identifier[sigma0] , identifier[Rs] , identifier[e1] , identifier[e2] , identifier[center_x] = literal[int] , identifier[center_y] = literal[int] ):
literal[string]
identifier[phi_G] , identifier[q] = identifier[param_util] . identifier[ellipticity2phi_q] ( identifier[e1] , identifier[e2] )
identifier[x_shift] = identifier[x] - identifier[center_x]
identifier[y_shift] = identifier[y] - identifier[center_y]
identifier[cos_phi] = identifier[np] . identifier[cos] ( identifier[phi_G] )
identifier[sin_phi] = identifier[np] . identifier[sin] ( identifier[phi_G] )
identifier[e] = identifier[abs] ( literal[int] - identifier[q] )
identifier[x_] =( identifier[cos_phi] * identifier[x_shift] + identifier[sin_phi] * identifier[y_shift] )* identifier[np] . identifier[sqrt] ( literal[int] - identifier[e] )
identifier[y_] =(- identifier[sin_phi] * identifier[x_shift] + identifier[cos_phi] * identifier[y_shift] )* identifier[np] . identifier[sqrt] ( literal[int] + identifier[e] )
identifier[f_] = identifier[self] . identifier[spherical] . identifier[function] ( identifier[x_] , identifier[y_] , identifier[sigma0] , identifier[Rs] )
keyword[return] identifier[f_] | def function(self, x, y, sigma0, Rs, e1, e2, center_x=0, center_y=0):
"""
returns double integral of NFW profile
"""
(phi_G, q) = param_util.ellipticity2phi_q(e1, e2)
x_shift = x - center_x
y_shift = y - center_y
cos_phi = np.cos(phi_G)
sin_phi = np.sin(phi_G)
e = abs(1 - q)
x_ = (cos_phi * x_shift + sin_phi * y_shift) * np.sqrt(1 - e)
y_ = (-sin_phi * x_shift + cos_phi * y_shift) * np.sqrt(1 + e)
f_ = self.spherical.function(x_, y_, sigma0, Rs)
return f_ |
def _set_monitor(self, v, load=False):
  """
  Setter method for monitor, mapped from YANG variable /monitor (container)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_monitor is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_monitor() directly.
  """
  # Values produced by the generated bindings carry their own '_utype'
  # constructor; unwrap so the raw value is re-validated below.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value with the generated YANG schema metadata; raises
    # TypeError/ValueError when v is incompatible with the container.
    t = YANGDynClass(v,base=monitor.monitor, is_container='container', presence=False, yang_name="monitor", rest_name="monitor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Entering span sessions', u'cli-incomplete-no': None, u'callpoint': u'Span', u'sort-priority': u'RUNNCFG_LEVEL_SPAN_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-span', defining_module='brocade-span', yang_type='container', is_config=True)
  except (TypeError, ValueError):
    # Re-raise in the structured error format used by the generated
    # bindings, describing the expected YANG type.
    raise ValueError({
      'error-string': """monitor must be of a type compatible with container""",
      'defined-type': "container",
      'generated-type': """YANGDynClass(base=monitor.monitor, is_container='container', presence=False, yang_name="monitor", rest_name="monitor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Entering span sessions', u'cli-incomplete-no': None, u'callpoint': u'Span', u'sort-priority': u'RUNNCFG_LEVEL_SPAN_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-span', defining_module='brocade-span', yang_type='container', is_config=True)""",
    })
  self.__monitor = t
  # Notify the parent object (if wired up) that this node changed.
  if hasattr(self, '_set'):
    self._set()
constant[
Setter method for monitor, mapped from YANG variable /monitor (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_monitor is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_monitor() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da20c7947f0>
name[self].__monitor assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_monitor] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[monitor] . identifier[monitor] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__monitor] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_monitor(self, v, load=False):
"""
Setter method for monitor, mapped from YANG variable /monitor (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_monitor is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_monitor() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=monitor.monitor, is_container='container', presence=False, yang_name='monitor', rest_name='monitor', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Entering span sessions', u'cli-incomplete-no': None, u'callpoint': u'Span', u'sort-priority': u'RUNNCFG_LEVEL_SPAN_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-span', defining_module='brocade-span', yang_type='container', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'monitor must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=monitor.monitor, is_container=\'container\', presence=False, yang_name="monitor", rest_name="monitor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Entering span sessions\', u\'cli-incomplete-no\': None, u\'callpoint\': u\'Span\', u\'sort-priority\': u\'RUNNCFG_LEVEL_SPAN_CONFIG\'}}, namespace=\'urn:brocade.com:mgmt:brocade-span\', defining_module=\'brocade-span\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__monitor = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def calculate(self, T, P, zs, ws, method):
r'''Method to calculate molar volume of a solid mixture at
temperature `T`, pressure `P`, mole fractions `zs` and weight fractions
`ws` with a given method.
This method has no exception handling; see `mixture_property`
for that.
Parameters
----------
T : float
Temperature at which to calculate the property, [K]
P : float
Pressure at which to calculate the property, [Pa]
zs : list[float]
Mole fractions of all species in the mixture, [-]
ws : list[float]
Weight fractions of all species in the mixture, [-]
method : str
Name of the method to use
Returns
-------
Vm : float
Molar volume of the solid mixture at the given conditions,
[m^3/mol]
'''
if method == SIMPLE:
Vms = [i(T, P) for i in self.VolumeSolids]
return mixing_simple(zs, Vms)
else:
raise Exception('Method not valid') | def function[calculate, parameter[self, T, P, zs, ws, method]]:
constant[Method to calculate molar volume of a solid mixture at
temperature `T`, pressure `P`, mole fractions `zs` and weight fractions
`ws` with a given method.
This method has no exception handling; see `mixture_property`
for that.
Parameters
----------
T : float
Temperature at which to calculate the property, [K]
P : float
Pressure at which to calculate the property, [Pa]
zs : list[float]
Mole fractions of all species in the mixture, [-]
ws : list[float]
Weight fractions of all species in the mixture, [-]
method : str
Name of the method to use
Returns
-------
Vm : float
Molar volume of the solid mixture at the given conditions,
[m^3/mol]
]
if compare[name[method] equal[==] name[SIMPLE]] begin[:]
variable[Vms] assign[=] <ast.ListComp object at 0x7da2046210f0>
return[call[name[mixing_simple], parameter[name[zs], name[Vms]]]] | keyword[def] identifier[calculate] ( identifier[self] , identifier[T] , identifier[P] , identifier[zs] , identifier[ws] , identifier[method] ):
literal[string]
keyword[if] identifier[method] == identifier[SIMPLE] :
identifier[Vms] =[ identifier[i] ( identifier[T] , identifier[P] ) keyword[for] identifier[i] keyword[in] identifier[self] . identifier[VolumeSolids] ]
keyword[return] identifier[mixing_simple] ( identifier[zs] , identifier[Vms] )
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] ) | def calculate(self, T, P, zs, ws, method):
"""Method to calculate molar volume of a solid mixture at
temperature `T`, pressure `P`, mole fractions `zs` and weight fractions
`ws` with a given method.
This method has no exception handling; see `mixture_property`
for that.
Parameters
----------
T : float
Temperature at which to calculate the property, [K]
P : float
Pressure at which to calculate the property, [Pa]
zs : list[float]
Mole fractions of all species in the mixture, [-]
ws : list[float]
Weight fractions of all species in the mixture, [-]
method : str
Name of the method to use
Returns
-------
Vm : float
Molar volume of the solid mixture at the given conditions,
[m^3/mol]
"""
if method == SIMPLE:
Vms = [i(T, P) for i in self.VolumeSolids]
return mixing_simple(zs, Vms) # depends on [control=['if'], data=[]]
else:
raise Exception('Method not valid') |
def cast_like(x, y):
"""Cast x to y's dtype, if necessary."""
x = tf.convert_to_tensor(x)
y = tf.convert_to_tensor(y)
if x.dtype.base_dtype == y.dtype.base_dtype:
return x
cast_x = tf.cast(x, y.dtype)
if cast_x.device != x.device:
x_name = "(eager Tensor)"
try:
x_name = x.name
except AttributeError:
pass
tf.logging.warning("Cast for %s may induce copy from '%s' to '%s'", x_name,
x.device, cast_x.device)
return cast_x | def function[cast_like, parameter[x, y]]:
constant[Cast x to y's dtype, if necessary.]
variable[x] assign[=] call[name[tf].convert_to_tensor, parameter[name[x]]]
variable[y] assign[=] call[name[tf].convert_to_tensor, parameter[name[y]]]
if compare[name[x].dtype.base_dtype equal[==] name[y].dtype.base_dtype] begin[:]
return[name[x]]
variable[cast_x] assign[=] call[name[tf].cast, parameter[name[x], name[y].dtype]]
if compare[name[cast_x].device not_equal[!=] name[x].device] begin[:]
variable[x_name] assign[=] constant[(eager Tensor)]
<ast.Try object at 0x7da1b1ff21d0>
call[name[tf].logging.warning, parameter[constant[Cast for %s may induce copy from '%s' to '%s'], name[x_name], name[x].device, name[cast_x].device]]
return[name[cast_x]] | keyword[def] identifier[cast_like] ( identifier[x] , identifier[y] ):
literal[string]
identifier[x] = identifier[tf] . identifier[convert_to_tensor] ( identifier[x] )
identifier[y] = identifier[tf] . identifier[convert_to_tensor] ( identifier[y] )
keyword[if] identifier[x] . identifier[dtype] . identifier[base_dtype] == identifier[y] . identifier[dtype] . identifier[base_dtype] :
keyword[return] identifier[x]
identifier[cast_x] = identifier[tf] . identifier[cast] ( identifier[x] , identifier[y] . identifier[dtype] )
keyword[if] identifier[cast_x] . identifier[device] != identifier[x] . identifier[device] :
identifier[x_name] = literal[string]
keyword[try] :
identifier[x_name] = identifier[x] . identifier[name]
keyword[except] identifier[AttributeError] :
keyword[pass]
identifier[tf] . identifier[logging] . identifier[warning] ( literal[string] , identifier[x_name] ,
identifier[x] . identifier[device] , identifier[cast_x] . identifier[device] )
keyword[return] identifier[cast_x] | def cast_like(x, y):
"""Cast x to y's dtype, if necessary."""
x = tf.convert_to_tensor(x)
y = tf.convert_to_tensor(y)
if x.dtype.base_dtype == y.dtype.base_dtype:
return x # depends on [control=['if'], data=[]]
cast_x = tf.cast(x, y.dtype)
if cast_x.device != x.device:
x_name = '(eager Tensor)'
try:
x_name = x.name # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
tf.logging.warning("Cast for %s may induce copy from '%s' to '%s'", x_name, x.device, cast_x.device) # depends on [control=['if'], data=[]]
return cast_x |
def update_system(version='', ruby=None, runas=None, gem_bin=None):
'''
Update rubygems.
:param version: string : (newest)
The version of rubygems to install.
:param gem_bin: string : None
Full path to ``gem`` binary to use.
:param ruby: string : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if ``gem_bin`` is specified.
:param runas: string : None
The user to run gem as.
CLI Example:
.. code-block:: bash
salt '*' gem.update_system
'''
return _gem(['update', '--system', version],
ruby,
gem_bin=gem_bin,
runas=runas) | def function[update_system, parameter[version, ruby, runas, gem_bin]]:
constant[
Update rubygems.
:param version: string : (newest)
The version of rubygems to install.
:param gem_bin: string : None
Full path to ``gem`` binary to use.
:param ruby: string : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if ``gem_bin`` is specified.
:param runas: string : None
The user to run gem as.
CLI Example:
.. code-block:: bash
salt '*' gem.update_system
]
return[call[name[_gem], parameter[list[[<ast.Constant object at 0x7da204566290>, <ast.Constant object at 0x7da2045668f0>, <ast.Name object at 0x7da204567550>]], name[ruby]]]] | keyword[def] identifier[update_system] ( identifier[version] = literal[string] , identifier[ruby] = keyword[None] , identifier[runas] = keyword[None] , identifier[gem_bin] = keyword[None] ):
literal[string]
keyword[return] identifier[_gem] ([ literal[string] , literal[string] , identifier[version] ],
identifier[ruby] ,
identifier[gem_bin] = identifier[gem_bin] ,
identifier[runas] = identifier[runas] ) | def update_system(version='', ruby=None, runas=None, gem_bin=None):
"""
Update rubygems.
:param version: string : (newest)
The version of rubygems to install.
:param gem_bin: string : None
Full path to ``gem`` binary to use.
:param ruby: string : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if ``gem_bin`` is specified.
:param runas: string : None
The user to run gem as.
CLI Example:
.. code-block:: bash
salt '*' gem.update_system
"""
return _gem(['update', '--system', version], ruby, gem_bin=gem_bin, runas=runas) |
def get(self, columns=None):
"""
Execute the query as a "select" statement.
:param columns: The columns to get
:type columns: list
:rtype: orator.Collection
"""
models = self.get_models(columns)
# If we actually found models we will also eager load any relationships that
# have been specified as needing to be eager loaded, which will solve the
# n+1 query issue for the developers to avoid running a lot of queries.
if len(models) > 0:
models = self.eager_load_relations(models)
collection = self._model.new_collection(models)
return collection | def function[get, parameter[self, columns]]:
constant[
Execute the query as a "select" statement.
:param columns: The columns to get
:type columns: list
:rtype: orator.Collection
]
variable[models] assign[=] call[name[self].get_models, parameter[name[columns]]]
if compare[call[name[len], parameter[name[models]]] greater[>] constant[0]] begin[:]
variable[models] assign[=] call[name[self].eager_load_relations, parameter[name[models]]]
variable[collection] assign[=] call[name[self]._model.new_collection, parameter[name[models]]]
return[name[collection]] | keyword[def] identifier[get] ( identifier[self] , identifier[columns] = keyword[None] ):
literal[string]
identifier[models] = identifier[self] . identifier[get_models] ( identifier[columns] )
keyword[if] identifier[len] ( identifier[models] )> literal[int] :
identifier[models] = identifier[self] . identifier[eager_load_relations] ( identifier[models] )
identifier[collection] = identifier[self] . identifier[_model] . identifier[new_collection] ( identifier[models] )
keyword[return] identifier[collection] | def get(self, columns=None):
"""
Execute the query as a "select" statement.
:param columns: The columns to get
:type columns: list
:rtype: orator.Collection
"""
models = self.get_models(columns)
# If we actually found models we will also eager load any relationships that
# have been specified as needing to be eager loaded, which will solve the
# n+1 query issue for the developers to avoid running a lot of queries.
if len(models) > 0:
models = self.eager_load_relations(models) # depends on [control=['if'], data=[]]
collection = self._model.new_collection(models)
return collection |
def b64_print(s):
"""
Prints a string with spaces at every b64_intro characters
:param s: String to print
:return: String
"""
if USING_PYTHON2:
string = str(s)
else:
string = str(s, 'utf8')
return '\n'.join(
string[pos:pos + b64_intro] for pos in range(0, len(string), b64_intro)
) | def function[b64_print, parameter[s]]:
constant[
Prints a string with spaces at every b64_intro characters
:param s: String to print
:return: String
]
if name[USING_PYTHON2] begin[:]
variable[string] assign[=] call[name[str], parameter[name[s]]]
return[call[constant[
].join, parameter[<ast.GeneratorExp object at 0x7da1b0e3a8c0>]]] | keyword[def] identifier[b64_print] ( identifier[s] ):
literal[string]
keyword[if] identifier[USING_PYTHON2] :
identifier[string] = identifier[str] ( identifier[s] )
keyword[else] :
identifier[string] = identifier[str] ( identifier[s] , literal[string] )
keyword[return] literal[string] . identifier[join] (
identifier[string] [ identifier[pos] : identifier[pos] + identifier[b64_intro] ] keyword[for] identifier[pos] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[string] ), identifier[b64_intro] )
) | def b64_print(s):
"""
Prints a string with spaces at every b64_intro characters
:param s: String to print
:return: String
"""
if USING_PYTHON2:
string = str(s) # depends on [control=['if'], data=[]]
else:
string = str(s, 'utf8')
return '\n'.join((string[pos:pos + b64_intro] for pos in range(0, len(string), b64_intro))) |
def trim_ordered_range_list(ranges,start,finish):
"""A function to help with slicing a mapping
Start with a list of ranges and get another list of ranges constrained by start (0-indexed) and finish (1-indexed)
:param ranges: ordered non-overlapping ranges on the same chromosome
:param start: start 0-indexed
:param finish: ending 1-indexed
:type ranges: GenomicRange []
:type start: Int
:type finish: Int
:return: non-overlapping ranges on same chromosome constrained by start and finish
:rtype: GenomicRange []
"""
z = 0
keep_ranges = []
for inrng in self.ranges:
z+=1
original_rng = inrng
rng = inrng.copy() # we will be passing it along and possibly be cutting it
done = False;
#print 'exon length '+str(rng.length())
if start >= index and start < index+original_rng.length(): # we are in this one
rng.start = original_rng.start+(start-index) # fix the start
#print 'fixstart '+str(original_rng.start)+' to '+str(rng.start)
if finish > index and finish <= index+original_rng.length():
rng.end = original_rng.start+(finish-index)-1
done = True
#print 'fixend '+str(original_rng.end)+' to '+str(rng.end)
if finish <= index+original_rng.length(): # we are in the last exon we need
index+= original_rng.length()
keep_ranges.append(rng)
break
if index+original_rng.length() < start: # we don't need any bases from this
index += original_rng.length()
continue # we don't use this exon
keep_ranges.append(rng)
index += original_rng.length()
if index > finish: break
if done: break
return keep_ranges | def function[trim_ordered_range_list, parameter[ranges, start, finish]]:
constant[A function to help with slicing a mapping
Start with a list of ranges and get another list of ranges constrained by start (0-indexed) and finish (1-indexed)
:param ranges: ordered non-overlapping ranges on the same chromosome
:param start: start 0-indexed
:param finish: ending 1-indexed
:type ranges: GenomicRange []
:type start: Int
:type finish: Int
:return: non-overlapping ranges on same chromosome constrained by start and finish
:rtype: GenomicRange []
]
variable[z] assign[=] constant[0]
variable[keep_ranges] assign[=] list[[]]
for taget[name[inrng]] in starred[name[self].ranges] begin[:]
<ast.AugAssign object at 0x7da20e954ca0>
variable[original_rng] assign[=] name[inrng]
variable[rng] assign[=] call[name[inrng].copy, parameter[]]
variable[done] assign[=] constant[False]
if <ast.BoolOp object at 0x7da20e955210> begin[:]
name[rng].start assign[=] binary_operation[name[original_rng].start + binary_operation[name[start] - name[index]]]
if <ast.BoolOp object at 0x7da20e956aa0> begin[:]
name[rng].end assign[=] binary_operation[binary_operation[name[original_rng].start + binary_operation[name[finish] - name[index]]] - constant[1]]
variable[done] assign[=] constant[True]
if compare[name[finish] less_or_equal[<=] binary_operation[name[index] + call[name[original_rng].length, parameter[]]]] begin[:]
<ast.AugAssign object at 0x7da20e957f40>
call[name[keep_ranges].append, parameter[name[rng]]]
break
if compare[binary_operation[name[index] + call[name[original_rng].length, parameter[]]] less[<] name[start]] begin[:]
<ast.AugAssign object at 0x7da20e955150>
continue
call[name[keep_ranges].append, parameter[name[rng]]]
<ast.AugAssign object at 0x7da20e9565c0>
if compare[name[index] greater[>] name[finish]] begin[:]
break
if name[done] begin[:]
break
return[name[keep_ranges]] | keyword[def] identifier[trim_ordered_range_list] ( identifier[ranges] , identifier[start] , identifier[finish] ):
literal[string]
identifier[z] = literal[int]
identifier[keep_ranges] =[]
keyword[for] identifier[inrng] keyword[in] identifier[self] . identifier[ranges] :
identifier[z] += literal[int]
identifier[original_rng] = identifier[inrng]
identifier[rng] = identifier[inrng] . identifier[copy] ()
identifier[done] = keyword[False] ;
keyword[if] identifier[start] >= identifier[index] keyword[and] identifier[start] < identifier[index] + identifier[original_rng] . identifier[length] ():
identifier[rng] . identifier[start] = identifier[original_rng] . identifier[start] +( identifier[start] - identifier[index] )
keyword[if] identifier[finish] > identifier[index] keyword[and] identifier[finish] <= identifier[index] + identifier[original_rng] . identifier[length] ():
identifier[rng] . identifier[end] = identifier[original_rng] . identifier[start] +( identifier[finish] - identifier[index] )- literal[int]
identifier[done] = keyword[True]
keyword[if] identifier[finish] <= identifier[index] + identifier[original_rng] . identifier[length] ():
identifier[index] += identifier[original_rng] . identifier[length] ()
identifier[keep_ranges] . identifier[append] ( identifier[rng] )
keyword[break]
keyword[if] identifier[index] + identifier[original_rng] . identifier[length] ()< identifier[start] :
identifier[index] += identifier[original_rng] . identifier[length] ()
keyword[continue]
identifier[keep_ranges] . identifier[append] ( identifier[rng] )
identifier[index] += identifier[original_rng] . identifier[length] ()
keyword[if] identifier[index] > identifier[finish] : keyword[break]
keyword[if] identifier[done] : keyword[break]
keyword[return] identifier[keep_ranges] | def trim_ordered_range_list(ranges, start, finish):
"""A function to help with slicing a mapping
Start with a list of ranges and get another list of ranges constrained by start (0-indexed) and finish (1-indexed)
:param ranges: ordered non-overlapping ranges on the same chromosome
:param start: start 0-indexed
:param finish: ending 1-indexed
:type ranges: GenomicRange []
:type start: Int
:type finish: Int
:return: non-overlapping ranges on same chromosome constrained by start and finish
:rtype: GenomicRange []
"""
z = 0
keep_ranges = []
for inrng in self.ranges:
z += 1
original_rng = inrng
rng = inrng.copy() # we will be passing it along and possibly be cutting it
done = False
#print 'exon length '+str(rng.length())
if start >= index and start < index + original_rng.length(): # we are in this one
rng.start = original_rng.start + (start - index) # fix the start # depends on [control=['if'], data=[]]
#print 'fixstart '+str(original_rng.start)+' to '+str(rng.start)
if finish > index and finish <= index + original_rng.length():
rng.end = original_rng.start + (finish - index) - 1
done = True # depends on [control=['if'], data=[]]
#print 'fixend '+str(original_rng.end)+' to '+str(rng.end)
if finish <= index + original_rng.length(): # we are in the last exon we need
index += original_rng.length()
keep_ranges.append(rng)
break # depends on [control=['if'], data=[]]
if index + original_rng.length() < start: # we don't need any bases from this
index += original_rng.length()
continue # we don't use this exon # depends on [control=['if'], data=[]]
keep_ranges.append(rng)
index += original_rng.length()
if index > finish:
break # depends on [control=['if'], data=[]]
if done:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['inrng']]
return keep_ranges |
def _init_check_upodates():
"""Sub function for init
"""
message, count, packages = check_updates()
if count > 0:
print(message)
for pkg in packages:
print("{0}".format(pkg))
else:
print(message) | def function[_init_check_upodates, parameter[]]:
constant[Sub function for init
]
<ast.Tuple object at 0x7da204565d80> assign[=] call[name[check_updates], parameter[]]
if compare[name[count] greater[>] constant[0]] begin[:]
call[name[print], parameter[name[message]]]
for taget[name[pkg]] in starred[name[packages]] begin[:]
call[name[print], parameter[call[constant[{0}].format, parameter[name[pkg]]]]] | keyword[def] identifier[_init_check_upodates] ():
literal[string]
identifier[message] , identifier[count] , identifier[packages] = identifier[check_updates] ()
keyword[if] identifier[count] > literal[int] :
identifier[print] ( identifier[message] )
keyword[for] identifier[pkg] keyword[in] identifier[packages] :
identifier[print] ( literal[string] . identifier[format] ( identifier[pkg] ))
keyword[else] :
identifier[print] ( identifier[message] ) | def _init_check_upodates():
"""Sub function for init
"""
(message, count, packages) = check_updates()
if count > 0:
print(message)
for pkg in packages:
print('{0}'.format(pkg)) # depends on [control=['for'], data=['pkg']] # depends on [control=['if'], data=[]]
else:
print(message) |
def _node_tag_update_loop(self):
""" Update the AWS tags for a cluster periodically.
The purpose of this loop is to avoid excessive EC2 calls when a large
number of nodes are being launched simultaneously.
"""
while True:
self.tag_cache_update_event.wait()
self.tag_cache_update_event.clear()
batch_updates = defaultdict(list)
with self.tag_cache_lock:
for node_id, tags in self.tag_cache_pending.items():
for x in tags.items():
batch_updates[x].append(node_id)
self.tag_cache[node_id].update(tags)
self.tag_cache_pending = {}
for (k, v), node_ids in batch_updates.items():
m = "Set tag {}={} on {}".format(k, v, node_ids)
with LogTimer("AWSNodeProvider: {}".format(m)):
if k == TAG_RAY_NODE_NAME:
k = "Name"
self.ec2.meta.client.create_tags(
Resources=node_ids,
Tags=[{
"Key": k,
"Value": v
}],
)
self.tag_cache_kill_event.wait(timeout=5)
if self.tag_cache_kill_event.is_set():
return | def function[_node_tag_update_loop, parameter[self]]:
constant[ Update the AWS tags for a cluster periodically.
The purpose of this loop is to avoid excessive EC2 calls when a large
number of nodes are being launched simultaneously.
]
while constant[True] begin[:]
call[name[self].tag_cache_update_event.wait, parameter[]]
call[name[self].tag_cache_update_event.clear, parameter[]]
variable[batch_updates] assign[=] call[name[defaultdict], parameter[name[list]]]
with name[self].tag_cache_lock begin[:]
for taget[tuple[[<ast.Name object at 0x7da20c6c6170>, <ast.Name object at 0x7da20c6c6f20>]]] in starred[call[name[self].tag_cache_pending.items, parameter[]]] begin[:]
for taget[name[x]] in starred[call[name[tags].items, parameter[]]] begin[:]
call[call[name[batch_updates]][name[x]].append, parameter[name[node_id]]]
call[call[name[self].tag_cache][name[node_id]].update, parameter[name[tags]]]
name[self].tag_cache_pending assign[=] dictionary[[], []]
for taget[tuple[[<ast.Tuple object at 0x7da2041da080>, <ast.Name object at 0x7da2041dafb0>]]] in starred[call[name[batch_updates].items, parameter[]]] begin[:]
variable[m] assign[=] call[constant[Set tag {}={} on {}].format, parameter[name[k], name[v], name[node_ids]]]
with call[name[LogTimer], parameter[call[constant[AWSNodeProvider: {}].format, parameter[name[m]]]]] begin[:]
if compare[name[k] equal[==] name[TAG_RAY_NODE_NAME]] begin[:]
variable[k] assign[=] constant[Name]
call[name[self].ec2.meta.client.create_tags, parameter[]]
call[name[self].tag_cache_kill_event.wait, parameter[]]
if call[name[self].tag_cache_kill_event.is_set, parameter[]] begin[:]
return[None] | keyword[def] identifier[_node_tag_update_loop] ( identifier[self] ):
literal[string]
keyword[while] keyword[True] :
identifier[self] . identifier[tag_cache_update_event] . identifier[wait] ()
identifier[self] . identifier[tag_cache_update_event] . identifier[clear] ()
identifier[batch_updates] = identifier[defaultdict] ( identifier[list] )
keyword[with] identifier[self] . identifier[tag_cache_lock] :
keyword[for] identifier[node_id] , identifier[tags] keyword[in] identifier[self] . identifier[tag_cache_pending] . identifier[items] ():
keyword[for] identifier[x] keyword[in] identifier[tags] . identifier[items] ():
identifier[batch_updates] [ identifier[x] ]. identifier[append] ( identifier[node_id] )
identifier[self] . identifier[tag_cache] [ identifier[node_id] ]. identifier[update] ( identifier[tags] )
identifier[self] . identifier[tag_cache_pending] ={}
keyword[for] ( identifier[k] , identifier[v] ), identifier[node_ids] keyword[in] identifier[batch_updates] . identifier[items] ():
identifier[m] = literal[string] . identifier[format] ( identifier[k] , identifier[v] , identifier[node_ids] )
keyword[with] identifier[LogTimer] ( literal[string] . identifier[format] ( identifier[m] )):
keyword[if] identifier[k] == identifier[TAG_RAY_NODE_NAME] :
identifier[k] = literal[string]
identifier[self] . identifier[ec2] . identifier[meta] . identifier[client] . identifier[create_tags] (
identifier[Resources] = identifier[node_ids] ,
identifier[Tags] =[{
literal[string] : identifier[k] ,
literal[string] : identifier[v]
}],
)
identifier[self] . identifier[tag_cache_kill_event] . identifier[wait] ( identifier[timeout] = literal[int] )
keyword[if] identifier[self] . identifier[tag_cache_kill_event] . identifier[is_set] ():
keyword[return] | def _node_tag_update_loop(self):
""" Update the AWS tags for a cluster periodically.
The purpose of this loop is to avoid excessive EC2 calls when a large
number of nodes are being launched simultaneously.
"""
while True:
self.tag_cache_update_event.wait()
self.tag_cache_update_event.clear()
batch_updates = defaultdict(list)
with self.tag_cache_lock:
for (node_id, tags) in self.tag_cache_pending.items():
for x in tags.items():
batch_updates[x].append(node_id) # depends on [control=['for'], data=['x']]
self.tag_cache[node_id].update(tags) # depends on [control=['for'], data=[]]
self.tag_cache_pending = {} # depends on [control=['with'], data=[]]
for ((k, v), node_ids) in batch_updates.items():
m = 'Set tag {}={} on {}'.format(k, v, node_ids)
with LogTimer('AWSNodeProvider: {}'.format(m)):
if k == TAG_RAY_NODE_NAME:
k = 'Name' # depends on [control=['if'], data=['k']]
self.ec2.meta.client.create_tags(Resources=node_ids, Tags=[{'Key': k, 'Value': v}]) # depends on [control=['with'], data=[]] # depends on [control=['for'], data=[]]
self.tag_cache_kill_event.wait(timeout=5)
if self.tag_cache_kill_event.is_set():
return # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] |
def console(loop, log):
"""Connect to receiver and show events as they occur.
Pulls the following arguments from the command line (not method arguments):
:param host:
Hostname or IP Address of the device.
:param port:
TCP port number of the device.
:param verbose:
Show debug logging.
"""
parser = argparse.ArgumentParser(description=console.__doc__)
parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR')
parser.add_argument('--port', default='14999', help='Port of AVR')
parser.add_argument('--verbose', '-v', action='count')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.INFO
logging.basicConfig(level=level)
def log_callback(message):
"""Receives event callback from Anthem Protocol class."""
log.info('Callback invoked: %s' % message)
host = args.host
port = int(args.port)
log.info('Connecting to Anthem AVR at %s:%i' % (host, port))
conn = yield from anthemav.Connection.create(
host=host, port=port, loop=loop, update_callback=log_callback)
log.info('Power state is '+str(conn.protocol.power))
conn.protocol.power = True
log.info('Power state is '+str(conn.protocol.power))
yield from asyncio.sleep(10, loop=loop)
log.info('Panel brightness (raw) is '+str(conn.protocol.panel_brightness))
log.info('Panel brightness (text) is '+str(conn.protocol.panel_brightness_text)) | def function[console, parameter[loop, log]]:
constant[Connect to receiver and show events as they occur.
Pulls the following arguments from the command line (not method arguments):
:param host:
Hostname or IP Address of the device.
:param port:
TCP port number of the device.
:param verbose:
Show debug logging.
]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[--host]]]
call[name[parser].add_argument, parameter[constant[--port]]]
call[name[parser].add_argument, parameter[constant[--verbose], constant[-v]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[]]
if name[args].verbose begin[:]
variable[level] assign[=] name[logging].DEBUG
call[name[logging].basicConfig, parameter[]]
def function[log_callback, parameter[message]]:
constant[Receives event callback from Anthem Protocol class.]
call[name[log].info, parameter[binary_operation[constant[Callback invoked: %s] <ast.Mod object at 0x7da2590d6920> name[message]]]]
variable[host] assign[=] name[args].host
variable[port] assign[=] call[name[int], parameter[name[args].port]]
call[name[log].info, parameter[binary_operation[constant[Connecting to Anthem AVR at %s:%i] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b23e6980>, <ast.Name object at 0x7da1b23e59c0>]]]]]
variable[conn] assign[=] <ast.YieldFrom object at 0x7da1b23e6ad0>
call[name[log].info, parameter[binary_operation[constant[Power state is ] + call[name[str], parameter[name[conn].protocol.power]]]]]
name[conn].protocol.power assign[=] constant[True]
call[name[log].info, parameter[binary_operation[constant[Power state is ] + call[name[str], parameter[name[conn].protocol.power]]]]]
<ast.YieldFrom object at 0x7da1b23e67a0>
call[name[log].info, parameter[binary_operation[constant[Panel brightness (raw) is ] + call[name[str], parameter[name[conn].protocol.panel_brightness]]]]]
call[name[log].info, parameter[binary_operation[constant[Panel brightness (text) is ] + call[name[str], parameter[name[conn].protocol.panel_brightness_text]]]]] | keyword[def] identifier[console] ( identifier[loop] , identifier[log] ):
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = identifier[console] . identifier[__doc__] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[default] = literal[string] , identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[default] = literal[string] , identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] )
identifier[args] = identifier[parser] . identifier[parse_args] ()
keyword[if] identifier[args] . identifier[verbose] :
identifier[level] = identifier[logging] . identifier[DEBUG]
keyword[else] :
identifier[level] = identifier[logging] . identifier[INFO]
identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[level] )
keyword[def] identifier[log_callback] ( identifier[message] ):
literal[string]
identifier[log] . identifier[info] ( literal[string] % identifier[message] )
identifier[host] = identifier[args] . identifier[host]
identifier[port] = identifier[int] ( identifier[args] . identifier[port] )
identifier[log] . identifier[info] ( literal[string] %( identifier[host] , identifier[port] ))
identifier[conn] = keyword[yield] keyword[from] identifier[anthemav] . identifier[Connection] . identifier[create] (
identifier[host] = identifier[host] , identifier[port] = identifier[port] , identifier[loop] = identifier[loop] , identifier[update_callback] = identifier[log_callback] )
identifier[log] . identifier[info] ( literal[string] + identifier[str] ( identifier[conn] . identifier[protocol] . identifier[power] ))
identifier[conn] . identifier[protocol] . identifier[power] = keyword[True]
identifier[log] . identifier[info] ( literal[string] + identifier[str] ( identifier[conn] . identifier[protocol] . identifier[power] ))
keyword[yield] keyword[from] identifier[asyncio] . identifier[sleep] ( literal[int] , identifier[loop] = identifier[loop] )
identifier[log] . identifier[info] ( literal[string] + identifier[str] ( identifier[conn] . identifier[protocol] . identifier[panel_brightness] ))
identifier[log] . identifier[info] ( literal[string] + identifier[str] ( identifier[conn] . identifier[protocol] . identifier[panel_brightness_text] )) | def console(loop, log):
"""Connect to receiver and show events as they occur.
Pulls the following arguments from the command line (not method arguments):
:param host:
Hostname or IP Address of the device.
:param port:
TCP port number of the device.
:param verbose:
Show debug logging.
"""
parser = argparse.ArgumentParser(description=console.__doc__)
parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR')
parser.add_argument('--port', default='14999', help='Port of AVR')
parser.add_argument('--verbose', '-v', action='count')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG # depends on [control=['if'], data=[]]
else:
level = logging.INFO
logging.basicConfig(level=level)
def log_callback(message):
"""Receives event callback from Anthem Protocol class."""
log.info('Callback invoked: %s' % message)
host = args.host
port = int(args.port)
log.info('Connecting to Anthem AVR at %s:%i' % (host, port))
conn = (yield from anthemav.Connection.create(host=host, port=port, loop=loop, update_callback=log_callback))
log.info('Power state is ' + str(conn.protocol.power))
conn.protocol.power = True
log.info('Power state is ' + str(conn.protocol.power))
yield from asyncio.sleep(10, loop=loop)
log.info('Panel brightness (raw) is ' + str(conn.protocol.panel_brightness))
log.info('Panel brightness (text) is ' + str(conn.protocol.panel_brightness_text)) |
def compact(self):
    """Compact the queue by discarding messages every subscriber has seen.

    When subscribers exist, the minimum fetch level across all of them
    marks the prefix of ``self.messages`` that every subscriber has
    already consumed; that prefix is removed.  With no subscribers the
    whole queue is cleared.

    :return: the number of messages removed (0 when nothing could be
        freed).  The original implementation fell through and returned
        ``None`` when the minimum level was 0, contradicting its own
        contract; it also used Py2-only ``itervalues()``.
    """
    if self.subscribers:
        # The slowest subscriber determines how far we can safely trim.
        level = min(self.subscribers.values())
        if level:
            del self.messages[:level]
        # Always return an int, even when level == 0.
        return level
    level = len(self.messages)
    del self.messages[:]
    return level
constant[Compacts the queue: removes all the messages from the queue that
have been fetched by all the subscribed coroutines.
Returns the number of messages that have been removed.]
if name[self].subscribers begin[:]
variable[level] assign[=] call[name[min], parameter[call[name[self].subscribers.itervalues, parameter[]]]]
if name[level] begin[:]
<ast.Delete object at 0x7da20c6c5870>
return[name[level]] | keyword[def] identifier[compact] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[subscribers] :
identifier[level] = identifier[min] ( identifier[self] . identifier[subscribers] . identifier[itervalues] ())
keyword[if] identifier[level] :
keyword[del] identifier[self] . identifier[messages] [: identifier[level] ]
keyword[return] identifier[level]
keyword[else] :
identifier[level] = identifier[len] ( identifier[self] . identifier[messages] )
keyword[del] identifier[self] . identifier[messages] [:]
keyword[return] identifier[level] | def compact(self):
"""Compacts the queue: removes all the messages from the queue that
have been fetched by all the subscribed coroutines.
Returns the number of messages that have been removed."""
if self.subscribers:
level = min(self.subscribers.itervalues())
if level:
del self.messages[:level] # depends on [control=['if'], data=[]]
return level # depends on [control=['if'], data=[]]
else:
level = len(self.messages)
del self.messages[:]
return level |
def get_iface_addr(iface='eth0', inet_type='AF_INET', inc_aliases=False,
                   fatal=True, exc_list=None):
    """Return the assigned IP address for a given interface, if any.

    :param iface: network interface on which address(es) are expected to
        be found.
    :param inet_type: inet address family
    :param inc_aliases: include alias interfaces in search
    :param fatal: if True, raise exception if address not found
    :param exc_list: list of addresses to ignore
    :return: sorted list of ip addresses
    """
    # Accept device paths such as /dev/ethX and keep only the nic name.
    if '/' in iface:
        iface = iface.rsplit('/', 1)[-1]
    exc_list = exc_list or []
    try:
        inet_num = getattr(netifaces, inet_type)
    except AttributeError:
        raise Exception("Unknown inet type '%s'" % str(inet_type))
    known_ifaces = netifaces.interfaces()
    if inc_aliases:
        # Collect the interface itself plus any alias interfaces (ethX:Y).
        matches = [name for name in known_ifaces
                   if name == iface or name.split(':')[0] == iface]
        if fatal and not matches:
            raise Exception("Invalid interface '%s'" % iface)
        matches.sort()
    elif iface in known_ifaces:
        matches = [iface]
    elif fatal:
        raise Exception("Interface '%s' not found " % (iface))
    else:
        return []
    addresses = []
    for name in matches:
        # Addresses for families other than inet_num are simply skipped.
        addr_info = netifaces.ifaddresses(name).get(inet_num, [])
        addresses.extend(entry['addr'] for entry in addr_info
                         if 'addr' in entry and entry['addr'] not in exc_list)
    if fatal and not addresses:
        raise Exception("Interface '%s' doesn't have any %s addresses." %
                        (iface, inet_type))
    return sorted(addresses)
constant[Return the assigned IP address for a given interface, if any.
:param iface: network interface on which address(es) are expected to
be found.
:param inet_type: inet address family
:param inc_aliases: include alias interfaces in search
:param fatal: if True, raise exception if address not found
:param exc_list: list of addresses to ignore
:return: list of ip addresses
]
if compare[constant[/] in name[iface]] begin[:]
variable[iface] assign[=] call[call[name[iface].split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da18f09d0f0>]
if <ast.UnaryOp object at 0x7da18f09e7d0> begin[:]
variable[exc_list] assign[=] list[[]]
<ast.Try object at 0x7da18f09f1f0>
variable[interfaces] assign[=] call[name[netifaces].interfaces, parameter[]]
if name[inc_aliases] begin[:]
variable[ifaces] assign[=] list[[]]
for taget[name[_iface]] in starred[name[interfaces]] begin[:]
if <ast.BoolOp object at 0x7da18f09fc40> begin[:]
call[name[ifaces].append, parameter[name[_iface]]]
if <ast.BoolOp object at 0x7da18f09d4e0> begin[:]
<ast.Raise object at 0x7da18f09f100>
call[name[ifaces].sort, parameter[]]
variable[addresses] assign[=] list[[]]
for taget[name[netiface]] in starred[name[ifaces]] begin[:]
variable[net_info] assign[=] call[name[netifaces].ifaddresses, parameter[name[netiface]]]
if compare[name[inet_num] in name[net_info]] begin[:]
for taget[name[entry]] in starred[call[name[net_info]][name[inet_num]]] begin[:]
if <ast.BoolOp object at 0x7da18f09ec20> begin[:]
call[name[addresses].append, parameter[call[name[entry]][constant[addr]]]]
if <ast.BoolOp object at 0x7da18f09c610> begin[:]
<ast.Raise object at 0x7da18f09feb0>
return[call[name[sorted], parameter[name[addresses]]]] | keyword[def] identifier[get_iface_addr] ( identifier[iface] = literal[string] , identifier[inet_type] = literal[string] , identifier[inc_aliases] = keyword[False] ,
identifier[fatal] = keyword[True] , identifier[exc_list] = keyword[None] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[iface] :
identifier[iface] = identifier[iface] . identifier[split] ( literal[string] )[- literal[int] ]
keyword[if] keyword[not] identifier[exc_list] :
identifier[exc_list] =[]
keyword[try] :
identifier[inet_num] = identifier[getattr] ( identifier[netifaces] , identifier[inet_type] )
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[str] ( identifier[inet_type] ))
identifier[interfaces] = identifier[netifaces] . identifier[interfaces] ()
keyword[if] identifier[inc_aliases] :
identifier[ifaces] =[]
keyword[for] identifier[_iface] keyword[in] identifier[interfaces] :
keyword[if] identifier[iface] == identifier[_iface] keyword[or] identifier[_iface] . identifier[split] ( literal[string] )[ literal[int] ]== identifier[iface] :
identifier[ifaces] . identifier[append] ( identifier[_iface] )
keyword[if] identifier[fatal] keyword[and] keyword[not] identifier[ifaces] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[iface] )
identifier[ifaces] . identifier[sort] ()
keyword[else] :
keyword[if] identifier[iface] keyword[not] keyword[in] identifier[interfaces] :
keyword[if] identifier[fatal] :
keyword[raise] identifier[Exception] ( literal[string] %( identifier[iface] ))
keyword[else] :
keyword[return] []
keyword[else] :
identifier[ifaces] =[ identifier[iface] ]
identifier[addresses] =[]
keyword[for] identifier[netiface] keyword[in] identifier[ifaces] :
identifier[net_info] = identifier[netifaces] . identifier[ifaddresses] ( identifier[netiface] )
keyword[if] identifier[inet_num] keyword[in] identifier[net_info] :
keyword[for] identifier[entry] keyword[in] identifier[net_info] [ identifier[inet_num] ]:
keyword[if] literal[string] keyword[in] identifier[entry] keyword[and] identifier[entry] [ literal[string] ] keyword[not] keyword[in] identifier[exc_list] :
identifier[addresses] . identifier[append] ( identifier[entry] [ literal[string] ])
keyword[if] identifier[fatal] keyword[and] keyword[not] identifier[addresses] :
keyword[raise] identifier[Exception] ( literal[string] %
( identifier[iface] , identifier[inet_type] ))
keyword[return] identifier[sorted] ( identifier[addresses] ) | def get_iface_addr(iface='eth0', inet_type='AF_INET', inc_aliases=False, fatal=True, exc_list=None):
"""Return the assigned IP address for a given interface, if any.
:param iface: network interface on which address(es) are expected to
be found.
:param inet_type: inet address family
:param inc_aliases: include alias interfaces in search
:param fatal: if True, raise exception if address not found
:param exc_list: list of addresses to ignore
:return: list of ip addresses
"""
# Extract nic if passed /dev/ethX
if '/' in iface:
iface = iface.split('/')[-1] # depends on [control=['if'], data=['iface']]
if not exc_list:
exc_list = [] # depends on [control=['if'], data=[]]
try:
inet_num = getattr(netifaces, inet_type) # depends on [control=['try'], data=[]]
except AttributeError:
raise Exception("Unknown inet type '%s'" % str(inet_type)) # depends on [control=['except'], data=[]]
interfaces = netifaces.interfaces()
if inc_aliases:
ifaces = []
for _iface in interfaces:
if iface == _iface or _iface.split(':')[0] == iface:
ifaces.append(_iface) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_iface']]
if fatal and (not ifaces):
raise Exception("Invalid interface '%s'" % iface) # depends on [control=['if'], data=[]]
ifaces.sort() # depends on [control=['if'], data=[]]
elif iface not in interfaces:
if fatal:
raise Exception("Interface '%s' not found " % iface) # depends on [control=['if'], data=[]]
else:
return [] # depends on [control=['if'], data=['iface']]
else:
ifaces = [iface]
addresses = []
for netiface in ifaces:
net_info = netifaces.ifaddresses(netiface)
if inet_num in net_info:
for entry in net_info[inet_num]:
if 'addr' in entry and entry['addr'] not in exc_list:
addresses.append(entry['addr']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['entry']] # depends on [control=['if'], data=['inet_num', 'net_info']] # depends on [control=['for'], data=['netiface']]
if fatal and (not addresses):
raise Exception("Interface '%s' doesn't have any %s addresses." % (iface, inet_type)) # depends on [control=['if'], data=[]]
return sorted(addresses) |
def wait_for(
        self, timeout=10000, interval=1000,
        asserter=lambda x: x):
    """Wait for driver till satisfy the given condition

    Support:
        Android iOS Web(WebView)

    Args:
        timeout(int): How long we should be retrying stuff.
        interval(int): How long between retries.
        asserter(callable): The asserter func to determine the result.

    Returns:
        Return the driver.

    Raises:
        WebDriverException.
    """
    if not callable(asserter):
        raise TypeError('Asserter must be callable.')

    # Keep retrying while the asserter raises WebDriverException, up to
    # `timeout` ms overall with `interval` ms between attempts.
    @retry(
        stop_max_delay=timeout,
        wait_fixed=interval,
        retry_on_exception=lambda ex: isinstance(ex, WebDriverException)
    )
    def _poll(driver):
        asserter(driver)
        return driver

    return _poll(self)
constant[Wait for driver till satisfy the given condition
Support:
Android iOS Web(WebView)
Args:
timeout(int): How long we should be retrying stuff.
interval(int): How long between retries.
asserter(callable): The asserter func to determine the result.
Returns:
Return the driver.
Raises:
WebDriverException.
]
if <ast.UnaryOp object at 0x7da1afe3aa70> begin[:]
<ast.Raise object at 0x7da1afe3a080>
def function[_wait_for, parameter[driver]]:
call[name[asserter], parameter[name[driver]]]
return[name[driver]]
return[call[name[_wait_for], parameter[name[self]]]] | keyword[def] identifier[wait_for] (
identifier[self] , identifier[timeout] = literal[int] , identifier[interval] = literal[int] ,
identifier[asserter] = keyword[lambda] identifier[x] : identifier[x] ):
literal[string]
keyword[if] keyword[not] identifier[callable] ( identifier[asserter] ):
keyword[raise] identifier[TypeError] ( literal[string] )
@ identifier[retry] (
identifier[retry_on_exception] = keyword[lambda] identifier[ex] : identifier[isinstance] ( identifier[ex] , identifier[WebDriverException] ),
identifier[stop_max_delay] = identifier[timeout] ,
identifier[wait_fixed] = identifier[interval]
)
keyword[def] identifier[_wait_for] ( identifier[driver] ):
identifier[asserter] ( identifier[driver] )
keyword[return] identifier[driver]
keyword[return] identifier[_wait_for] ( identifier[self] ) | def wait_for(self, timeout=10000, interval=1000, asserter=lambda x: x):
"""Wait for driver till satisfy the given condition
Support:
Android iOS Web(WebView)
Args:
timeout(int): How long we should be retrying stuff.
interval(int): How long between retries.
asserter(callable): The asserter func to determine the result.
Returns:
Return the driver.
Raises:
WebDriverException.
"""
if not callable(asserter):
raise TypeError('Asserter must be callable.') # depends on [control=['if'], data=[]]
@retry(retry_on_exception=lambda ex: isinstance(ex, WebDriverException), stop_max_delay=timeout, wait_fixed=interval)
def _wait_for(driver):
asserter(driver)
return driver
return _wait_for(self) |
def addMethod(self, m):
    """
    Adds a L{Method} to the interface

    If the method's argument count has not been computed yet
    (``m.nargs == -1``), derive the number of input and output arguments
    from its signatures.  Registering a method invalidates the cached
    introspection XML.
    """
    if m.nargs == -1:
        # Count signature elements without materializing throwaway lists
        # (the original built a list just to take its len()).
        m.nargs = sum(1 for _ in marshal.genCompleteTypes(m.sigIn))
        m.nret = sum(1 for _ in marshal.genCompleteTypes(m.sigOut))
    self.methods[m.name] = m
    # Drop the cached XML description; it must be regenerated.
    self._xml = None
constant[
Adds a L{Method} to the interface
]
if compare[name[m].nargs equal[==] <ast.UnaryOp object at 0x7da18c4ce5c0>] begin[:]
name[m].nargs assign[=] call[name[len], parameter[<ast.ListComp object at 0x7da18c4cc760>]]
name[m].nret assign[=] call[name[len], parameter[<ast.ListComp object at 0x7da18c4cf550>]]
call[name[self].methods][name[m].name] assign[=] name[m]
name[self]._xml assign[=] constant[None] | keyword[def] identifier[addMethod] ( identifier[self] , identifier[m] ):
literal[string]
keyword[if] identifier[m] . identifier[nargs] ==- literal[int] :
identifier[m] . identifier[nargs] = identifier[len] ([ identifier[a] keyword[for] identifier[a] keyword[in] identifier[marshal] . identifier[genCompleteTypes] ( identifier[m] . identifier[sigIn] )])
identifier[m] . identifier[nret] = identifier[len] ([ identifier[a] keyword[for] identifier[a] keyword[in] identifier[marshal] . identifier[genCompleteTypes] ( identifier[m] . identifier[sigOut] )])
identifier[self] . identifier[methods] [ identifier[m] . identifier[name] ]= identifier[m]
identifier[self] . identifier[_xml] = keyword[None] | def addMethod(self, m):
"""
Adds a L{Method} to the interface
"""
if m.nargs == -1:
m.nargs = len([a for a in marshal.genCompleteTypes(m.sigIn)])
m.nret = len([a for a in marshal.genCompleteTypes(m.sigOut)]) # depends on [control=['if'], data=[]]
self.methods[m.name] = m
self._xml = None |
def set_min_beds(self, min_beds):
    """
    The minimum number of beds.

    :param min_beds: lower bound on the bed count; must be an int.
    :return:
    """
    if not isinstance(min_beds, int):
        raise DaftException("Minimum number of beds should be an integer.")
    value = str(min_beds)
    self._min_beds = value
    # Append the filter to the accumulated query string.
    self._query_params += "%s%s" % (str(QueryParam.MIN_BEDS), value)
constant[
The minimum number of beds.
:param min_beds:
:return:
]
if <ast.UnaryOp object at 0x7da1b06269b0> begin[:]
<ast.Raise object at 0x7da1b06260e0>
name[self]._min_beds assign[=] call[name[str], parameter[name[min_beds]]]
<ast.AugAssign object at 0x7da1b0627970> | keyword[def] identifier[set_min_beds] ( identifier[self] , identifier[min_beds] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[min_beds] , identifier[int] ):
keyword[raise] identifier[DaftException] ( literal[string] )
identifier[self] . identifier[_min_beds] = identifier[str] ( identifier[min_beds] )
identifier[self] . identifier[_query_params] += identifier[str] ( identifier[QueryParam] . identifier[MIN_BEDS] )+ identifier[self] . identifier[_min_beds] | def set_min_beds(self, min_beds):
"""
The minimum number of beds.
:param min_beds:
:return:
"""
if not isinstance(min_beds, int):
raise DaftException('Minimum number of beds should be an integer.') # depends on [control=['if'], data=[]]
self._min_beds = str(min_beds)
self._query_params += str(QueryParam.MIN_BEDS) + self._min_beds |
def follow(gandi, resource):
    """ Get the operation status

    Resource is an operation ID
    """
    operation = gandi.oper.info(int(resource))
    # Only certificate operations may be followed here.
    assert operation['type'].startswith('certificate_')
    output_cert_oper(gandi, operation)
    return operation
constant[ Get the operation status
Resource is an operation ID
]
variable[oper] assign[=] call[name[gandi].oper.info, parameter[call[name[int], parameter[name[resource]]]]]
assert[call[call[name[oper]][constant[type]].startswith, parameter[constant[certificate_]]]]
call[name[output_cert_oper], parameter[name[gandi], name[oper]]]
return[name[oper]] | keyword[def] identifier[follow] ( identifier[gandi] , identifier[resource] ):
literal[string]
identifier[oper] = identifier[gandi] . identifier[oper] . identifier[info] ( identifier[int] ( identifier[resource] ))
keyword[assert] ( identifier[oper] [ literal[string] ]. identifier[startswith] ( literal[string] ))
identifier[output_cert_oper] ( identifier[gandi] , identifier[oper] )
keyword[return] identifier[oper] | def follow(gandi, resource):
""" Get the operation status
Resource is an operation ID
"""
oper = gandi.oper.info(int(resource))
assert oper['type'].startswith('certificate_')
output_cert_oper(gandi, oper)
return oper |
def scrypt_mcf_check(mcf, password):
    """Returns True if the password matches the given MCF hash"""
    if not isinstance(mcf, bytes):
        raise TypeError('MCF must be a byte string')
    if isinstance(password, unicode):
        password = password.encode('utf8')
    elif not isinstance(password, bytes):
        raise TypeError('password must be a unicode or byte string')
    # The C fast path only handles fixed-length MCF strings and passwords
    # without embedded NULs; everything else uses the pure-Python check.
    use_fallback = len(mcf) != 124 or b'\0' in password
    if not use_fallback:
        buf = ctypes.create_string_buffer(mcf)
        status = _libscrypt_check(buf, password)
        if status >= 0:
            return bool(status)
        # Negative status: the C library refused the input; fall through.
    return mcf_mod.scrypt_mcf_check(scrypt, mcf, password)
constant[Returns True if the password matches the given MCF hash]
if <ast.UnaryOp object at 0x7da18c4cffd0> begin[:]
<ast.Raise object at 0x7da18c4cdc00>
if call[name[isinstance], parameter[name[password], name[unicode]]] begin[:]
variable[password] assign[=] call[name[password].encode, parameter[constant[utf8]]]
if <ast.BoolOp object at 0x7da20e9562c0> begin[:]
return[call[name[mcf_mod].scrypt_mcf_check, parameter[name[scrypt], name[mcf], name[password]]]]
variable[mcfbuf] assign[=] call[name[ctypes].create_string_buffer, parameter[name[mcf]]]
variable[ret] assign[=] call[name[_libscrypt_check], parameter[name[mcfbuf], name[password]]]
if compare[name[ret] less[<] constant[0]] begin[:]
return[call[name[mcf_mod].scrypt_mcf_check, parameter[name[scrypt], name[mcf], name[password]]]]
return[call[name[bool], parameter[name[ret]]]] | keyword[def] identifier[scrypt_mcf_check] ( identifier[mcf] , identifier[password] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[mcf] , identifier[bytes] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[password] , identifier[unicode] ):
identifier[password] = identifier[password] . identifier[encode] ( literal[string] )
keyword[elif] keyword[not] identifier[isinstance] ( identifier[password] , identifier[bytes] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[len] ( identifier[mcf] )!= literal[int] keyword[or] literal[string] keyword[in] identifier[password] :
keyword[return] identifier[mcf_mod] . identifier[scrypt_mcf_check] ( identifier[scrypt] , identifier[mcf] , identifier[password] )
identifier[mcfbuf] = identifier[ctypes] . identifier[create_string_buffer] ( identifier[mcf] )
identifier[ret] = identifier[_libscrypt_check] ( identifier[mcfbuf] , identifier[password] )
keyword[if] identifier[ret] < literal[int] :
keyword[return] identifier[mcf_mod] . identifier[scrypt_mcf_check] ( identifier[scrypt] , identifier[mcf] , identifier[password] )
keyword[return] identifier[bool] ( identifier[ret] ) | def scrypt_mcf_check(mcf, password):
"""Returns True if the password matches the given MCF hash"""
if not isinstance(mcf, bytes):
raise TypeError('MCF must be a byte string') # depends on [control=['if'], data=[]]
if isinstance(password, unicode):
password = password.encode('utf8') # depends on [control=['if'], data=[]]
elif not isinstance(password, bytes):
raise TypeError('password must be a unicode or byte string') # depends on [control=['if'], data=[]]
if len(mcf) != 124 or b'\x00' in password:
return mcf_mod.scrypt_mcf_check(scrypt, mcf, password) # depends on [control=['if'], data=[]]
mcfbuf = ctypes.create_string_buffer(mcf)
ret = _libscrypt_check(mcfbuf, password)
if ret < 0:
return mcf_mod.scrypt_mcf_check(scrypt, mcf, password) # depends on [control=['if'], data=[]]
return bool(ret) |
def from_pysam_alignment(locus, pileup_read):
    '''
    Factory function to create a new PileupElement from a pysam
    `PileupRead`.

    Parameters
    ----------
    locus : varcode.Locus
        Reference locus for which to construct a PileupElement. Must
        include exactly one base.

    pileup_read : pysam.calignmentfile.PileupRead
        pysam PileupRead instance. Its alignment must overlap the locus.

    Returns
    ----------
    PileupElement
    '''
    assert not pileup_read.is_refskip, (
        "Can't create a PileupElement in a refskip (typically an intronic "
        "gap in an RNA alignment)")

    # Pysam has an `aligned_pairs` method that gives a list of
    # (offset, locus) pairs indicating the correspondence between bases in
    # the alignment and reference loci. Here we use that to compute
    # offset_start and offset_end.
    #
    # This is slightly tricky in the case of insertions and deletions.
    # Here are examples of the desired logic.
    #
    # Target locus = 1000
    #
    # (1) Simple case: matching bases.
    #
    # OFFSET           LOCUS
    # 0                999
    # 1                1000
    # 2                1001
    #
    # DESIRED RESULT: offset_start=1, offset_end=2.
    #
    #
    # (2) A 1 base insertion at offset 2.
    #
    # OFFSET           LOCUS
    # 0                999
    # 1                1000
    # 2                None
    # 3                1001
    #
    # DESIRED RESULT: offset_start = 1, offset_end=3.
    #
    #
    # (3) A 2 base deletion at loci 1000 and 1001.
    #
    # OFFSET           LOCUS
    # 0                999
    # None             1000
    # None             1001
    # 1                1002
    #
    # DESIRED RESULT: offset_start = 1, offset_end=1.
    #
    # offset_end defaults to the full read length so that a locus matched
    # at the very end of the alignment still yields a valid half-open
    # [offset_start, offset_end) interval.
    offset_start = None
    offset_end = len(pileup_read.alignment.query_sequence)
    # TODO: doing this with get_blocks() may be faster.
    for (offset, position) in pileup_read.alignment.aligned_pairs:
        # Pairs where either member is None (insertions/deletions) are
        # skipped; only genuine base-to-reference matches update offsets.
        if offset is not None and position is not None:
            if position == locus.position:
                offset_start = offset
            elif position > locus.position:
                offset_end = offset
                break
    if offset_start is None:
        # The locus itself was never matched directly (deletion case);
        # the element spans zero read bases starting at offset_end.
        offset_start = offset_end

    # Sanity check: pysam's is_del flag must agree with a zero-length
    # interval, i.e. a deletion at this locus in the read.
    assert pileup_read.is_del == (offset_end - offset_start == 0), \
        "Deletion=%s but | [%d,%d) |=%d for locus %d in: \n%s" % (
        pileup_read.is_del,
        offset_start,
        offset_end,
        offset_end - offset_start,
        locus.position,
        pileup_read.alignment.aligned_pairs)

    assert offset_end >= offset_start
    result = PileupElement(
        locus, offset_start, offset_end, pileup_read.alignment)
    return result
constant[
Factory function to create a new PileupElement from a pysam
`PileupRead`.
Parameters
----------
locus : varcode.Locus
Reference locus for which to construct a PileupElement. Must
include exactly one base.
pileup_read : pysam.calignmentfile.PileupRead
pysam PileupRead instance. Its alignment must overlap the locus.
Returns
----------
PileupElement
]
assert[<ast.UnaryOp object at 0x7da18dc996f0>]
variable[offset_start] assign[=] constant[None]
variable[offset_end] assign[=] call[name[len], parameter[name[pileup_read].alignment.query_sequence]]
for taget[tuple[[<ast.Name object at 0x7da18dc98490>, <ast.Name object at 0x7da18dc99060>]]] in starred[name[pileup_read].alignment.aligned_pairs] begin[:]
if <ast.BoolOp object at 0x7da18dc9a920> begin[:]
if compare[name[position] equal[==] name[locus].position] begin[:]
variable[offset_start] assign[=] name[offset]
if compare[name[offset_start] is constant[None]] begin[:]
variable[offset_start] assign[=] name[offset_end]
assert[compare[name[pileup_read].is_del equal[==] compare[binary_operation[name[offset_end] - name[offset_start]] equal[==] constant[0]]]]
assert[compare[name[offset_end] greater_or_equal[>=] name[offset_start]]]
variable[result] assign[=] call[name[PileupElement], parameter[name[locus], name[offset_start], name[offset_end], name[pileup_read].alignment]]
return[name[result]] | keyword[def] identifier[from_pysam_alignment] ( identifier[locus] , identifier[pileup_read] ):
literal[string]
keyword[assert] keyword[not] identifier[pileup_read] . identifier[is_refskip] ,(
literal[string]
literal[string] )
identifier[offset_start] = keyword[None]
identifier[offset_end] = identifier[len] ( identifier[pileup_read] . identifier[alignment] . identifier[query_sequence] )
keyword[for] ( identifier[offset] , identifier[position] ) keyword[in] identifier[pileup_read] . identifier[alignment] . identifier[aligned_pairs] :
keyword[if] identifier[offset] keyword[is] keyword[not] keyword[None] keyword[and] identifier[position] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[position] == identifier[locus] . identifier[position] :
identifier[offset_start] = identifier[offset]
keyword[elif] identifier[position] > identifier[locus] . identifier[position] :
identifier[offset_end] = identifier[offset]
keyword[break]
keyword[if] identifier[offset_start] keyword[is] keyword[None] :
identifier[offset_start] = identifier[offset_end]
keyword[assert] identifier[pileup_read] . identifier[is_del] ==( identifier[offset_end] - identifier[offset_start] == literal[int] ), literal[string] %(
identifier[pileup_read] . identifier[is_del] ,
identifier[offset_start] ,
identifier[offset_end] ,
identifier[offset_end] - identifier[offset_start] ,
identifier[locus] . identifier[position] ,
identifier[pileup_read] . identifier[alignment] . identifier[aligned_pairs] )
keyword[assert] identifier[offset_end] >= identifier[offset_start]
identifier[result] = identifier[PileupElement] (
identifier[locus] , identifier[offset_start] , identifier[offset_end] , identifier[pileup_read] . identifier[alignment] )
keyword[return] identifier[result] | def from_pysam_alignment(locus, pileup_read):
"""
Factory function to create a new PileupElement from a pysam
`PileupRead`.
Parameters
----------
locus : varcode.Locus
Reference locus for which to construct a PileupElement. Must
include exactly one base.
pileup_read : pysam.calignmentfile.PileupRead
pysam PileupRead instance. Its alignment must overlap the locus.
Returns
----------
PileupElement
"""
assert not pileup_read.is_refskip, "Can't create a PileupElement in a refskip (typically an intronic gap in an RNA alignment)"
# Pysam has an `aligned_pairs` method that gives a list of
# (offset, locus) pairs indicating the correspondence between bases in
# the alignment and reference loci. Here we use that to compute
# offset_start and offset_end.
#
# This is slightly tricky in the case of insertions and deletions.
# Here are examples of the desired logic.
#
# Target locus = 1000
#
# (1) Simple case: matching bases.
#
# OFFSET LOCUS
# 0 999
# 1 1000
# 2 1001
#
# DESIRED RESULT: offset_start=1, offset_end=2.
#
#
# (2) A 1 base insertion at offset 2.
#
# OFFSET LOCUS
# 0 999
# 1 1000
# 2 None
# 3 1001
#
# DESIRED RESULT: offset_start = 1, offset_end=3.
#
#
# (3) A 2 base deletion at loci 1000 and 1001.
#
# OFFSET LOCUS
# 0 999
# None 1000
# None 1001
# 1 1002
#
# DESIRED RESULT: offset_start = 1, offset_end=1.
#
offset_start = None
offset_end = len(pileup_read.alignment.query_sequence)
# TODO: doing this with get_blocks() may be faster.
for (offset, position) in pileup_read.alignment.aligned_pairs:
if offset is not None and position is not None:
if position == locus.position:
offset_start = offset # depends on [control=['if'], data=[]]
elif position > locus.position:
offset_end = offset
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if offset_start is None:
offset_start = offset_end # depends on [control=['if'], data=['offset_start']]
assert pileup_read.is_del == (offset_end - offset_start == 0), 'Deletion=%s but | [%d,%d) |=%d for locus %d in: \n%s' % (pileup_read.is_del, offset_start, offset_end, offset_end - offset_start, locus.position, pileup_read.alignment.aligned_pairs)
assert offset_end >= offset_start
result = PileupElement(locus, offset_start, offset_end, pileup_read.alignment)
return result |
def to_dict(self):
    # type: () -> OrderedDict
    """Create a dictionary representation of object attributes

    Returns:
        OrderedDict serialised version of self
    """
    result = OrderedDict()
    if self.typeid:
        result["typeid"] = self.typeid
    # Serialise every declared call-type attribute in declaration order.
    for name in self.call_types:
        result[name] = serialize_object(getattr(self, name))
    return result
constant[Create a dictionary representation of object attributes
Returns:
OrderedDict serialised version of self
]
variable[d] assign[=] call[name[OrderedDict], parameter[]]
if name[self].typeid begin[:]
call[name[d]][constant[typeid]] assign[=] name[self].typeid
for taget[name[k]] in starred[name[self].call_types] begin[:]
call[name[d]][name[k]] assign[=] call[name[serialize_object], parameter[call[name[getattr], parameter[name[self], name[k]]]]]
return[name[d]] | keyword[def] identifier[to_dict] ( identifier[self] ):
literal[string]
identifier[d] = identifier[OrderedDict] ()
keyword[if] identifier[self] . identifier[typeid] :
identifier[d] [ literal[string] ]= identifier[self] . identifier[typeid]
keyword[for] identifier[k] keyword[in] identifier[self] . identifier[call_types] :
identifier[d] [ identifier[k] ]= identifier[serialize_object] ( identifier[getattr] ( identifier[self] , identifier[k] ))
keyword[return] identifier[d] | def to_dict(self):
# type: () -> OrderedDict
'Create a dictionary representation of object attributes\n\n Returns:\n OrderedDict serialised version of self\n '
d = OrderedDict()
if self.typeid:
d['typeid'] = self.typeid # depends on [control=['if'], data=[]]
for k in self.call_types:
# check_camel_case(k)
d[k] = serialize_object(getattr(self, k)) # depends on [control=['for'], data=['k']]
return d |
def get_tile_image(self, x, y, layer):
    """ Return the tile image for this location

    :param x: x coordinate
    :param y: y coordinate
    :param layer: layer number
    :raises ValueError: if the coordinates or layer index are invalid
    :rtype: surface if found, otherwise 0
    """
    # Explicit bounds check.  The original wrapped ``assert`` in a
    # try/except AssertionError; asserts are stripped under ``python -O``,
    # which would have silently let negative coordinates through instead
    # of raising ValueError as documented.
    if x < 0 or y < 0:
        raise ValueError

    try:
        layer = self.layers[layer]
    except IndexError:
        raise ValueError

    assert (isinstance(layer, TiledTileLayer))

    try:
        gid = layer.data[y][x]
    except (IndexError, ValueError):
        raise ValueError
    except TypeError:
        # Non-integer indices end up here when indexing the tile grid.
        msg = "Tiles must be specified in integers."
        logger.debug(msg)
        raise TypeError
    else:
        return self.get_tile_image_by_gid(gid)
constant[ Return the tile image for this location
:param x: x coordinate
:param y: y coordinate
:param layer: layer number
:rtype: surface if found, otherwise 0
]
<ast.Try object at 0x7da1b088cfa0>
<ast.Try object at 0x7da1b088f9d0>
assert[call[name[isinstance], parameter[name[layer], name[TiledTileLayer]]]]
<ast.Try object at 0x7da20e9b3c10> | keyword[def] identifier[get_tile_image] ( identifier[self] , identifier[x] , identifier[y] , identifier[layer] ):
literal[string]
keyword[try] :
keyword[assert] ( identifier[x] >= literal[int] keyword[and] identifier[y] >= literal[int] )
keyword[except] identifier[AssertionError] :
keyword[raise] identifier[ValueError]
keyword[try] :
identifier[layer] = identifier[self] . identifier[layers] [ identifier[layer] ]
keyword[except] identifier[IndexError] :
keyword[raise] identifier[ValueError]
keyword[assert] ( identifier[isinstance] ( identifier[layer] , identifier[TiledTileLayer] ))
keyword[try] :
identifier[gid] = identifier[layer] . identifier[data] [ identifier[y] ][ identifier[x] ]
keyword[except] ( identifier[IndexError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError]
keyword[except] identifier[TypeError] :
identifier[msg] = literal[string]
identifier[logger] . identifier[debug] ( identifier[msg] )
keyword[raise] identifier[TypeError]
keyword[else] :
keyword[return] identifier[self] . identifier[get_tile_image_by_gid] ( identifier[gid] ) | def get_tile_image(self, x, y, layer):
""" Return the tile image for this location
:param x: x coordinate
:param y: y coordinate
:param layer: layer number
:rtype: surface if found, otherwise 0
"""
try:
assert x >= 0 and y >= 0 # depends on [control=['try'], data=[]]
except AssertionError:
raise ValueError # depends on [control=['except'], data=[]]
try:
layer = self.layers[layer] # depends on [control=['try'], data=[]]
except IndexError:
raise ValueError # depends on [control=['except'], data=[]]
assert isinstance(layer, TiledTileLayer)
try:
gid = layer.data[y][x] # depends on [control=['try'], data=[]]
except (IndexError, ValueError):
raise ValueError # depends on [control=['except'], data=[]]
except TypeError:
msg = 'Tiles must be specified in integers.'
logger.debug(msg)
raise TypeError # depends on [control=['except'], data=[]]
else:
return self.get_tile_image_by_gid(gid) |
def find_start_point(self):
    """Locate the first non-empty cell in ``self.data``.

    Scans rows top-to-bottom and columns left-to-right and returns the
    ``(row, column)`` index pair of the first entry that is not 0, or
    ``None`` when every entry is 0.
    """
    for row_idx, row in enumerate(self.data):
        for col_idx in range(len(row)):
            # or not np.isfinite(self.data[row_idx, col_idx])
            if self.data[row_idx, col_idx] != 0:
                return row_idx, col_idx
constant[
Find the first location in our array that is not empty
]
for taget[tuple[[<ast.Name object at 0x7da1b2345f60>, <ast.Name object at 0x7da1b23469e0>]]] in starred[call[name[enumerate], parameter[name[self].data]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da20c7956c0>, <ast.Name object at 0x7da20c794cd0>]]] in starred[call[name[enumerate], parameter[name[row]]]] begin[:]
if compare[call[name[self].data][tuple[[<ast.Name object at 0x7da20c7941f0>, <ast.Name object at 0x7da20c796ad0>]]] not_equal[!=] constant[0]] begin[:]
return[tuple[[<ast.Name object at 0x7da20c796620>, <ast.Name object at 0x7da20c795a20>]]] | keyword[def] identifier[find_start_point] ( identifier[self] ):
literal[string]
keyword[for] identifier[i] , identifier[row] keyword[in] identifier[enumerate] ( identifier[self] . identifier[data] ):
keyword[for] identifier[j] , identifier[_] keyword[in] identifier[enumerate] ( identifier[row] ):
keyword[if] identifier[self] . identifier[data] [ identifier[i] , identifier[j] ]!= literal[int] :
keyword[return] identifier[i] , identifier[j] | def find_start_point(self):
"""
Find the first location in our array that is not empty
"""
for (i, row) in enumerate(self.data):
for (j, _) in enumerate(row):
if self.data[i, j] != 0: # or not np.isfinite(self.data[i,j]):
return (i, j) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] |
def is_resource_class_terminal_attribute(rc, attr_name):
    """Check whether *attr_name* names a terminal attribute of the
    registered resource class *rc*.
    """
    kind = get_resource_class_attribute(rc, attr_name).kind
    return kind == RESOURCE_ATTRIBUTE_KINDS.TERMINAL
constant[
Checks if the given attribute name is a terminal attribute of the given
registered resource.
]
variable[attr] assign[=] call[name[get_resource_class_attribute], parameter[name[rc], name[attr_name]]]
return[compare[name[attr].kind equal[==] name[RESOURCE_ATTRIBUTE_KINDS].TERMINAL]] | keyword[def] identifier[is_resource_class_terminal_attribute] ( identifier[rc] , identifier[attr_name] ):
literal[string]
identifier[attr] = identifier[get_resource_class_attribute] ( identifier[rc] , identifier[attr_name] )
keyword[return] identifier[attr] . identifier[kind] == identifier[RESOURCE_ATTRIBUTE_KINDS] . identifier[TERMINAL] | def is_resource_class_terminal_attribute(rc, attr_name):
"""
Checks if the given attribute name is a terminal attribute of the given
registered resource.
"""
attr = get_resource_class_attribute(rc, attr_name)
return attr.kind == RESOURCE_ATTRIBUTE_KINDS.TERMINAL |
def authenticate(self, request, email=None, password=None, username=None):
    """
    Attempt to authenticate a set of credentials.

    Args:
        request:
            The request associated with the authentication attempt.
        email:
            The user's email address.
        password:
            The user's password.
        username:
            An alias for the ``email`` field, provided for
            compatability with Django's built in authentication
            views.

    Returns:
        The user owning a verified email address matching the given
        credentials if the password checks out; ``None`` otherwise.
    """
    address = email or username
    try:
        verified = models.EmailAddress.objects.get(
            is_verified=True, email=address
        )
    except models.EmailAddress.DoesNotExist:
        return None
    candidate = verified.user
    return candidate if candidate.check_password(password) else None
constant[
Attempt to authenticate a set of credentials.
Args:
request:
The request associated with the authentication attempt.
email:
The user's email address.
password:
The user's password.
username:
An alias for the ``email`` field. This is provided for
compatability with Django's built in authentication
views.
Returns:
The user associated with the provided credentials if they
are valid. Returns ``None`` otherwise.
]
variable[email] assign[=] <ast.BoolOp object at 0x7da1afe6fac0>
<ast.Try object at 0x7da1afe6f5b0>
variable[user] assign[=] name[email_instance].user
if call[name[user].check_password, parameter[name[password]]] begin[:]
return[name[user]]
return[constant[None]] | keyword[def] identifier[authenticate] ( identifier[self] , identifier[request] , identifier[email] = keyword[None] , identifier[password] = keyword[None] , identifier[username] = keyword[None] ):
literal[string]
identifier[email] = identifier[email] keyword[or] identifier[username]
keyword[try] :
identifier[email_instance] = identifier[models] . identifier[EmailAddress] . identifier[objects] . identifier[get] (
identifier[is_verified] = keyword[True] , identifier[email] = identifier[email]
)
keyword[except] identifier[models] . identifier[EmailAddress] . identifier[DoesNotExist] :
keyword[return] keyword[None]
identifier[user] = identifier[email_instance] . identifier[user]
keyword[if] identifier[user] . identifier[check_password] ( identifier[password] ):
keyword[return] identifier[user]
keyword[return] keyword[None] | def authenticate(self, request, email=None, password=None, username=None):
"""
Attempt to authenticate a set of credentials.
Args:
request:
The request associated with the authentication attempt.
email:
The user's email address.
password:
The user's password.
username:
An alias for the ``email`` field. This is provided for
compatability with Django's built in authentication
views.
Returns:
The user associated with the provided credentials if they
are valid. Returns ``None`` otherwise.
"""
email = email or username
try:
email_instance = models.EmailAddress.objects.get(is_verified=True, email=email) # depends on [control=['try'], data=[]]
except models.EmailAddress.DoesNotExist:
return None # depends on [control=['except'], data=[]]
user = email_instance.user
if user.check_password(password):
return user # depends on [control=['if'], data=[]]
return None |
def push_pending_node(self, name, attr):
    """Queue a wrapping node to be emitted before the next text.

    Pending nodes affect presentation, such as wrapping content with
    ``text:a`` to make a hyperlink.  Everything queued here is written
    before the actual text; the caller must remember to pop the node
    again when done.
    """
    if self.cur_element is None:
        self.add_text_frame()
    pending = (name, attr)
    self.cur_element.pending_nodes.append(pending)
constant[
pending nodes are for affecting type, such as wrapping content
with text:a to make a hyperlink. Anything in pending nodes
will be written before the actual text.
User needs to remember to pop out of it.
]
if compare[name[self].cur_element is constant[None]] begin[:]
call[name[self].add_text_frame, parameter[]]
call[name[self].cur_element.pending_nodes.append, parameter[tuple[[<ast.Name object at 0x7da2041dbbe0>, <ast.Name object at 0x7da2041da650>]]]] | keyword[def] identifier[push_pending_node] ( identifier[self] , identifier[name] , identifier[attr] ):
literal[string]
keyword[if] identifier[self] . identifier[cur_element] keyword[is] keyword[None] :
identifier[self] . identifier[add_text_frame] ()
identifier[self] . identifier[cur_element] . identifier[pending_nodes] . identifier[append] (( identifier[name] , identifier[attr] )) | def push_pending_node(self, name, attr):
"""
pending nodes are for affecting type, such as wrapping content
with text:a to make a hyperlink. Anything in pending nodes
will be written before the actual text.
User needs to remember to pop out of it.
"""
if self.cur_element is None:
self.add_text_frame() # depends on [control=['if'], data=[]]
self.cur_element.pending_nodes.append((name, attr)) |
def define_magic(self, name, func):
    """[Deprecated] Expose own function as magic function for IPython.

    Example::

        def foo_impl(self, parameter_s=''):
            'My very own magic!. (Use docstrings, IPython reads them).'
            print 'Magic function. Passed parameter is between < >:'
            print '<%s>' % parameter_s
            print 'The self object is:', self

        ip.define_magic('foo',foo_impl)
    """
    # Bind the function to the user-magics object so it behaves like a
    # regular method, then register it as a line magic.
    bound = types.MethodType(func, self.user_magics)
    setattr(self.user_magics, name, bound)
    record_magic(self.magics, 'line', name, bound)
constant[[Deprecated] Expose own function as magic function for IPython.
Example::
def foo_impl(self, parameter_s=''):
'My very own magic!. (Use docstrings, IPython reads them).'
print 'Magic function. Passed parameter is between < >:'
print '<%s>' % parameter_s
print 'The self object is:', self
ip.define_magic('foo',foo_impl)
]
variable[meth] assign[=] call[name[types].MethodType, parameter[name[func], name[self].user_magics]]
call[name[setattr], parameter[name[self].user_magics, name[name], name[meth]]]
call[name[record_magic], parameter[name[self].magics, constant[line], name[name], name[meth]]] | keyword[def] identifier[define_magic] ( identifier[self] , identifier[name] , identifier[func] ):
literal[string]
identifier[meth] = identifier[types] . identifier[MethodType] ( identifier[func] , identifier[self] . identifier[user_magics] )
identifier[setattr] ( identifier[self] . identifier[user_magics] , identifier[name] , identifier[meth] )
identifier[record_magic] ( identifier[self] . identifier[magics] , literal[string] , identifier[name] , identifier[meth] ) | def define_magic(self, name, func):
"""[Deprecated] Expose own function as magic function for IPython.
Example::
def foo_impl(self, parameter_s=''):
'My very own magic!. (Use docstrings, IPython reads them).'
print 'Magic function. Passed parameter is between < >:'
print '<%s>' % parameter_s
print 'The self object is:', self
ip.define_magic('foo',foo_impl)
"""
meth = types.MethodType(func, self.user_magics)
setattr(self.user_magics, name, meth)
record_magic(self.magics, 'line', name, meth) |
def log_request(self, code='-', size='-'):
    """Log an accepted request at INFO level.

    Called by send_response().

    :param code: response status code (default ``'-'``).
    :param size: response size (default ``'-'``).

    TODO: a configuration option to log or not
    (maybe using logging filters?)
    TODO: discriminate by code and dispatch to various log levels
    """
    # Lazy %-style args: formatting only happens if INFO is enabled.
    LOG.info("%r %s %s", self.requestline, code, size)
constant[
Called by send_response()
TODO: a configuration option to log or not
(maybe using logging filters?)
TODO: discriminate by code and dispatch to various log levels
]
call[name[LOG].info, parameter[constant[%r %s %s], name[self].requestline, name[code], name[size]]] | keyword[def] identifier[log_request] ( identifier[self] , identifier[code] = literal[string] , identifier[size] = literal[string] ):
literal[string]
identifier[LOG] . identifier[info] ( literal[string] , identifier[self] . identifier[requestline] , identifier[code] , identifier[size] ) | def log_request(self, code='-', size='-'):
"""
Called by send_response()
TODO: a configuration option to log or not
(maybe using logging filters?)
TODO: discriminate by code and dispatch to various log levels
"""
LOG.info('%r %s %s', self.requestline, code, size) |
def load(self, *objs, consistent=False):
    """Populate objects from DynamoDB.

    :param objs: objects to load; each must provide a value for every
        key column of its model.
    :param bool consistent: Use `strongly consistent reads`__ if True.
        Default is False.
    :raises bloop.exceptions.MissingKey: if any object doesn't provide
        a value for a key column.
    :raises bloop.exceptions.MissingObjects: if one or more objects
        aren't loaded.

    __ http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.ReadConsistency.html
    """
    get_table_name = self._compute_table_name
    # De-duplicate the inputs; abstract models can't be loaded.
    objs = set(objs)
    validate_not_abstract(*objs)
    # table_index:  table name -> sorted list of key column names
    # object_index: table name -> {key index -> set of objects sharing that key}
    # request:      table name -> batch-get request entry for that table
    table_index, object_index, request = {}, {}, {}
    for obj in objs:
        table_name = get_table_name(obj.__class__)
        key = dump_key(self, obj)
        index = index_for(key)
        if table_name not in object_index:
            table_index[table_name] = list(sorted(key.keys()))
            object_index[table_name] = {}
            request[table_name] = {"Keys": [], "ConsistentRead": consistent}
        if index not in object_index[table_name]:
            # Request each distinct key only once, even when several
            # objects share it.
            request[table_name]["Keys"].append(key)
            object_index[table_name][index] = set()
        object_index[table_name][index].add(obj)
    response = self.session.load_items(request)
    for table_name, list_of_attrs in response.items():
        for attrs in list_of_attrs:
            # Re-derive the key index from the returned attributes to
            # find which object(s) this item belongs to.
            key_shape = table_index[table_name]
            key = extract_key(key_shape, attrs)
            index = index_for(key)
            for obj in object_index[table_name].pop(index):
                unpack_from_dynamodb(
                    attrs=attrs, expected=obj.Meta.columns, engine=self, obj=obj)
                object_loaded.send(self, engine=self, obj=obj)
            if not object_index[table_name]:
                object_index.pop(table_name)
    # Anything still tracked in object_index was never returned.
    if object_index:
        not_loaded = set()
        for index in object_index.values():
            for index_set in index.values():
                not_loaded.update(index_set)
        logger.info("loaded {} of {} objects".format(len(objs) - len(not_loaded), len(objs)))
        raise MissingObjects("Failed to load some objects.", objects=not_loaded)
    logger.info("successfully loaded {} objects".format(len(objs)))
constant[Populate objects from DynamoDB.
:param objs: objects to delete.
:param bool consistent: Use `strongly consistent reads`__ if True. Default is False.
:raises bloop.exceptions.MissingKey: if any object doesn't provide a value for a key column.
:raises bloop.exceptions.MissingObjects: if one or more objects aren't loaded.
__ http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.ReadConsistency.html
]
variable[get_table_name] assign[=] name[self]._compute_table_name
variable[objs] assign[=] call[name[set], parameter[name[objs]]]
call[name[validate_not_abstract], parameter[<ast.Starred object at 0x7da1b0f2ea40>]]
<ast.Tuple object at 0x7da1b0f2eb90> assign[=] tuple[[<ast.Dict object at 0x7da1b0f2de70>, <ast.Dict object at 0x7da1b0f2e9b0>, <ast.Dict object at 0x7da1b0f2f1f0>]]
for taget[name[obj]] in starred[name[objs]] begin[:]
variable[table_name] assign[=] call[name[get_table_name], parameter[name[obj].__class__]]
variable[key] assign[=] call[name[dump_key], parameter[name[self], name[obj]]]
variable[index] assign[=] call[name[index_for], parameter[name[key]]]
if compare[name[table_name] <ast.NotIn object at 0x7da2590d7190> name[object_index]] begin[:]
call[name[table_index]][name[table_name]] assign[=] call[name[list], parameter[call[name[sorted], parameter[call[name[key].keys, parameter[]]]]]]
call[name[object_index]][name[table_name]] assign[=] dictionary[[], []]
call[name[request]][name[table_name]] assign[=] dictionary[[<ast.Constant object at 0x7da1b0f2ef50>, <ast.Constant object at 0x7da1b0f2ef20>], [<ast.List object at 0x7da1b0f2cac0>, <ast.Name object at 0x7da1b0f2c490>]]
if compare[name[index] <ast.NotIn object at 0x7da2590d7190> call[name[object_index]][name[table_name]]] begin[:]
call[call[call[name[request]][name[table_name]]][constant[Keys]].append, parameter[name[key]]]
call[call[name[object_index]][name[table_name]]][name[index]] assign[=] call[name[set], parameter[]]
call[call[call[name[object_index]][name[table_name]]][name[index]].add, parameter[name[obj]]]
variable[response] assign[=] call[name[self].session.load_items, parameter[name[request]]]
for taget[tuple[[<ast.Name object at 0x7da1b0f2e5c0>, <ast.Name object at 0x7da1b0f2c610>]]] in starred[call[name[response].items, parameter[]]] begin[:]
for taget[name[attrs]] in starred[name[list_of_attrs]] begin[:]
variable[key_shape] assign[=] call[name[table_index]][name[table_name]]
variable[key] assign[=] call[name[extract_key], parameter[name[key_shape], name[attrs]]]
variable[index] assign[=] call[name[index_for], parameter[name[key]]]
for taget[name[obj]] in starred[call[call[name[object_index]][name[table_name]].pop, parameter[name[index]]]] begin[:]
call[name[unpack_from_dynamodb], parameter[]]
call[name[object_loaded].send, parameter[name[self]]]
if <ast.UnaryOp object at 0x7da1b0fc5f90> begin[:]
call[name[object_index].pop, parameter[name[table_name]]]
if name[object_index] begin[:]
variable[not_loaded] assign[=] call[name[set], parameter[]]
for taget[name[index]] in starred[call[name[object_index].values, parameter[]]] begin[:]
for taget[name[index_set]] in starred[call[name[index].values, parameter[]]] begin[:]
call[name[not_loaded].update, parameter[name[index_set]]]
call[name[logger].info, parameter[call[constant[loaded {} of {} objects].format, parameter[binary_operation[call[name[len], parameter[name[objs]]] - call[name[len], parameter[name[not_loaded]]]], call[name[len], parameter[name[objs]]]]]]]
<ast.Raise object at 0x7da1b0fc6860>
call[name[logger].info, parameter[call[constant[successfully loaded {} objects].format, parameter[call[name[len], parameter[name[objs]]]]]]] | keyword[def] identifier[load] ( identifier[self] ,* identifier[objs] , identifier[consistent] = keyword[False] ):
literal[string]
identifier[get_table_name] = identifier[self] . identifier[_compute_table_name]
identifier[objs] = identifier[set] ( identifier[objs] )
identifier[validate_not_abstract] (* identifier[objs] )
identifier[table_index] , identifier[object_index] , identifier[request] ={},{},{}
keyword[for] identifier[obj] keyword[in] identifier[objs] :
identifier[table_name] = identifier[get_table_name] ( identifier[obj] . identifier[__class__] )
identifier[key] = identifier[dump_key] ( identifier[self] , identifier[obj] )
identifier[index] = identifier[index_for] ( identifier[key] )
keyword[if] identifier[table_name] keyword[not] keyword[in] identifier[object_index] :
identifier[table_index] [ identifier[table_name] ]= identifier[list] ( identifier[sorted] ( identifier[key] . identifier[keys] ()))
identifier[object_index] [ identifier[table_name] ]={}
identifier[request] [ identifier[table_name] ]={ literal[string] :[], literal[string] : identifier[consistent] }
keyword[if] identifier[index] keyword[not] keyword[in] identifier[object_index] [ identifier[table_name] ]:
identifier[request] [ identifier[table_name] ][ literal[string] ]. identifier[append] ( identifier[key] )
identifier[object_index] [ identifier[table_name] ][ identifier[index] ]= identifier[set] ()
identifier[object_index] [ identifier[table_name] ][ identifier[index] ]. identifier[add] ( identifier[obj] )
identifier[response] = identifier[self] . identifier[session] . identifier[load_items] ( identifier[request] )
keyword[for] identifier[table_name] , identifier[list_of_attrs] keyword[in] identifier[response] . identifier[items] ():
keyword[for] identifier[attrs] keyword[in] identifier[list_of_attrs] :
identifier[key_shape] = identifier[table_index] [ identifier[table_name] ]
identifier[key] = identifier[extract_key] ( identifier[key_shape] , identifier[attrs] )
identifier[index] = identifier[index_for] ( identifier[key] )
keyword[for] identifier[obj] keyword[in] identifier[object_index] [ identifier[table_name] ]. identifier[pop] ( identifier[index] ):
identifier[unpack_from_dynamodb] (
identifier[attrs] = identifier[attrs] , identifier[expected] = identifier[obj] . identifier[Meta] . identifier[columns] , identifier[engine] = identifier[self] , identifier[obj] = identifier[obj] )
identifier[object_loaded] . identifier[send] ( identifier[self] , identifier[engine] = identifier[self] , identifier[obj] = identifier[obj] )
keyword[if] keyword[not] identifier[object_index] [ identifier[table_name] ]:
identifier[object_index] . identifier[pop] ( identifier[table_name] )
keyword[if] identifier[object_index] :
identifier[not_loaded] = identifier[set] ()
keyword[for] identifier[index] keyword[in] identifier[object_index] . identifier[values] ():
keyword[for] identifier[index_set] keyword[in] identifier[index] . identifier[values] ():
identifier[not_loaded] . identifier[update] ( identifier[index_set] )
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[len] ( identifier[objs] )- identifier[len] ( identifier[not_loaded] ), identifier[len] ( identifier[objs] )))
keyword[raise] identifier[MissingObjects] ( literal[string] , identifier[objects] = identifier[not_loaded] )
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[len] ( identifier[objs] ))) | def load(self, *objs, consistent=False):
"""Populate objects from DynamoDB.
:param objs: objects to delete.
:param bool consistent: Use `strongly consistent reads`__ if True. Default is False.
:raises bloop.exceptions.MissingKey: if any object doesn't provide a value for a key column.
:raises bloop.exceptions.MissingObjects: if one or more objects aren't loaded.
__ http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.ReadConsistency.html
"""
get_table_name = self._compute_table_name
objs = set(objs)
validate_not_abstract(*objs)
(table_index, object_index, request) = ({}, {}, {})
for obj in objs:
table_name = get_table_name(obj.__class__)
key = dump_key(self, obj)
index = index_for(key)
if table_name not in object_index:
table_index[table_name] = list(sorted(key.keys()))
object_index[table_name] = {}
request[table_name] = {'Keys': [], 'ConsistentRead': consistent} # depends on [control=['if'], data=['table_name', 'object_index']]
if index not in object_index[table_name]:
request[table_name]['Keys'].append(key)
object_index[table_name][index] = set() # depends on [control=['if'], data=['index']]
object_index[table_name][index].add(obj) # depends on [control=['for'], data=['obj']]
response = self.session.load_items(request)
for (table_name, list_of_attrs) in response.items():
for attrs in list_of_attrs:
key_shape = table_index[table_name]
key = extract_key(key_shape, attrs)
index = index_for(key)
for obj in object_index[table_name].pop(index):
unpack_from_dynamodb(attrs=attrs, expected=obj.Meta.columns, engine=self, obj=obj)
object_loaded.send(self, engine=self, obj=obj) # depends on [control=['for'], data=['obj']]
if not object_index[table_name]:
object_index.pop(table_name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attrs']] # depends on [control=['for'], data=[]]
if object_index:
not_loaded = set()
for index in object_index.values():
for index_set in index.values():
not_loaded.update(index_set) # depends on [control=['for'], data=['index_set']] # depends on [control=['for'], data=['index']]
logger.info('loaded {} of {} objects'.format(len(objs) - len(not_loaded), len(objs)))
raise MissingObjects('Failed to load some objects.', objects=not_loaded) # depends on [control=['if'], data=[]]
logger.info('successfully loaded {} objects'.format(len(objs))) |
def construct_listener(outfile=None):
    """Create the listener that prints tweets.

    :param outfile: optional path of the output file; ``None`` means
        use the listener's default output.
    :raises IOError: if *outfile* already exists.
    :returns: a ``PrintingListener`` writing to *outfile*.
    """
    if outfile is not None:
        import errno
        # O_CREAT|O_EXCL creates the file atomically, closing the
        # check-then-open race the old os.path.exists() test had.
        try:
            fd = os.open(outfile, os.O_WRONLY | os.O_CREAT | os.O_EXCL)
        except OSError as exc:
            if exc.errno == errno.EEXIST:
                raise IOError("File %s already exists" % outfile)
            raise
        outfile = os.fdopen(fd, 'wb')
    return PrintingListener(out=outfile)
constant[Create the listener that prints tweets]
if compare[name[outfile] is_not constant[None]] begin[:]
if call[name[os].path.exists, parameter[name[outfile]]] begin[:]
<ast.Raise object at 0x7da1b0f11f60>
variable[outfile] assign[=] call[name[open], parameter[name[outfile], constant[wb]]]
return[call[name[PrintingListener], parameter[]]] | keyword[def] identifier[construct_listener] ( identifier[outfile] = keyword[None] ):
literal[string]
keyword[if] identifier[outfile] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[outfile] ):
keyword[raise] identifier[IOError] ( literal[string] % identifier[outfile] )
identifier[outfile] = identifier[open] ( identifier[outfile] , literal[string] )
keyword[return] identifier[PrintingListener] ( identifier[out] = identifier[outfile] ) | def construct_listener(outfile=None):
"""Create the listener that prints tweets"""
if outfile is not None:
if os.path.exists(outfile):
raise IOError('File %s already exists' % outfile) # depends on [control=['if'], data=[]]
outfile = open(outfile, 'wb') # depends on [control=['if'], data=['outfile']]
return PrintingListener(out=outfile) |
def get(self, agentml, user=None, key=None):
    """
    Evaluate and return the current value of a user variable.

    :param user: The active user object
    :type user: agentml.User or None
    :param agentml: The active AgentML instance
    :type agentml: AgentML
    :param key: The variables key
    :type key: str
    :return: Current value of the user variable (or None if the
        variable has not been set)
    :rtype : str or None
    """
    if user and key:
        try:
            return user.get_var(key)
        except VarNotDefinedError:
            pass
    return None
constant[
Evaluate and return the current value of a user variable
:param user: The active user object
:type user: agentml.User or None
:param agentml: The active AgentML instance
:type agentml: AgentML
:param key: The variables key
:type key: str
:return: Current value of the user variable (or None if the variable has not been set)
:rtype : str or None
]
if <ast.BoolOp object at 0x7da1b144e740> begin[:]
return[None]
<ast.Try object at 0x7da1b1470d90> | keyword[def] identifier[get] ( identifier[self] , identifier[agentml] , identifier[user] = keyword[None] , identifier[key] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[user] keyword[or] keyword[not] identifier[key] :
keyword[return]
keyword[try] :
keyword[return] identifier[user] . identifier[get_var] ( identifier[key] )
keyword[except] identifier[VarNotDefinedError] :
keyword[return] | def get(self, agentml, user=None, key=None):
"""
Evaluate and return the current value of a user variable
:param user: The active user object
:type user: agentml.User or None
:param agentml: The active AgentML instance
:type agentml: AgentML
:param key: The variables key
:type key: str
:return: Current value of the user variable (or None if the variable has not been set)
:rtype : str or None
"""
if not user or not key:
return # depends on [control=['if'], data=[]]
try:
return user.get_var(key) # depends on [control=['try'], data=[]]
except VarNotDefinedError:
return # depends on [control=['except'], data=[]] |
def delete(self):
    """
    Explicit destructor for the internal SAT oracle and all totalizer
    objects created during the solving process.
    """
    if self.oracle:
        self.oracle.delete()
        self.oracle = None
    if self.solver == 'mc':
        # minicard handles cardinality natively: nothing more to free
        return
    for totalizer in six.itervalues(self.tobj):
        totalizer.delete()
constant[
Explicit destructor of the internal SAT oracle and all the
totalizer objects creating during the solving process.
]
if name[self].oracle begin[:]
call[name[self].oracle.delete, parameter[]]
name[self].oracle assign[=] constant[None]
if compare[name[self].solver not_equal[!=] constant[mc]] begin[:]
for taget[name[t]] in starred[call[name[six].itervalues, parameter[name[self].tobj]]] begin[:]
call[name[t].delete, parameter[]] | keyword[def] identifier[delete] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[oracle] :
identifier[self] . identifier[oracle] . identifier[delete] ()
identifier[self] . identifier[oracle] = keyword[None]
keyword[if] identifier[self] . identifier[solver] != literal[string] :
keyword[for] identifier[t] keyword[in] identifier[six] . identifier[itervalues] ( identifier[self] . identifier[tobj] ):
identifier[t] . identifier[delete] () | def delete(self):
"""
Explicit destructor of the internal SAT oracle and all the
totalizer objects creating during the solving process.
"""
if self.oracle:
self.oracle.delete()
self.oracle = None
if self.solver != 'mc': # for minicard, there is nothing to free
for t in six.itervalues(self.tobj):
t.delete() # depends on [control=['for'], data=['t']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def import_plugin(package_name, include_available=False):
    '''
    Import MicroDrop plugin.

    Parameters
    ----------
    package_name : str
        Name of MicroDrop plugin Conda package.
    include_available : bool, optional
        If ``True``, import from all available plugins (not just
        **enabled** ones).  By default, only the
        ``<conda>/etc/microdrop/plugins/enabled`` directory is added to
        the Python import paths (if necessary).  If ``True``, also add
        the ``<conda>/share/microdrop/plugins/available`` directory to
        the Python import paths.

    Returns
    -------
    module
        Imported plugin module.
    '''
    search_paths = [MICRODROP_CONDA_ETC.joinpath('plugins', 'enabled')]
    if include_available:
        search_paths.append(MICRODROP_CONDA_SHARE.joinpath('plugins',
                                                           'available'))
    for plugin_dir in search_paths:
        if plugin_dir not in sys.path:
            sys.path.insert(0, plugin_dir)
    # Conda package names use dashes; the importable module uses
    # underscores and drops any namespace prefix.
    module_name = package_name.rsplit('.', 1)[-1].replace('-', '_')
    return importlib.import_module(module_name)
constant[
Import MicroDrop plugin.
Parameters
----------
package_name : str
Name of MicroDrop plugin Conda package.
include_available : bool, optional
If ``True``, import from all available plugins (not just **enabled**
ones).
By default, only the ``<conda>/etc/microdrop/plugins/enabled``
directory is added to the Python import paths (if necessary).
If ``True``, also add the ``<conda>/share/microdrop/plugins/available``
directory to the Python import paths.
Returns
-------
module
Imported plugin module.
]
variable[available_plugins_dir] assign[=] call[name[MICRODROP_CONDA_SHARE].joinpath, parameter[constant[plugins], constant[available]]]
variable[enabled_plugins_dir] assign[=] call[name[MICRODROP_CONDA_ETC].joinpath, parameter[constant[plugins], constant[enabled]]]
variable[search_paths] assign[=] list[[<ast.Name object at 0x7da20c6a87c0>]]
if name[include_available] begin[:]
<ast.AugAssign object at 0x7da20c6a93f0>
for taget[name[dir_i]] in starred[name[search_paths]] begin[:]
if compare[name[dir_i] <ast.NotIn object at 0x7da2590d7190> name[sys].path] begin[:]
call[name[sys].path.insert, parameter[constant[0], name[dir_i]]]
variable[module_name] assign[=] call[call[call[name[package_name].split, parameter[constant[.]]]][<ast.UnaryOp object at 0x7da2044c2590>].replace, parameter[constant[-], constant[_]]]
return[call[name[importlib].import_module, parameter[name[module_name]]]] | keyword[def] identifier[import_plugin] ( identifier[package_name] , identifier[include_available] = keyword[False] ):
literal[string]
identifier[available_plugins_dir] = identifier[MICRODROP_CONDA_SHARE] . identifier[joinpath] ( literal[string] ,
literal[string] )
identifier[enabled_plugins_dir] = identifier[MICRODROP_CONDA_ETC] . identifier[joinpath] ( literal[string] , literal[string] )
identifier[search_paths] =[ identifier[enabled_plugins_dir] ]
keyword[if] identifier[include_available] :
identifier[search_paths] +=[ identifier[available_plugins_dir] ]
keyword[for] identifier[dir_i] keyword[in] identifier[search_paths] :
keyword[if] identifier[dir_i] keyword[not] keyword[in] identifier[sys] . identifier[path] :
identifier[sys] . identifier[path] . identifier[insert] ( literal[int] , identifier[dir_i] )
identifier[module_name] = identifier[package_name] . identifier[split] ( literal[string] )[- literal[int] ]. identifier[replace] ( literal[string] , literal[string] )
keyword[return] identifier[importlib] . identifier[import_module] ( identifier[module_name] ) | def import_plugin(package_name, include_available=False):
"""
Import MicroDrop plugin.
Parameters
----------
package_name : str
Name of MicroDrop plugin Conda package.
include_available : bool, optional
If ``True``, import from all available plugins (not just **enabled**
ones).
By default, only the ``<conda>/etc/microdrop/plugins/enabled``
directory is added to the Python import paths (if necessary).
If ``True``, also add the ``<conda>/share/microdrop/plugins/available``
directory to the Python import paths.
Returns
-------
module
Imported plugin module.
"""
available_plugins_dir = MICRODROP_CONDA_SHARE.joinpath('plugins', 'available')
enabled_plugins_dir = MICRODROP_CONDA_ETC.joinpath('plugins', 'enabled')
search_paths = [enabled_plugins_dir]
if include_available:
search_paths += [available_plugins_dir] # depends on [control=['if'], data=[]]
for dir_i in search_paths:
if dir_i not in sys.path:
sys.path.insert(0, dir_i) # depends on [control=['if'], data=['dir_i']] # depends on [control=['for'], data=['dir_i']]
module_name = package_name.split('.')[-1].replace('-', '_')
return importlib.import_module(module_name) |
def send(self, sender, to, subject, plain=None, html=None, cc=None, bcc=None,
         attach=None, replyto=None):
    """
    Send the message through Django's mail framework.

    When both PLAIN and HTML bodies are present, a multipart
    alternative MIME message is sent; otherwise whichever body exists
    is used on its own.  If neither is available, NoContentError is
    raised.

    Arguments:
    - `sender`: str
    - `to`: list
    - `subject`: str
    - `plain`: str
    - `html`: str
    - `attach`: str or iterable of str
    - `replyto`: str
    Return: None
    Exceptions: NoContentError
    """
    if attach:
        raise NotImplementedError('Attachments not implemented for Django yet!')
    headers = {'Reply-To': replyto} if replyto else {}
    # Validate the content before normalising the optional recipient lists.
    self.sanity_check(sender, to, subject, plain=plain, html=html,
                      cc=cc, bcc=bcc)
    cc = cc or []
    bcc = bcc or []
    plain = plain or ''
    # Deferred import -- presumably so the module loads without Django
    # installed; see https://docs.djangoproject.com/en/dev/topics/email/
    from django.core.mail import EmailMultiAlternatives
    message = EmailMultiAlternatives(u(subject), u(plain), u(sender),
                                     _stringlist(to), bcc=bcc, cc=cc,
                                     headers=headers)
    if html:
        message.attach_alternative(ensure_unicode(html), "text/html")
    message.send()
    return
return | def function[send, parameter[self, sender, to, subject, plain, html, cc, bcc, attach, replyto]]:
constant[
Send the message.
If we have PLAIN and HTML versions, send a multipart alternative
MIME message, else send whichever we do have.
If we have neither, raise NoContentError
Arguments:
- `sender`: str
- `to`: list
- `subject`: str
- `plain`: str
- `html`: str
- `attach`: str or iterable of str
- `replyto`: str
Return: None
Exceptions: NoContentError
]
variable[headers] assign[=] dictionary[[], []]
if name[attach] begin[:]
<ast.Raise object at 0x7da1b1463040>
if name[replyto] begin[:]
call[name[headers]][constant[Reply-To]] assign[=] name[replyto]
call[name[self].sanity_check, parameter[name[sender], name[to], name[subject]]]
if <ast.UnaryOp object at 0x7da1b1461450> begin[:]
variable[cc] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da1b1460a00> begin[:]
variable[bcc] assign[=] list[[]]
from relative_module[django.core.mail] import module[EmailMultiAlternatives]
if <ast.UnaryOp object at 0x7da1b1462e60> begin[:]
variable[plain] assign[=] constant[]
variable[msg] assign[=] call[name[EmailMultiAlternatives], parameter[call[name[u], parameter[name[subject]]], call[name[u], parameter[name[plain]]], call[name[u], parameter[name[sender]]], call[name[_stringlist], parameter[name[to]]]]]
if name[html] begin[:]
call[name[msg].attach_alternative, parameter[call[name[ensure_unicode], parameter[name[html]]], constant[text/html]]]
call[name[msg].send, parameter[]]
return[None] | keyword[def] identifier[send] ( identifier[self] , identifier[sender] , identifier[to] , identifier[subject] , identifier[plain] = keyword[None] , identifier[html] = keyword[None] , identifier[cc] = keyword[None] , identifier[bcc] = keyword[None] ,
identifier[attach] = keyword[None] , identifier[replyto] = keyword[None] ):
literal[string]
identifier[headers] ={}
keyword[if] identifier[attach] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
keyword[if] identifier[replyto] :
identifier[headers] [ literal[string] ]= identifier[replyto]
identifier[self] . identifier[sanity_check] ( identifier[sender] , identifier[to] , identifier[subject] , identifier[plain] = identifier[plain] , identifier[html] = identifier[html] ,
identifier[cc] = identifier[cc] , identifier[bcc] = identifier[bcc] )
keyword[if] keyword[not] identifier[cc] :
identifier[cc] =[]
keyword[if] keyword[not] identifier[bcc] :
identifier[bcc] =[]
keyword[from] identifier[django] . identifier[core] . identifier[mail] keyword[import] identifier[EmailMultiAlternatives]
keyword[if] keyword[not] identifier[plain] :
identifier[plain] = literal[string]
identifier[msg] = identifier[EmailMultiAlternatives] ( identifier[u] ( identifier[subject] ), identifier[u] ( identifier[plain] ), identifier[u] ( identifier[sender] ), identifier[_stringlist] ( identifier[to] ),
identifier[bcc] = identifier[bcc] , identifier[cc] = identifier[cc] , identifier[headers] = identifier[headers] )
keyword[if] identifier[html] :
identifier[msg] . identifier[attach_alternative] ( identifier[ensure_unicode] ( identifier[html] ), literal[string] )
identifier[msg] . identifier[send] ()
keyword[return] | def send(self, sender, to, subject, plain=None, html=None, cc=None, bcc=None, attach=None, replyto=None):
"""
Send the message.
If we have PLAIN and HTML versions, send a multipart alternative
MIME message, else send whichever we do have.
If we have neither, raise NoContentError
Arguments:
- `sender`: str
- `to`: list
- `subject`: str
- `plain`: str
- `html`: str
- `attach`: str or iterable of str
- `replyto`: str
Return: None
Exceptions: NoContentError
"""
headers = {}
if attach:
raise NotImplementedError('Attachments not implemented for Django yet!') # depends on [control=['if'], data=[]]
if replyto:
headers['Reply-To'] = replyto # depends on [control=['if'], data=[]]
self.sanity_check(sender, to, subject, plain=plain, html=html, cc=cc, bcc=bcc)
if not cc:
cc = [] # depends on [control=['if'], data=[]]
if not bcc:
bcc = [] # depends on [control=['if'], data=[]]
# This comes straight from the docs at
# https://docs.djangoproject.com/en/dev/topics/email/
from django.core.mail import EmailMultiAlternatives
if not plain:
plain = '' # depends on [control=['if'], data=[]]
msg = EmailMultiAlternatives(u(subject), u(plain), u(sender), _stringlist(to), bcc=bcc, cc=cc, headers=headers)
if html:
msg.attach_alternative(ensure_unicode(html), 'text/html') # depends on [control=['if'], data=[]]
msg.send()
return |
def sort_dictionary_list(dict_list, sort_key):
    """
    Sort a list of dictionaries in place by the value under ``sort_key``.

    dict_list - a list of dictionaries
    sort_key - a string that identifies the key to sort the dictionaries with.

    Returns the same list object, sorted; an empty or falsy input is
    returned unchanged.

    Test sorting a list of dictionaries:
    >>> sort_dictionary_list([{'b' : 1, 'value' : 2}, {'c' : 2, 'value' : 3}, {'a' : 3, 'value' : 1}], 'value')
    [{'a': 3, 'value': 1}, {'b': 1, 'value': 2}, {'c': 2, 'value': 3}]
    """
    # `not dict_list` already covers both None and the empty list; the
    # original extra `len(...) == 0` check was redundant.
    if not dict_list:
        return dict_list
    dict_list.sort(key=itemgetter(sort_key))
    return dict_list
return dict_list | def function[sort_dictionary_list, parameter[dict_list, sort_key]]:
constant[
sorts a list of dictionaries based on the value of the sort_key
dict_list - a list of dictionaries
sort_key - a string that identifies the key to sort the dictionaries with.
Test sorting a list of dictionaries:
>>> sort_dictionary_list([{'b' : 1, 'value' : 2}, {'c' : 2, 'value' : 3}, {'a' : 3, 'value' : 1}], 'value')
[{'a': 3, 'value': 1}, {'b': 1, 'value': 2}, {'c': 2, 'value': 3}]
]
if <ast.BoolOp object at 0x7da18f58d9c0> begin[:]
return[name[dict_list]]
call[name[dict_list].sort, parameter[]]
return[name[dict_list]] | keyword[def] identifier[sort_dictionary_list] ( identifier[dict_list] , identifier[sort_key] ):
literal[string]
keyword[if] keyword[not] identifier[dict_list] keyword[or] identifier[len] ( identifier[dict_list] )== literal[int] :
keyword[return] identifier[dict_list]
identifier[dict_list] . identifier[sort] ( identifier[key] = identifier[itemgetter] ( identifier[sort_key] ))
keyword[return] identifier[dict_list] | def sort_dictionary_list(dict_list, sort_key):
"""
sorts a list of dictionaries based on the value of the sort_key
dict_list - a list of dictionaries
sort_key - a string that identifies the key to sort the dictionaries with.
Test sorting a list of dictionaries:
>>> sort_dictionary_list([{'b' : 1, 'value' : 2}, {'c' : 2, 'value' : 3}, {'a' : 3, 'value' : 1}], 'value')
[{'a': 3, 'value': 1}, {'b': 1, 'value': 2}, {'c': 2, 'value': 3}]
"""
if not dict_list or len(dict_list) == 0:
return dict_list # depends on [control=['if'], data=[]]
dict_list.sort(key=itemgetter(sort_key))
return dict_list |
async def export_image(self, name: str):
    """
    Get a tarball of an image by name or id.

    Args:
        name: name/id of the image to be exported

    Returns:
        Streamreader of tarball image
    """
    endpoint = "images/{name}/get".format(name=name)
    response = await self.docker._query(endpoint, "GET")
    return response.content
return response.content | <ast.AsyncFunctionDef object at 0x7da1b08b3dc0> | keyword[async] keyword[def] identifier[export_image] ( identifier[self] , identifier[name] : identifier[str] ):
literal[string]
identifier[response] = keyword[await] identifier[self] . identifier[docker] . identifier[_query] (
literal[string] . identifier[format] ( identifier[name] = identifier[name] ), literal[string]
)
keyword[return] identifier[response] . identifier[content] | async def export_image(self, name: str):
"""
Get a tarball of an image by name or id.
Args:
name: name/id of the image to be exported
Returns:
Streamreader of tarball image
"""
response = await self.docker._query('images/{name}/get'.format(name=name), 'GET')
return response.content |
def norm(self, x):
    """Calculate the array-weighted norm of an element.

    Parameters
    ----------
    x : `ProductSpaceElement`
        Element whose norm is calculated.

    Returns
    -------
    norm : float
        The norm of the provided element.
    """
    if self.exponent == 2.0:
        # For the Euclidean case, derive the norm from the weighted
        # inner product directly.
        return np.sqrt(self.inner(x, x).real)
    part_norms = np.fromiter((xi.norm() for xi in x),
                             dtype=np.float64, count=len(x))
    # For p = 1 and p = inf the weights enter linearly; otherwise each
    # weight is raised to 1/p so that the weighting matches the p-norm.
    if self.exponent in (1.0, float('inf')):
        weights = self.array
    else:
        weights = self.array ** (1.0 / self.exponent)
    return float(np.linalg.norm(part_norms * weights, ord=self.exponent))
return float(np.linalg.norm(norms, ord=self.exponent)) | def function[norm, parameter[self, x]]:
constant[Calculate the array-weighted norm of an element.
Parameters
----------
x : `ProductSpaceElement`
Element whose norm is calculated.
Returns
-------
norm : float
The norm of the provided element.
]
if compare[name[self].exponent equal[==] constant[2.0]] begin[:]
variable[norm_squared] assign[=] call[name[self].inner, parameter[name[x], name[x]]].real
return[call[name[np].sqrt, parameter[name[norm_squared]]]] | keyword[def] identifier[norm] ( identifier[self] , identifier[x] ):
literal[string]
keyword[if] identifier[self] . identifier[exponent] == literal[int] :
identifier[norm_squared] = identifier[self] . identifier[inner] ( identifier[x] , identifier[x] ). identifier[real]
keyword[return] identifier[np] . identifier[sqrt] ( identifier[norm_squared] )
keyword[else] :
identifier[norms] = identifier[np] . identifier[fromiter] (
( identifier[xi] . identifier[norm] () keyword[for] identifier[xi] keyword[in] identifier[x] ), identifier[dtype] = identifier[np] . identifier[float64] , identifier[count] = identifier[len] ( identifier[x] ))
keyword[if] identifier[self] . identifier[exponent] keyword[in] ( literal[int] , identifier[float] ( literal[string] )):
identifier[norms] *= identifier[self] . identifier[array]
keyword[else] :
identifier[norms] *= identifier[self] . identifier[array] **( literal[int] / identifier[self] . identifier[exponent] )
keyword[return] identifier[float] ( identifier[np] . identifier[linalg] . identifier[norm] ( identifier[norms] , identifier[ord] = identifier[self] . identifier[exponent] )) | def norm(self, x):
"""Calculate the array-weighted norm of an element.
Parameters
----------
x : `ProductSpaceElement`
Element whose norm is calculated.
Returns
-------
norm : float
The norm of the provided element.
"""
if self.exponent == 2.0:
norm_squared = self.inner(x, x).real # TODO: optimize?!
return np.sqrt(norm_squared) # depends on [control=['if'], data=[]]
else:
norms = np.fromiter((xi.norm() for xi in x), dtype=np.float64, count=len(x))
if self.exponent in (1.0, float('inf')):
norms *= self.array # depends on [control=['if'], data=[]]
else:
norms *= self.array ** (1.0 / self.exponent)
return float(np.linalg.norm(norms, ord=self.exponent)) |
def call_editor(file_path):
    '''
    Open ``file_path`` in the user's text editor.

    The editor command is read from the ``EDITOR`` environment
    variable and defaults to ``vim`` when it is unset.
    '''
    # Resolve the editor command once, preferring the user's configured choice.
    EDITOR = os.environ.get('EDITOR', 'vim')
    # 'r+' requires the file to already exist; the editor is invoked on it.
    with open(file_path, 'r+') as tf:
call([EDITOR, tf.name]) | def function[call_editor, parameter[file_path]]:
constant[
call editor
]
variable[EDITOR] assign[=] call[name[os].environ.get, parameter[constant[EDITOR], constant[vim]]]
with call[name[open], parameter[name[file_path], constant[r+]]] begin[:]
call[name[call], parameter[list[[<ast.Name object at 0x7da18f00de40>, <ast.Attribute object at 0x7da18f00c340>]]]] | keyword[def] identifier[call_editor] ( identifier[file_path] ):
literal[string]
identifier[EDITOR] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[string] )
keyword[with] identifier[open] ( identifier[file_path] , literal[string] ) keyword[as] identifier[tf] :
identifier[call] ([ identifier[EDITOR] , identifier[tf] . identifier[name] ]) | def call_editor(file_path):
"""
call editor
"""
EDITOR = os.environ.get('EDITOR', 'vim')
with open(file_path, 'r+') as tf:
call([EDITOR, tf.name]) # depends on [control=['with'], data=['tf']] |
def create_widget(self):
    """ Create the underlying EditText widget for this declaration.

    The widget is constructed against the current context; the style
    comes from the declaration when set, otherwise the default
    ``@attr/editTextStyle`` is used (see the continued call below).
    """
    # `d` is the declaration object this proxy renders.
    d = self.declaration
    self.widget = EditText(self.get_context(), None,
d.style or "@attr/editTextStyle") | def function[create_widget, parameter[self]]:
constant[ Create the underlying widget.
]
variable[d] assign[=] name[self].declaration
name[self].widget assign[=] call[name[EditText], parameter[call[name[self].get_context, parameter[]], constant[None], <ast.BoolOp object at 0x7da1b1b9c130>]] | keyword[def] identifier[create_widget] ( identifier[self] ):
literal[string]
identifier[d] = identifier[self] . identifier[declaration]
identifier[self] . identifier[widget] = identifier[EditText] ( identifier[self] . identifier[get_context] (), keyword[None] ,
identifier[d] . identifier[style] keyword[or] literal[string] ) | def create_widget(self):
""" Create the underlying widget.
"""
d = self.declaration
self.widget = EditText(self.get_context(), None, d.style or '@attr/editTextStyle') |
def feedparser_render(url, *args, **kwargs):
    """
    Render a feed and return its built html.

    Usage: ::
        {% feedparser_render 'http://localhost/sample.xml' %}
    Or with all accepted arguments: ::
        {% feedparser_render 'http://localhost/sample.xml' renderer='CustomRenderer' template='foo/custom.html' expiration=3600 %}
    """
    # Resolve the renderer engine by name, falling back to the configured
    # default when the tag does not specify one.
    engine_name = kwargs.get('renderer', settings.FEED_DEFAULT_RENDERER_ENGINE)
    renderer_class = get_feed_renderer(settings.FEED_RENDER_ENGINES, engine_name)
    return renderer_class().render(
        url,
        template=kwargs.get('template', None),
        expiration=kwargs.get('expiration', 0),
    )
return renderer().render(url, template=renderer_template, expiration=expiration) | def function[feedparser_render, parameter[url]]:
constant[
Render a feed and return its builded html
Usage: ::
{% feedparser_render 'http://localhost/sample.xml' %}
Or with all accepted arguments: ::
{% feedparser_render 'http://localhost/sample.xml' renderer='CustomRenderer' template='foo/custom.html' expiration=3600 %}
]
variable[renderer_name] assign[=] call[name[kwargs].get, parameter[constant[renderer], name[settings].FEED_DEFAULT_RENDERER_ENGINE]]
variable[renderer_template] assign[=] call[name[kwargs].get, parameter[constant[template], constant[None]]]
variable[expiration] assign[=] call[name[kwargs].get, parameter[constant[expiration], constant[0]]]
variable[renderer] assign[=] call[name[get_feed_renderer], parameter[name[settings].FEED_RENDER_ENGINES, name[renderer_name]]]
return[call[call[name[renderer], parameter[]].render, parameter[name[url]]]] | keyword[def] identifier[feedparser_render] ( identifier[url] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[renderer_name] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[settings] . identifier[FEED_DEFAULT_RENDERER_ENGINE] )
identifier[renderer_template] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[expiration] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[renderer] = identifier[get_feed_renderer] ( identifier[settings] . identifier[FEED_RENDER_ENGINES] , identifier[renderer_name] )
keyword[return] identifier[renderer] (). identifier[render] ( identifier[url] , identifier[template] = identifier[renderer_template] , identifier[expiration] = identifier[expiration] ) | def feedparser_render(url, *args, **kwargs):
"""
Render a feed and return its builded html
Usage: ::
{% feedparser_render 'http://localhost/sample.xml' %}
Or with all accepted arguments: ::
{% feedparser_render 'http://localhost/sample.xml' renderer='CustomRenderer' template='foo/custom.html' expiration=3600 %}
"""
renderer_name = kwargs.get('renderer', settings.FEED_DEFAULT_RENDERER_ENGINE)
renderer_template = kwargs.get('template', None)
expiration = kwargs.get('expiration', 0)
renderer = get_feed_renderer(settings.FEED_RENDER_ENGINES, renderer_name)
return renderer().render(url, template=renderer_template, expiration=expiration) |
def files_info(self, *, id: str, **kwargs) -> SlackResponse:
    """Fetch information about a team file.

    Args:
        id (str): The file id. e.g. 'F1234467890'
    """
    # Fold the required file id into the request parameters.
    params = dict(kwargs, id=id)
    return self.api_call("files.info", http_verb="GET", params=params)
return self.api_call("files.info", http_verb="GET", params=kwargs) | def function[files_info, parameter[self]]:
constant[Gets information about a team file.
Args:
id (str): The file id. e.g. 'F1234467890'
]
call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1bf8fa0>], [<ast.Name object at 0x7da1b1bf9480>]]]]
return[call[name[self].api_call, parameter[constant[files.info]]]] | keyword[def] identifier[files_info] ( identifier[self] ,*, identifier[id] : identifier[str] ,** identifier[kwargs] )-> identifier[SlackResponse] :
literal[string]
identifier[kwargs] . identifier[update] ({ literal[string] : identifier[id] })
keyword[return] identifier[self] . identifier[api_call] ( literal[string] , identifier[http_verb] = literal[string] , identifier[params] = identifier[kwargs] ) | def files_info(self, *, id: str, **kwargs) -> SlackResponse:
"""Gets information about a team file.
Args:
id (str): The file id. e.g. 'F1234467890'
"""
kwargs.update({'id': id})
return self.api_call('files.info', http_verb='GET', params=kwargs) |
def tl_role(name, rawtext, text, lineno, inliner, options=None, content=None):
    """
    Link to the TL reference.

    Returns a 2 part tuple containing the list of nodes to insert into
    the document and a list of system messages; both may be empty.

    :param name: The role name used in the document.
    :param rawtext: The entire markup snippet, with role.
    :param text: The text marked with the role.
    :param lineno: The line number where rawtext appears in the input.
    :param inliner: The inliner instance that called us.
    :param options: Directive options for customization.
    :param content: The directive content for customization.
    """
    options = {} if options is None else options
    content = [] if content is None else content
    # TODO Report error on type not found?
    # Usage:
    #   msg = inliner.reporter.error(..., line=lineno)
    #   return [inliner.problematic(rawtext, rawtext, msg)], [msg]
    sphinx_app = inliner.document.settings.env.app
    node = make_link_node(rawtext, sphinx_app, text, options)
    return [node], []
return [node], [] | def function[tl_role, parameter[name, rawtext, text, lineno, inliner, options, content]]:
constant[
Link to the TL reference.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be empty.
:param name: The role name used in the document.
:param rawtext: The entire markup snippet, with role.
:param text: The text marked with the role.
:param lineno: The line number where rawtext appears in the input.
:param inliner: The inliner instance that called us.
:param options: Directive options for customization.
:param content: The directive content for customization.
]
if compare[name[options] is constant[None]] begin[:]
variable[options] assign[=] dictionary[[], []]
if compare[name[content] is constant[None]] begin[:]
variable[content] assign[=] list[[]]
variable[app] assign[=] name[inliner].document.settings.env.app
variable[node] assign[=] call[name[make_link_node], parameter[name[rawtext], name[app], name[text], name[options]]]
return[tuple[[<ast.List object at 0x7da1b1f96a40>, <ast.List object at 0x7da1b1f94370>]]] | keyword[def] identifier[tl_role] ( identifier[name] , identifier[rawtext] , identifier[text] , identifier[lineno] , identifier[inliner] , identifier[options] = keyword[None] , identifier[content] = keyword[None] ):
literal[string]
keyword[if] identifier[options] keyword[is] keyword[None] :
identifier[options] ={}
keyword[if] identifier[content] keyword[is] keyword[None] :
identifier[content] =[]
identifier[app] = identifier[inliner] . identifier[document] . identifier[settings] . identifier[env] . identifier[app]
identifier[node] = identifier[make_link_node] ( identifier[rawtext] , identifier[app] , identifier[text] , identifier[options] )
keyword[return] [ identifier[node] ],[] | def tl_role(name, rawtext, text, lineno, inliner, options=None, content=None):
"""
Link to the TL reference.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be empty.
:param name: The role name used in the document.
:param rawtext: The entire markup snippet, with role.
:param text: The text marked with the role.
:param lineno: The line number where rawtext appears in the input.
:param inliner: The inliner instance that called us.
:param options: Directive options for customization.
:param content: The directive content for customization.
"""
if options is None:
options = {} # depends on [control=['if'], data=['options']]
if content is None:
content = [] # depends on [control=['if'], data=['content']]
# TODO Report error on type not found?
# Usage:
# msg = inliner.reporter.error(..., line=lineno)
# return [inliner.problematic(rawtext, rawtext, msg)], [msg]
app = inliner.document.settings.env.app
node = make_link_node(rawtext, app, text, options)
return ([node], []) |
def get(self, q=None, page=None):
    """Return the registered styles as a JSON response.

    An ETag derived from the extension's content version is used both
    to answer conditional requests early and to tag the response.
    """
    etag = generate_etag(current_ext.content_version.encode('utf8'))
    # Exit early when the client's cached copy is still current.
    self.check_etag(etag, weak=True)
    response = jsonify(current_ext.styles)
    response.set_etag(etag)
    return response
return res | def function[get, parameter[self, q, page]]:
constant[Get styles.]
variable[etag] assign[=] call[name[generate_etag], parameter[call[name[current_ext].content_version.encode, parameter[constant[utf8]]]]]
call[name[self].check_etag, parameter[name[etag]]]
variable[res] assign[=] call[name[jsonify], parameter[name[current_ext].styles]]
call[name[res].set_etag, parameter[name[etag]]]
return[name[res]] | keyword[def] identifier[get] ( identifier[self] , identifier[q] = keyword[None] , identifier[page] = keyword[None] ):
literal[string]
identifier[etag] = identifier[generate_etag] ( identifier[current_ext] . identifier[content_version] . identifier[encode] ( literal[string] ))
identifier[self] . identifier[check_etag] ( identifier[etag] , identifier[weak] = keyword[True] )
identifier[res] = identifier[jsonify] ( identifier[current_ext] . identifier[styles] )
identifier[res] . identifier[set_etag] ( identifier[etag] )
keyword[return] identifier[res] | def get(self, q=None, page=None):
"""Get styles."""
# Check cache to exit early if needed
etag = generate_etag(current_ext.content_version.encode('utf8'))
self.check_etag(etag, weak=True)
# Build response
res = jsonify(current_ext.styles)
res.set_etag(etag)
return res |
def chunks(self):
    """Block dimensions for this dataset's data or None if it's not a dask
    array.

    Raises ValueError when two variables disagree about the chunking of
    a shared dimension.
    """
    dim_chunks = {}
    for var in self.variables.values():
        if var.chunks is None:
            continue
        for dim, sizes in zip(var.dims, var.chunks):
            # Every chunked variable must agree on a dimension's blocks.
            if dim in dim_chunks and sizes != dim_chunks[dim]:
                raise ValueError('inconsistent chunks')
            dim_chunks[dim] = sizes
    return Frozen(SortedKeysDict(dim_chunks))
return Frozen(SortedKeysDict(chunks)) | def function[chunks, parameter[self]]:
constant[Block dimensions for this dataset's data or None if it's not a dask
array.
]
variable[chunks] assign[=] dictionary[[], []]
for taget[name[v]] in starred[call[name[self].variables.values, parameter[]]] begin[:]
if compare[name[v].chunks is_not constant[None]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18c4ced70>, <ast.Name object at 0x7da18c4cc5b0>]]] in starred[call[name[zip], parameter[name[v].dims, name[v].chunks]]] begin[:]
if <ast.BoolOp object at 0x7da18c4cc0a0> begin[:]
<ast.Raise object at 0x7da20e955ed0>
call[name[chunks]][name[dim]] assign[=] name[c]
return[call[name[Frozen], parameter[call[name[SortedKeysDict], parameter[name[chunks]]]]]] | keyword[def] identifier[chunks] ( identifier[self] ):
literal[string]
identifier[chunks] ={}
keyword[for] identifier[v] keyword[in] identifier[self] . identifier[variables] . identifier[values] ():
keyword[if] identifier[v] . identifier[chunks] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[dim] , identifier[c] keyword[in] identifier[zip] ( identifier[v] . identifier[dims] , identifier[v] . identifier[chunks] ):
keyword[if] identifier[dim] keyword[in] identifier[chunks] keyword[and] identifier[c] != identifier[chunks] [ identifier[dim] ]:
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[chunks] [ identifier[dim] ]= identifier[c]
keyword[return] identifier[Frozen] ( identifier[SortedKeysDict] ( identifier[chunks] )) | def chunks(self):
"""Block dimensions for this dataset's data or None if it's not a dask
array.
"""
chunks = {}
for v in self.variables.values():
if v.chunks is not None:
for (dim, c) in zip(v.dims, v.chunks):
if dim in chunks and c != chunks[dim]:
raise ValueError('inconsistent chunks') # depends on [control=['if'], data=[]]
chunks[dim] = c # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['v']]
return Frozen(SortedKeysDict(chunks)) |
def makerandCIJ_dir(n, k, seed=None):
    '''
    Generate a directed random network.

    Parameters
    ----------
    n : int
        number of vertices
    k : int
        number of edges
    seed : hashable, optional
        If None (default), use the np.random's global random state to
        generate random numbers.  Otherwise, use a new
        np.random.RandomState instance seeded with the given value.

    Returns
    -------
    CIJ : NxN np.ndarray
        directed random connection matrix

    Notes
    -----
    No connections are placed on the main diagonal.
    '''
    rng = get_rng(seed)
    # Flat indices of every off-diagonal cell (self-loops excluded).
    off_diag, = np.where(np.logical_not(np.eye(n)).flat)
    # Shuffle the candidate positions and keep the first k of them.
    shuffled = off_diag[rng.permutation(np.size(off_diag))]
    CIJ = np.zeros((n, n))
    CIJ.flat[shuffled[:k]] = 1
    return CIJ
return CIJ | def function[makerandCIJ_dir, parameter[n, k, seed]]:
constant[
This function generates a directed random network
Parameters
----------
N : int
number of vertices
K : int
number of edges
seed : hashable, optional
If None (default), use the np.random's global random state to generate random numbers.
Otherwise, use a new np.random.RandomState instance seeded with the given value.
Returns
-------
CIJ : NxN np.ndarray
directed random connection matrix
Notes
-----
no connections are placed on the main diagonal.
]
variable[rng] assign[=] call[name[get_rng], parameter[name[seed]]]
<ast.Tuple object at 0x7da1b084ca00> assign[=] call[name[np].where, parameter[call[name[np].logical_not, parameter[call[name[np].eye, parameter[name[n]]]]].flat]]
variable[rp] assign[=] call[name[rng].permutation, parameter[call[name[np].size, parameter[name[ix]]]]]
variable[CIJ] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b08b8580>, <ast.Name object at 0x7da1b08b8550>]]]]
call[name[CIJ].flat][call[call[name[ix]][name[rp]]][<ast.Slice object at 0x7da1b08bb610>]] assign[=] constant[1]
return[name[CIJ]] | keyword[def] identifier[makerandCIJ_dir] ( identifier[n] , identifier[k] , identifier[seed] = keyword[None] ):
literal[string]
identifier[rng] = identifier[get_rng] ( identifier[seed] )
identifier[ix] ,= identifier[np] . identifier[where] ( identifier[np] . identifier[logical_not] ( identifier[np] . identifier[eye] ( identifier[n] )). identifier[flat] )
identifier[rp] = identifier[rng] . identifier[permutation] ( identifier[np] . identifier[size] ( identifier[ix] ))
identifier[CIJ] = identifier[np] . identifier[zeros] (( identifier[n] , identifier[n] ))
identifier[CIJ] . identifier[flat] [ identifier[ix] [ identifier[rp] ][: identifier[k] ]]= literal[int]
keyword[return] identifier[CIJ] | def makerandCIJ_dir(n, k, seed=None):
"""
This function generates a directed random network
Parameters
----------
N : int
number of vertices
K : int
number of edges
seed : hashable, optional
If None (default), use the np.random's global random state to generate random numbers.
Otherwise, use a new np.random.RandomState instance seeded with the given value.
Returns
-------
CIJ : NxN np.ndarray
directed random connection matrix
Notes
-----
no connections are placed on the main diagonal.
"""
rng = get_rng(seed)
(ix,) = np.where(np.logical_not(np.eye(n)).flat)
rp = rng.permutation(np.size(ix))
CIJ = np.zeros((n, n))
CIJ.flat[ix[rp][:k]] = 1
return CIJ |
def get_available_storage_system(self, **kwargs):
    """
    Retrieve a specific storage system and its associated volumes
    available to the server profile, based on the given server hardware
    type and enclosure group.

    Args:
        enclosureGroupUri (str):
            The URI of the enclosure group associated with the resource.
        serverHardwareTypeUri (str):
            The URI of the server hardware type associated with the resource.
        storageSystemId (str):
            The storage system ID associated with the resource.

    Returns:
        dict: Available storage system.
    """
    # Fold the keyword filters into the endpoint's query string.
    query_uri = self._helper.build_uri_with_query_string(
        kwargs, '/available-storage-system')
    return self._helper.do_get(query_uri)
return self._helper.do_get(uri) | def function[get_available_storage_system, parameter[self]]:
constant[
Retrieves a specific storage system and its associated volumes available to the server profile based
on the given server hardware type and enclosure group.
Args:
enclosureGroupUri (str):
The URI of the enclosure group associated with the resource.
serverHardwareTypeUri (str):
The URI of the server hardware type associated with the resource.
storageSystemId (str):
The storage system ID associated with the resource.
Returns:
dict: Available storage system.
]
variable[uri] assign[=] call[name[self]._helper.build_uri_with_query_string, parameter[name[kwargs], constant[/available-storage-system]]]
return[call[name[self]._helper.do_get, parameter[name[uri]]]] | keyword[def] identifier[get_available_storage_system] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[uri] = identifier[self] . identifier[_helper] . identifier[build_uri_with_query_string] ( identifier[kwargs] , literal[string] )
keyword[return] identifier[self] . identifier[_helper] . identifier[do_get] ( identifier[uri] ) | def get_available_storage_system(self, **kwargs):
"""
Retrieves a specific storage system and its associated volumes available to the server profile based
on the given server hardware type and enclosure group.
Args:
enclosureGroupUri (str):
The URI of the enclosure group associated with the resource.
serverHardwareTypeUri (str):
The URI of the server hardware type associated with the resource.
storageSystemId (str):
The storage system ID associated with the resource.
Returns:
dict: Available storage system.
"""
uri = self._helper.build_uri_with_query_string(kwargs, '/available-storage-system')
return self._helper.do_get(uri) |
def _i2c_read_bytes(self, length=1):
"""Read the specified number of bytes from the I2C bus. Length is the
number of bytes to read (must be 1 or more).
"""
for i in range(length-1):
# Read a byte and send ACK.
self._command.append('\x20\x00\x00\x13\x00\x00')
# Make sure pins are back in idle state with clock low and data high.
self._ft232h.output_pins({0: GPIO.LOW, 1: GPIO.HIGH}, write=False)
self._command.append(self._ft232h.mpsse_gpio())
# Read last byte and send NAK.
self._command.append('\x20\x00\x00\x13\x00\xFF')
# Make sure pins are back in idle state with clock low and data high.
self._ft232h.output_pins({0: GPIO.LOW, 1: GPIO.HIGH}, write=False)
self._command.append(self._ft232h.mpsse_gpio())
# Increase expected number of bytes.
self._expected += length | def function[_i2c_read_bytes, parameter[self, length]]:
constant[Read the specified number of bytes from the I2C bus. Length is the
number of bytes to read (must be 1 or more).
]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[name[length] - constant[1]]]]] begin[:]
call[name[self]._command.append, parameter[constant[ ]]]
call[name[self]._ft232h.output_pins, parameter[dictionary[[<ast.Constant object at 0x7da1b01bab00>, <ast.Constant object at 0x7da1b01bad40>], [<ast.Attribute object at 0x7da1b01bb4f0>, <ast.Attribute object at 0x7da1b01ba920>]]]]
call[name[self]._command.append, parameter[call[name[self]._ft232h.mpsse_gpio, parameter[]]]]
call[name[self]._command.append, parameter[constant[ ÿ]]]
call[name[self]._ft232h.output_pins, parameter[dictionary[[<ast.Constant object at 0x7da1b01b9bd0>, <ast.Constant object at 0x7da1b01b97e0>], [<ast.Attribute object at 0x7da1b01b8a00>, <ast.Attribute object at 0x7da1b01baec0>]]]]
call[name[self]._command.append, parameter[call[name[self]._ft232h.mpsse_gpio, parameter[]]]]
<ast.AugAssign object at 0x7da1b01b9f60> | keyword[def] identifier[_i2c_read_bytes] ( identifier[self] , identifier[length] = literal[int] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[length] - literal[int] ):
identifier[self] . identifier[_command] . identifier[append] ( literal[string] )
identifier[self] . identifier[_ft232h] . identifier[output_pins] ({ literal[int] : identifier[GPIO] . identifier[LOW] , literal[int] : identifier[GPIO] . identifier[HIGH] }, identifier[write] = keyword[False] )
identifier[self] . identifier[_command] . identifier[append] ( identifier[self] . identifier[_ft232h] . identifier[mpsse_gpio] ())
identifier[self] . identifier[_command] . identifier[append] ( literal[string] )
identifier[self] . identifier[_ft232h] . identifier[output_pins] ({ literal[int] : identifier[GPIO] . identifier[LOW] , literal[int] : identifier[GPIO] . identifier[HIGH] }, identifier[write] = keyword[False] )
identifier[self] . identifier[_command] . identifier[append] ( identifier[self] . identifier[_ft232h] . identifier[mpsse_gpio] ())
identifier[self] . identifier[_expected] += identifier[length] | def _i2c_read_bytes(self, length=1):
"""Read the specified number of bytes from the I2C bus. Length is the
number of bytes to read (must be 1 or more).
"""
for i in range(length - 1):
# Read a byte and send ACK.
self._command.append(' \x00\x00\x13\x00\x00')
# Make sure pins are back in idle state with clock low and data high.
self._ft232h.output_pins({0: GPIO.LOW, 1: GPIO.HIGH}, write=False)
self._command.append(self._ft232h.mpsse_gpio()) # depends on [control=['for'], data=[]]
# Read last byte and send NAK.
self._command.append(' \x00\x00\x13\x00ÿ')
# Make sure pins are back in idle state with clock low and data high.
self._ft232h.output_pins({0: GPIO.LOW, 1: GPIO.HIGH}, write=False)
self._command.append(self._ft232h.mpsse_gpio())
# Increase expected number of bytes.
self._expected += length |
def create_ui_structure(cls, username, napp_name, ui_templates_path,
context):
"""Create the ui directory structure."""
for section in ['k-info-panel', 'k-toolbar', 'k-action-menu']:
os.makedirs(os.path.join(username, napp_name, 'ui', section))
templates = os.listdir(ui_templates_path)
for tmp in templates:
fname = os.path.join(username, napp_name, 'ui',
tmp.rsplit('.template')[0])
with open(fname, 'w') as file:
content = cls.render_template(ui_templates_path, tmp,
context)
file.write(content) | def function[create_ui_structure, parameter[cls, username, napp_name, ui_templates_path, context]]:
constant[Create the ui directory structure.]
for taget[name[section]] in starred[list[[<ast.Constant object at 0x7da20c6ab160>, <ast.Constant object at 0x7da20c6aa920>, <ast.Constant object at 0x7da20c6a84c0>]]] begin[:]
call[name[os].makedirs, parameter[call[name[os].path.join, parameter[name[username], name[napp_name], constant[ui], name[section]]]]]
variable[templates] assign[=] call[name[os].listdir, parameter[name[ui_templates_path]]]
for taget[name[tmp]] in starred[name[templates]] begin[:]
variable[fname] assign[=] call[name[os].path.join, parameter[name[username], name[napp_name], constant[ui], call[call[name[tmp].rsplit, parameter[constant[.template]]]][constant[0]]]]
with call[name[open], parameter[name[fname], constant[w]]] begin[:]
variable[content] assign[=] call[name[cls].render_template, parameter[name[ui_templates_path], name[tmp], name[context]]]
call[name[file].write, parameter[name[content]]] | keyword[def] identifier[create_ui_structure] ( identifier[cls] , identifier[username] , identifier[napp_name] , identifier[ui_templates_path] ,
identifier[context] ):
literal[string]
keyword[for] identifier[section] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[os] . identifier[makedirs] ( identifier[os] . identifier[path] . identifier[join] ( identifier[username] , identifier[napp_name] , literal[string] , identifier[section] ))
identifier[templates] = identifier[os] . identifier[listdir] ( identifier[ui_templates_path] )
keyword[for] identifier[tmp] keyword[in] identifier[templates] :
identifier[fname] = identifier[os] . identifier[path] . identifier[join] ( identifier[username] , identifier[napp_name] , literal[string] ,
identifier[tmp] . identifier[rsplit] ( literal[string] )[ literal[int] ])
keyword[with] identifier[open] ( identifier[fname] , literal[string] ) keyword[as] identifier[file] :
identifier[content] = identifier[cls] . identifier[render_template] ( identifier[ui_templates_path] , identifier[tmp] ,
identifier[context] )
identifier[file] . identifier[write] ( identifier[content] ) | def create_ui_structure(cls, username, napp_name, ui_templates_path, context):
"""Create the ui directory structure."""
for section in ['k-info-panel', 'k-toolbar', 'k-action-menu']:
os.makedirs(os.path.join(username, napp_name, 'ui', section)) # depends on [control=['for'], data=['section']]
templates = os.listdir(ui_templates_path)
for tmp in templates:
fname = os.path.join(username, napp_name, 'ui', tmp.rsplit('.template')[0])
with open(fname, 'w') as file:
content = cls.render_template(ui_templates_path, tmp, context)
file.write(content) # depends on [control=['with'], data=['file']] # depends on [control=['for'], data=['tmp']] |
def metric(cls, name, count, elapsed):
"""A metric function that buffers through numpy
:arg str name: name of the metric
:arg int count: number of items
:arg float elapsed: time in seconds
"""
if name is None:
warnings.warn("Ignoring unnamed metric", stacklevel=3)
return
with cls.lock:
# register with atexit on first call
if cls.dump_atexit and not cls.instances:
atexit.register(cls.dump)
try:
self = cls.instances[name]
except KeyError:
self = cls.instances[name] = cls(name)
self.temp.write(self.struct.pack(count, elapsed)) | def function[metric, parameter[cls, name, count, elapsed]]:
constant[A metric function that buffers through numpy
:arg str name: name of the metric
:arg int count: number of items
:arg float elapsed: time in seconds
]
if compare[name[name] is constant[None]] begin[:]
call[name[warnings].warn, parameter[constant[Ignoring unnamed metric]]]
return[None]
with name[cls].lock begin[:]
if <ast.BoolOp object at 0x7da18bc70b20> begin[:]
call[name[atexit].register, parameter[name[cls].dump]]
<ast.Try object at 0x7da18bc71600>
call[name[self].temp.write, parameter[call[name[self].struct.pack, parameter[name[count], name[elapsed]]]]] | keyword[def] identifier[metric] ( identifier[cls] , identifier[name] , identifier[count] , identifier[elapsed] ):
literal[string]
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[warnings] . identifier[warn] ( literal[string] , identifier[stacklevel] = literal[int] )
keyword[return]
keyword[with] identifier[cls] . identifier[lock] :
keyword[if] identifier[cls] . identifier[dump_atexit] keyword[and] keyword[not] identifier[cls] . identifier[instances] :
identifier[atexit] . identifier[register] ( identifier[cls] . identifier[dump] )
keyword[try] :
identifier[self] = identifier[cls] . identifier[instances] [ identifier[name] ]
keyword[except] identifier[KeyError] :
identifier[self] = identifier[cls] . identifier[instances] [ identifier[name] ]= identifier[cls] ( identifier[name] )
identifier[self] . identifier[temp] . identifier[write] ( identifier[self] . identifier[struct] . identifier[pack] ( identifier[count] , identifier[elapsed] )) | def metric(cls, name, count, elapsed):
"""A metric function that buffers through numpy
:arg str name: name of the metric
:arg int count: number of items
:arg float elapsed: time in seconds
"""
if name is None:
warnings.warn('Ignoring unnamed metric', stacklevel=3)
return # depends on [control=['if'], data=[]]
with cls.lock:
# register with atexit on first call
if cls.dump_atexit and (not cls.instances):
atexit.register(cls.dump) # depends on [control=['if'], data=[]]
try:
self = cls.instances[name] # depends on [control=['try'], data=[]]
except KeyError:
self = cls.instances[name] = cls(name) # depends on [control=['except'], data=[]]
self.temp.write(self.struct.pack(count, elapsed)) # depends on [control=['with'], data=[]] |
def validate_vars(env):
"""Validate the PCH and PCHSTOP construction variables."""
if 'PCH' in env and env['PCH']:
if 'PCHSTOP' not in env:
raise SCons.Errors.UserError("The PCHSTOP construction must be defined if PCH is defined.")
if not SCons.Util.is_String(env['PCHSTOP']):
raise SCons.Errors.UserError("The PCHSTOP construction variable must be a string: %r"%env['PCHSTOP']) | def function[validate_vars, parameter[env]]:
constant[Validate the PCH and PCHSTOP construction variables.]
if <ast.BoolOp object at 0x7da18f00f1c0> begin[:]
if compare[constant[PCHSTOP] <ast.NotIn object at 0x7da2590d7190> name[env]] begin[:]
<ast.Raise object at 0x7da18f00e020>
if <ast.UnaryOp object at 0x7da18f00c4c0> begin[:]
<ast.Raise object at 0x7da18f00f9d0> | keyword[def] identifier[validate_vars] ( identifier[env] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[env] keyword[and] identifier[env] [ literal[string] ]:
keyword[if] literal[string] keyword[not] keyword[in] identifier[env] :
keyword[raise] identifier[SCons] . identifier[Errors] . identifier[UserError] ( literal[string] )
keyword[if] keyword[not] identifier[SCons] . identifier[Util] . identifier[is_String] ( identifier[env] [ literal[string] ]):
keyword[raise] identifier[SCons] . identifier[Errors] . identifier[UserError] ( literal[string] % identifier[env] [ literal[string] ]) | def validate_vars(env):
"""Validate the PCH and PCHSTOP construction variables."""
if 'PCH' in env and env['PCH']:
if 'PCHSTOP' not in env:
raise SCons.Errors.UserError('The PCHSTOP construction must be defined if PCH is defined.') # depends on [control=['if'], data=[]]
if not SCons.Util.is_String(env['PCHSTOP']):
raise SCons.Errors.UserError('The PCHSTOP construction variable must be a string: %r' % env['PCHSTOP']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def base_link(rel, self_rel=False):
"""Helper for getting a link document under the API root, given a rel."""
link = _BASE_LINKS[rel].copy()
link['rel'] = 'self' if self_rel else rel
return link | def function[base_link, parameter[rel, self_rel]]:
constant[Helper for getting a link document under the API root, given a rel.]
variable[link] assign[=] call[call[name[_BASE_LINKS]][name[rel]].copy, parameter[]]
call[name[link]][constant[rel]] assign[=] <ast.IfExp object at 0x7da1b1a5d810>
return[name[link]] | keyword[def] identifier[base_link] ( identifier[rel] , identifier[self_rel] = keyword[False] ):
literal[string]
identifier[link] = identifier[_BASE_LINKS] [ identifier[rel] ]. identifier[copy] ()
identifier[link] [ literal[string] ]= literal[string] keyword[if] identifier[self_rel] keyword[else] identifier[rel]
keyword[return] identifier[link] | def base_link(rel, self_rel=False):
"""Helper for getting a link document under the API root, given a rel."""
link = _BASE_LINKS[rel].copy()
link['rel'] = 'self' if self_rel else rel
return link |
def slug(self):
"""
A concise slug for this feature.
Unlike the internal representation, which is 0-based half-open, the
slug is a 1-based closed interval (a la GFF3).
"""
return '{:s}@{:s}[{:d}, {:d}]'.format(self.type, self.seqid,
self.start + 1, self.end) | def function[slug, parameter[self]]:
constant[
A concise slug for this feature.
Unlike the internal representation, which is 0-based half-open, the
slug is a 1-based closed interval (a la GFF3).
]
return[call[constant[{:s}@{:s}[{:d}, {:d}]].format, parameter[name[self].type, name[self].seqid, binary_operation[name[self].start + constant[1]], name[self].end]]] | keyword[def] identifier[slug] ( identifier[self] ):
literal[string]
keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[type] , identifier[self] . identifier[seqid] ,
identifier[self] . identifier[start] + literal[int] , identifier[self] . identifier[end] ) | def slug(self):
"""
A concise slug for this feature.
Unlike the internal representation, which is 0-based half-open, the
slug is a 1-based closed interval (a la GFF3).
"""
return '{:s}@{:s}[{:d}, {:d}]'.format(self.type, self.seqid, self.start + 1, self.end) |
def nps_survey_show(self, id, **kwargs):
"https://developer.zendesk.com/rest_api/docs/nps-api/surveys#show-survey"
api_path = "/api/v2/nps/surveys/{id}"
api_path = api_path.format(id=id)
return self.call(api_path, **kwargs) | def function[nps_survey_show, parameter[self, id]]:
constant[https://developer.zendesk.com/rest_api/docs/nps-api/surveys#show-survey]
variable[api_path] assign[=] constant[/api/v2/nps/surveys/{id}]
variable[api_path] assign[=] call[name[api_path].format, parameter[]]
return[call[name[self].call, parameter[name[api_path]]]] | keyword[def] identifier[nps_survey_show] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[api_path] = literal[string]
identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[id] = identifier[id] )
keyword[return] identifier[self] . identifier[call] ( identifier[api_path] ,** identifier[kwargs] ) | def nps_survey_show(self, id, **kwargs):
"""https://developer.zendesk.com/rest_api/docs/nps-api/surveys#show-survey"""
api_path = '/api/v2/nps/surveys/{id}'
api_path = api_path.format(id=id)
return self.call(api_path, **kwargs) |
def storage_distribution(network, scaling=1, filename=None):
"""
Plot storage distribution as circles on grid nodes
Displays storage size and distribution in network.
Parameters
----------
network : PyPSA network container
Holds topology of grid including results from powerflow analysis
filename : str
Specify filename
If not given, figure will be show directly
"""
stores = network.storage_units
storage_distribution = network.storage_units.p_nom_opt[stores.index]\
.groupby(network.storage_units.bus)\
.sum().reindex(network.buses.index, fill_value=0.)
fig, ax = plt.subplots(1, 1)
fig.set_size_inches(6, 6)
msd_max = storage_distribution.max()
msd_median = storage_distribution[storage_distribution != 0].median()
msd_min = storage_distribution[storage_distribution > 1].min()
if msd_max != 0:
LabelVal = int(log10(msd_max))
else:
LabelVal = 0
if LabelVal < 0:
LabelUnit = 'kW'
msd_max, msd_median, msd_min = msd_max * \
1000, msd_median * 1000, msd_min * 1000
storage_distribution = storage_distribution * 1000
elif LabelVal < 3:
LabelUnit = 'MW'
else:
LabelUnit = 'GW'
msd_max, msd_median, msd_min = msd_max / \
1000, msd_median / 1000, msd_min / 1000
storage_distribution = storage_distribution / 1000
if sum(storage_distribution) == 0:
network.plot(bus_sizes=0, ax=ax, title="No storages")
else:
network.plot(
bus_sizes=storage_distribution * scaling,
ax=ax,
line_widths=0.3,
title="Storage distribution")
# Here we create a legend:
# we'll plot empty lists with the desired size and label
for area in [msd_max, msd_median, msd_min]:
plt.scatter([], [], c='white', s=area * scaling,
label='= ' + str(round(area, 0)) + LabelUnit + ' ')
plt.legend(scatterpoints=1, labelspacing=1, title='Storage size')
if filename is None:
plt.show()
else:
plt.savefig(filename)
plt.close() | def function[storage_distribution, parameter[network, scaling, filename]]:
constant[
Plot storage distribution as circles on grid nodes
Displays storage size and distribution in network.
Parameters
----------
network : PyPSA network container
Holds topology of grid including results from powerflow analysis
filename : str
Specify filename
If not given, figure will be show directly
]
variable[stores] assign[=] name[network].storage_units
variable[storage_distribution] assign[=] call[call[call[call[name[network].storage_units.p_nom_opt][name[stores].index].groupby, parameter[name[network].storage_units.bus]].sum, parameter[]].reindex, parameter[name[network].buses.index]]
<ast.Tuple object at 0x7da1b1b9c4c0> assign[=] call[name[plt].subplots, parameter[constant[1], constant[1]]]
call[name[fig].set_size_inches, parameter[constant[6], constant[6]]]
variable[msd_max] assign[=] call[name[storage_distribution].max, parameter[]]
variable[msd_median] assign[=] call[call[name[storage_distribution]][compare[name[storage_distribution] not_equal[!=] constant[0]]].median, parameter[]]
variable[msd_min] assign[=] call[call[name[storage_distribution]][compare[name[storage_distribution] greater[>] constant[1]]].min, parameter[]]
if compare[name[msd_max] not_equal[!=] constant[0]] begin[:]
variable[LabelVal] assign[=] call[name[int], parameter[call[name[log10], parameter[name[msd_max]]]]]
if compare[name[LabelVal] less[<] constant[0]] begin[:]
variable[LabelUnit] assign[=] constant[kW]
<ast.Tuple object at 0x7da1b1b9e920> assign[=] tuple[[<ast.BinOp object at 0x7da1b1b9e9e0>, <ast.BinOp object at 0x7da1b1b9e860>, <ast.BinOp object at 0x7da1b1b9e7a0>]]
variable[storage_distribution] assign[=] binary_operation[name[storage_distribution] * constant[1000]]
if compare[call[name[sum], parameter[name[storage_distribution]]] equal[==] constant[0]] begin[:]
call[name[network].plot, parameter[]]
for taget[name[area]] in starred[list[[<ast.Name object at 0x7da1b1b9c220>, <ast.Name object at 0x7da1b1b9cb80>, <ast.Name object at 0x7da1b1b9c3d0>]]] begin[:]
call[name[plt].scatter, parameter[list[[]], list[[]]]]
call[name[plt].legend, parameter[]]
if compare[name[filename] is constant[None]] begin[:]
call[name[plt].show, parameter[]] | keyword[def] identifier[storage_distribution] ( identifier[network] , identifier[scaling] = literal[int] , identifier[filename] = keyword[None] ):
literal[string]
identifier[stores] = identifier[network] . identifier[storage_units]
identifier[storage_distribution] = identifier[network] . identifier[storage_units] . identifier[p_nom_opt] [ identifier[stores] . identifier[index] ]. identifier[groupby] ( identifier[network] . identifier[storage_units] . identifier[bus] ). identifier[sum] (). identifier[reindex] ( identifier[network] . identifier[buses] . identifier[index] , identifier[fill_value] = literal[int] )
identifier[fig] , identifier[ax] = identifier[plt] . identifier[subplots] ( literal[int] , literal[int] )
identifier[fig] . identifier[set_size_inches] ( literal[int] , literal[int] )
identifier[msd_max] = identifier[storage_distribution] . identifier[max] ()
identifier[msd_median] = identifier[storage_distribution] [ identifier[storage_distribution] != literal[int] ]. identifier[median] ()
identifier[msd_min] = identifier[storage_distribution] [ identifier[storage_distribution] > literal[int] ]. identifier[min] ()
keyword[if] identifier[msd_max] != literal[int] :
identifier[LabelVal] = identifier[int] ( identifier[log10] ( identifier[msd_max] ))
keyword[else] :
identifier[LabelVal] = literal[int]
keyword[if] identifier[LabelVal] < literal[int] :
identifier[LabelUnit] = literal[string]
identifier[msd_max] , identifier[msd_median] , identifier[msd_min] = identifier[msd_max] * literal[int] , identifier[msd_median] * literal[int] , identifier[msd_min] * literal[int]
identifier[storage_distribution] = identifier[storage_distribution] * literal[int]
keyword[elif] identifier[LabelVal] < literal[int] :
identifier[LabelUnit] = literal[string]
keyword[else] :
identifier[LabelUnit] = literal[string]
identifier[msd_max] , identifier[msd_median] , identifier[msd_min] = identifier[msd_max] / literal[int] , identifier[msd_median] / literal[int] , identifier[msd_min] / literal[int]
identifier[storage_distribution] = identifier[storage_distribution] / literal[int]
keyword[if] identifier[sum] ( identifier[storage_distribution] )== literal[int] :
identifier[network] . identifier[plot] ( identifier[bus_sizes] = literal[int] , identifier[ax] = identifier[ax] , identifier[title] = literal[string] )
keyword[else] :
identifier[network] . identifier[plot] (
identifier[bus_sizes] = identifier[storage_distribution] * identifier[scaling] ,
identifier[ax] = identifier[ax] ,
identifier[line_widths] = literal[int] ,
identifier[title] = literal[string] )
keyword[for] identifier[area] keyword[in] [ identifier[msd_max] , identifier[msd_median] , identifier[msd_min] ]:
identifier[plt] . identifier[scatter] ([],[], identifier[c] = literal[string] , identifier[s] = identifier[area] * identifier[scaling] ,
identifier[label] = literal[string] + identifier[str] ( identifier[round] ( identifier[area] , literal[int] ))+ identifier[LabelUnit] + literal[string] )
identifier[plt] . identifier[legend] ( identifier[scatterpoints] = literal[int] , identifier[labelspacing] = literal[int] , identifier[title] = literal[string] )
keyword[if] identifier[filename] keyword[is] keyword[None] :
identifier[plt] . identifier[show] ()
keyword[else] :
identifier[plt] . identifier[savefig] ( identifier[filename] )
identifier[plt] . identifier[close] () | def storage_distribution(network, scaling=1, filename=None):
"""
Plot storage distribution as circles on grid nodes
Displays storage size and distribution in network.
Parameters
----------
network : PyPSA network container
Holds topology of grid including results from powerflow analysis
filename : str
Specify filename
If not given, figure will be show directly
"""
stores = network.storage_units
storage_distribution = network.storage_units.p_nom_opt[stores.index].groupby(network.storage_units.bus).sum().reindex(network.buses.index, fill_value=0.0)
(fig, ax) = plt.subplots(1, 1)
fig.set_size_inches(6, 6)
msd_max = storage_distribution.max()
msd_median = storage_distribution[storage_distribution != 0].median()
msd_min = storage_distribution[storage_distribution > 1].min()
if msd_max != 0:
LabelVal = int(log10(msd_max)) # depends on [control=['if'], data=['msd_max']]
else:
LabelVal = 0
if LabelVal < 0:
LabelUnit = 'kW'
(msd_max, msd_median, msd_min) = (msd_max * 1000, msd_median * 1000, msd_min * 1000)
storage_distribution = storage_distribution * 1000 # depends on [control=['if'], data=[]]
elif LabelVal < 3:
LabelUnit = 'MW' # depends on [control=['if'], data=[]]
else:
LabelUnit = 'GW'
(msd_max, msd_median, msd_min) = (msd_max / 1000, msd_median / 1000, msd_min / 1000)
storage_distribution = storage_distribution / 1000
if sum(storage_distribution) == 0:
network.plot(bus_sizes=0, ax=ax, title='No storages') # depends on [control=['if'], data=[]]
else:
network.plot(bus_sizes=storage_distribution * scaling, ax=ax, line_widths=0.3, title='Storage distribution')
# Here we create a legend:
# we'll plot empty lists with the desired size and label
for area in [msd_max, msd_median, msd_min]:
plt.scatter([], [], c='white', s=area * scaling, label='= ' + str(round(area, 0)) + LabelUnit + ' ') # depends on [control=['for'], data=['area']]
plt.legend(scatterpoints=1, labelspacing=1, title='Storage size')
if filename is None:
plt.show() # depends on [control=['if'], data=[]]
else:
plt.savefig(filename)
plt.close() |
def sample_out_dir(self):
"""Absolute path to permanent location in working directory
where EricScript output for the current sample will be stored.
(a subdirectory of `output_dir`)
"""
if self._sample_out_dir is None:
self._sample_out_dir = os.path.join(
self.output_dir, self._sample_name
)
return self._sample_out_dir | def function[sample_out_dir, parameter[self]]:
constant[Absolute path to permanent location in working directory
where EricScript output for the current sample will be stored.
(a subdirectory of `output_dir`)
]
if compare[name[self]._sample_out_dir is constant[None]] begin[:]
name[self]._sample_out_dir assign[=] call[name[os].path.join, parameter[name[self].output_dir, name[self]._sample_name]]
return[name[self]._sample_out_dir] | keyword[def] identifier[sample_out_dir] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_sample_out_dir] keyword[is] keyword[None] :
identifier[self] . identifier[_sample_out_dir] = identifier[os] . identifier[path] . identifier[join] (
identifier[self] . identifier[output_dir] , identifier[self] . identifier[_sample_name]
)
keyword[return] identifier[self] . identifier[_sample_out_dir] | def sample_out_dir(self):
"""Absolute path to permanent location in working directory
where EricScript output for the current sample will be stored.
(a subdirectory of `output_dir`)
"""
if self._sample_out_dir is None:
self._sample_out_dir = os.path.join(self.output_dir, self._sample_name) # depends on [control=['if'], data=[]]
return self._sample_out_dir |
def import_certificate(self):
"""
Returns errors on:
Certificate, PrivateKey or Chain not being properly formatted
Arn not existing if its provided
PrivateKey size > 2048
Certificate expired or is not yet in effect
Does not return errors on:
Checking Certificate is legit, or a selfsigned chain is provided
:return: str(JSON) for response
"""
certificate = self._get_param('Certificate')
private_key = self._get_param('PrivateKey')
chain = self._get_param('CertificateChain') # Optional
current_arn = self._get_param('CertificateArn') # Optional
# Simple parameter decoding. Rather do it here as its a data transport decision not part of the
# actual data
try:
certificate = base64.standard_b64decode(certificate)
except Exception:
return AWSValidationException('The certificate is not PEM-encoded or is not valid.').response()
try:
private_key = base64.standard_b64decode(private_key)
except Exception:
return AWSValidationException('The private key is not PEM-encoded or is not valid.').response()
if chain is not None:
try:
chain = base64.standard_b64decode(chain)
except Exception:
return AWSValidationException('The certificate chain is not PEM-encoded or is not valid.').response()
try:
arn = self.acm_backend.import_cert(certificate, private_key, chain=chain, arn=current_arn)
except AWSError as err:
return err.response()
return json.dumps({'CertificateArn': arn}) | def function[import_certificate, parameter[self]]:
constant[
Returns errors on:
Certificate, PrivateKey or Chain not being properly formatted
Arn not existing if its provided
PrivateKey size > 2048
Certificate expired or is not yet in effect
Does not return errors on:
Checking Certificate is legit, or a selfsigned chain is provided
:return: str(JSON) for response
]
variable[certificate] assign[=] call[name[self]._get_param, parameter[constant[Certificate]]]
variable[private_key] assign[=] call[name[self]._get_param, parameter[constant[PrivateKey]]]
variable[chain] assign[=] call[name[self]._get_param, parameter[constant[CertificateChain]]]
variable[current_arn] assign[=] call[name[self]._get_param, parameter[constant[CertificateArn]]]
<ast.Try object at 0x7da1b18ac220>
<ast.Try object at 0x7da1b18ac670>
if compare[name[chain] is_not constant[None]] begin[:]
<ast.Try object at 0x7da1b170c220>
<ast.Try object at 0x7da1b170ecb0>
return[call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da1b170dba0>], [<ast.Name object at 0x7da1b170d780>]]]]] | keyword[def] identifier[import_certificate] ( identifier[self] ):
literal[string]
identifier[certificate] = identifier[self] . identifier[_get_param] ( literal[string] )
identifier[private_key] = identifier[self] . identifier[_get_param] ( literal[string] )
identifier[chain] = identifier[self] . identifier[_get_param] ( literal[string] )
identifier[current_arn] = identifier[self] . identifier[_get_param] ( literal[string] )
keyword[try] :
identifier[certificate] = identifier[base64] . identifier[standard_b64decode] ( identifier[certificate] )
keyword[except] identifier[Exception] :
keyword[return] identifier[AWSValidationException] ( literal[string] ). identifier[response] ()
keyword[try] :
identifier[private_key] = identifier[base64] . identifier[standard_b64decode] ( identifier[private_key] )
keyword[except] identifier[Exception] :
keyword[return] identifier[AWSValidationException] ( literal[string] ). identifier[response] ()
keyword[if] identifier[chain] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[chain] = identifier[base64] . identifier[standard_b64decode] ( identifier[chain] )
keyword[except] identifier[Exception] :
keyword[return] identifier[AWSValidationException] ( literal[string] ). identifier[response] ()
keyword[try] :
identifier[arn] = identifier[self] . identifier[acm_backend] . identifier[import_cert] ( identifier[certificate] , identifier[private_key] , identifier[chain] = identifier[chain] , identifier[arn] = identifier[current_arn] )
keyword[except] identifier[AWSError] keyword[as] identifier[err] :
keyword[return] identifier[err] . identifier[response] ()
keyword[return] identifier[json] . identifier[dumps] ({ literal[string] : identifier[arn] }) | def import_certificate(self):
"""
Returns errors on:
Certificate, PrivateKey or Chain not being properly formatted
Arn not existing if its provided
PrivateKey size > 2048
Certificate expired or is not yet in effect
Does not return errors on:
Checking Certificate is legit, or a selfsigned chain is provided
:return: str(JSON) for response
"""
certificate = self._get_param('Certificate')
private_key = self._get_param('PrivateKey')
chain = self._get_param('CertificateChain') # Optional
current_arn = self._get_param('CertificateArn') # Optional
# Simple parameter decoding. Rather do it here as its a data transport decision not part of the
# actual data
try:
certificate = base64.standard_b64decode(certificate) # depends on [control=['try'], data=[]]
except Exception:
return AWSValidationException('The certificate is not PEM-encoded or is not valid.').response() # depends on [control=['except'], data=[]]
try:
private_key = base64.standard_b64decode(private_key) # depends on [control=['try'], data=[]]
except Exception:
return AWSValidationException('The private key is not PEM-encoded or is not valid.').response() # depends on [control=['except'], data=[]]
if chain is not None:
try:
chain = base64.standard_b64decode(chain) # depends on [control=['try'], data=[]]
except Exception:
return AWSValidationException('The certificate chain is not PEM-encoded or is not valid.').response() # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['chain']]
try:
arn = self.acm_backend.import_cert(certificate, private_key, chain=chain, arn=current_arn) # depends on [control=['try'], data=[]]
except AWSError as err:
return err.response() # depends on [control=['except'], data=['err']]
return json.dumps({'CertificateArn': arn}) |
def save(self, *args, **kwargs):
"""
Update ``self.modified``.
"""
self.modified = timezone.now()
super(AbstractBaseModel, self).save(*args, **kwargs) | def function[save, parameter[self]]:
constant[
Update ``self.modified``.
]
name[self].modified assign[=] call[name[timezone].now, parameter[]]
call[call[name[super], parameter[name[AbstractBaseModel], name[self]]].save, parameter[<ast.Starred object at 0x7da204565c90>]] | keyword[def] identifier[save] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[modified] = identifier[timezone] . identifier[now] ()
identifier[super] ( identifier[AbstractBaseModel] , identifier[self] ). identifier[save] (* identifier[args] ,** identifier[kwargs] ) | def save(self, *args, **kwargs):
"""
Update ``self.modified``.
"""
self.modified = timezone.now()
super(AbstractBaseModel, self).save(*args, **kwargs) |
def init_variables(self, verbose=False):
"""Redefine the causes of the graph."""
# Resetting adjacency matrix
for i in range(self.nodes):
for j in np.random.choice(range(self.nodes),
np.random.randint(
0, self.parents_max + 1),
replace=False):
if i != j:
self.adjacency_matrix[j, i] = 1
try:
assert any([sum(self.adjacency_matrix[:, i]) ==
self.parents_max for i in range(self.nodes)])
self.g = nx.DiGraph(self.adjacency_matrix)
assert list(nx.simple_cycles(self.g))
assert any(len(i) == 2 for i in nx.simple_cycles(self.g))
except AssertionError:
if verbose:
print("Regenerating, graph non valid...")
self.init_variables()
if verbose:
print("Matrix generated ! \
Number of cycles: {}".format(len(list(nx.simple_cycles(self.g)))))
for i in range(self.nodes):
self.data.iloc[:, i] = scale(self.initial_generator(self.points))
# Mechanisms
self.cfunctions = [self.mechanism(int(sum(self.adjacency_matrix[:, i])),
self.points, self.noise, noise_coeff=self.noise_coeff) for i in range(self.nodes)] | def function[init_variables, parameter[self, verbose]]:
constant[Redefine the causes of the graph.]
for taget[name[i]] in starred[call[name[range], parameter[name[self].nodes]]] begin[:]
for taget[name[j]] in starred[call[name[np].random.choice, parameter[call[name[range], parameter[name[self].nodes]], call[name[np].random.randint, parameter[constant[0], binary_operation[name[self].parents_max + constant[1]]]]]]] begin[:]
if compare[name[i] not_equal[!=] name[j]] begin[:]
call[name[self].adjacency_matrix][tuple[[<ast.Name object at 0x7da18dc9bd60>, <ast.Name object at 0x7da18dc99270>]]] assign[=] constant[1]
<ast.Try object at 0x7da18dc99210>
if name[verbose] begin[:]
call[name[print], parameter[call[constant[Matrix generated ! Number of cycles: {}].format, parameter[call[name[len], parameter[call[name[list], parameter[call[name[nx].simple_cycles, parameter[name[self].g]]]]]]]]]]
for taget[name[i]] in starred[call[name[range], parameter[name[self].nodes]]] begin[:]
call[name[self].data.iloc][tuple[[<ast.Slice object at 0x7da1b015a470>, <ast.Name object at 0x7da1b0159270>]]] assign[=] call[name[scale], parameter[call[name[self].initial_generator, parameter[name[self].points]]]]
name[self].cfunctions assign[=] <ast.ListComp object at 0x7da1b01593c0> | keyword[def] identifier[init_variables] ( identifier[self] , identifier[verbose] = keyword[False] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[nodes] ):
keyword[for] identifier[j] keyword[in] identifier[np] . identifier[random] . identifier[choice] ( identifier[range] ( identifier[self] . identifier[nodes] ),
identifier[np] . identifier[random] . identifier[randint] (
literal[int] , identifier[self] . identifier[parents_max] + literal[int] ),
identifier[replace] = keyword[False] ):
keyword[if] identifier[i] != identifier[j] :
identifier[self] . identifier[adjacency_matrix] [ identifier[j] , identifier[i] ]= literal[int]
keyword[try] :
keyword[assert] identifier[any] ([ identifier[sum] ( identifier[self] . identifier[adjacency_matrix] [:, identifier[i] ])==
identifier[self] . identifier[parents_max] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[nodes] )])
identifier[self] . identifier[g] = identifier[nx] . identifier[DiGraph] ( identifier[self] . identifier[adjacency_matrix] )
keyword[assert] identifier[list] ( identifier[nx] . identifier[simple_cycles] ( identifier[self] . identifier[g] ))
keyword[assert] identifier[any] ( identifier[len] ( identifier[i] )== literal[int] keyword[for] identifier[i] keyword[in] identifier[nx] . identifier[simple_cycles] ( identifier[self] . identifier[g] ))
keyword[except] identifier[AssertionError] :
keyword[if] identifier[verbose] :
identifier[print] ( literal[string] )
identifier[self] . identifier[init_variables] ()
keyword[if] identifier[verbose] :
identifier[print] ( literal[string] . identifier[format] ( identifier[len] ( identifier[list] ( identifier[nx] . identifier[simple_cycles] ( identifier[self] . identifier[g] )))))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[nodes] ):
identifier[self] . identifier[data] . identifier[iloc] [:, identifier[i] ]= identifier[scale] ( identifier[self] . identifier[initial_generator] ( identifier[self] . identifier[points] ))
identifier[self] . identifier[cfunctions] =[ identifier[self] . identifier[mechanism] ( identifier[int] ( identifier[sum] ( identifier[self] . identifier[adjacency_matrix] [:, identifier[i] ])),
identifier[self] . identifier[points] , identifier[self] . identifier[noise] , identifier[noise_coeff] = identifier[self] . identifier[noise_coeff] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[nodes] )] | def init_variables(self, verbose=False):
"""Redefine the causes of the graph."""
# Resetting adjacency matrix
for i in range(self.nodes):
for j in np.random.choice(range(self.nodes), np.random.randint(0, self.parents_max + 1), replace=False):
if i != j:
self.adjacency_matrix[j, i] = 1 # depends on [control=['if'], data=['i', 'j']] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
try:
assert any([sum(self.adjacency_matrix[:, i]) == self.parents_max for i in range(self.nodes)])
self.g = nx.DiGraph(self.adjacency_matrix)
assert list(nx.simple_cycles(self.g))
assert any((len(i) == 2 for i in nx.simple_cycles(self.g))) # depends on [control=['try'], data=[]]
except AssertionError:
if verbose:
print('Regenerating, graph non valid...') # depends on [control=['if'], data=[]]
self.init_variables() # depends on [control=['except'], data=[]]
if verbose:
print('Matrix generated ! Number of cycles: {}'.format(len(list(nx.simple_cycles(self.g))))) # depends on [control=['if'], data=[]]
for i in range(self.nodes):
self.data.iloc[:, i] = scale(self.initial_generator(self.points)) # depends on [control=['for'], data=['i']]
# Mechanisms
self.cfunctions = [self.mechanism(int(sum(self.adjacency_matrix[:, i])), self.points, self.noise, noise_coeff=self.noise_coeff) for i in range(self.nodes)] |
def valid_function_signature(input_type, func):
"""Check that the declared number of inputs (the length of `input_type`)
and the number of inputs to `func` are equal.
Parameters
----------
input_type : List[DataType]
func : callable
Returns
-------
inspect.Signature
"""
funcsig = signature(func)
declared_parameter_count = len(input_type)
function_parameter_count = parameter_count(funcsig)
if declared_parameter_count != function_parameter_count:
raise TypeError(
'Function signature {!r} has {:d} parameters, '
'input_type has {:d}. These must match'.format(
func.__name__,
function_parameter_count,
declared_parameter_count,
)
)
return funcsig | def function[valid_function_signature, parameter[input_type, func]]:
constant[Check that the declared number of inputs (the length of `input_type`)
and the number of inputs to `func` are equal.
Parameters
----------
input_type : List[DataType]
func : callable
Returns
-------
inspect.Signature
]
variable[funcsig] assign[=] call[name[signature], parameter[name[func]]]
variable[declared_parameter_count] assign[=] call[name[len], parameter[name[input_type]]]
variable[function_parameter_count] assign[=] call[name[parameter_count], parameter[name[funcsig]]]
if compare[name[declared_parameter_count] not_equal[!=] name[function_parameter_count]] begin[:]
<ast.Raise object at 0x7da20e956d10>
return[name[funcsig]] | keyword[def] identifier[valid_function_signature] ( identifier[input_type] , identifier[func] ):
literal[string]
identifier[funcsig] = identifier[signature] ( identifier[func] )
identifier[declared_parameter_count] = identifier[len] ( identifier[input_type] )
identifier[function_parameter_count] = identifier[parameter_count] ( identifier[funcsig] )
keyword[if] identifier[declared_parameter_count] != identifier[function_parameter_count] :
keyword[raise] identifier[TypeError] (
literal[string]
literal[string] . identifier[format] (
identifier[func] . identifier[__name__] ,
identifier[function_parameter_count] ,
identifier[declared_parameter_count] ,
)
)
keyword[return] identifier[funcsig] | def valid_function_signature(input_type, func):
"""Check that the declared number of inputs (the length of `input_type`)
and the number of inputs to `func` are equal.
Parameters
----------
input_type : List[DataType]
func : callable
Returns
-------
inspect.Signature
"""
funcsig = signature(func)
declared_parameter_count = len(input_type)
function_parameter_count = parameter_count(funcsig)
if declared_parameter_count != function_parameter_count:
raise TypeError('Function signature {!r} has {:d} parameters, input_type has {:d}. These must match'.format(func.__name__, function_parameter_count, declared_parameter_count)) # depends on [control=['if'], data=['declared_parameter_count', 'function_parameter_count']]
return funcsig |
def _AnsiCmd(command_list):
"""Takes a list of SGR values and formats them as an ANSI escape sequence.
Args:
command_list: List of strings, each string represents an SGR value.
e.g. 'fg_blue', 'bg_yellow'
Returns:
The ANSI escape sequence.
Raises:
ValueError: if a member of command_list does not map to a valid SGR value.
"""
if not isinstance(command_list, list):
raise ValueError('Invalid list: %s' % command_list)
# Checks that entries are valid SGR names.
# No checking is done for sequences that are correct but 'nonsensical'.
for sgr in command_list:
if sgr.lower() not in SGR:
raise ValueError('Invalid or unsupported SGR name: %s' % sgr)
# Convert to numerical strings.
command_str = [str(SGR[x.lower()]) for x in command_list]
# Wrap values in Ansi escape sequence (CSI prefix & SGR suffix).
return '\033[%sm' % (';'.join(command_str)) | def function[_AnsiCmd, parameter[command_list]]:
constant[Takes a list of SGR values and formats them as an ANSI escape sequence.
Args:
command_list: List of strings, each string represents an SGR value.
e.g. 'fg_blue', 'bg_yellow'
Returns:
The ANSI escape sequence.
Raises:
ValueError: if a member of command_list does not map to a valid SGR value.
]
if <ast.UnaryOp object at 0x7da1b17b4d60> begin[:]
<ast.Raise object at 0x7da1b17b5c60>
for taget[name[sgr]] in starred[name[command_list]] begin[:]
if compare[call[name[sgr].lower, parameter[]] <ast.NotIn object at 0x7da2590d7190> name[SGR]] begin[:]
<ast.Raise object at 0x7da1b17b42e0>
variable[command_str] assign[=] <ast.ListComp object at 0x7da20c6a8eb0>
return[binary_operation[constant[[%sm] <ast.Mod object at 0x7da2590d6920> call[constant[;].join, parameter[name[command_str]]]]] | keyword[def] identifier[_AnsiCmd] ( identifier[command_list] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[command_list] , identifier[list] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[command_list] )
keyword[for] identifier[sgr] keyword[in] identifier[command_list] :
keyword[if] identifier[sgr] . identifier[lower] () keyword[not] keyword[in] identifier[SGR] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[sgr] )
identifier[command_str] =[ identifier[str] ( identifier[SGR] [ identifier[x] . identifier[lower] ()]) keyword[for] identifier[x] keyword[in] identifier[command_list] ]
keyword[return] literal[string] %( literal[string] . identifier[join] ( identifier[command_str] )) | def _AnsiCmd(command_list):
"""Takes a list of SGR values and formats them as an ANSI escape sequence.
Args:
command_list: List of strings, each string represents an SGR value.
e.g. 'fg_blue', 'bg_yellow'
Returns:
The ANSI escape sequence.
Raises:
ValueError: if a member of command_list does not map to a valid SGR value.
"""
if not isinstance(command_list, list):
raise ValueError('Invalid list: %s' % command_list) # depends on [control=['if'], data=[]]
# Checks that entries are valid SGR names.
# No checking is done for sequences that are correct but 'nonsensical'.
for sgr in command_list:
if sgr.lower() not in SGR:
raise ValueError('Invalid or unsupported SGR name: %s' % sgr) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sgr']]
# Convert to numerical strings.
command_str = [str(SGR[x.lower()]) for x in command_list]
# Wrap values in Ansi escape sequence (CSI prefix & SGR suffix).
return '\x1b[%sm' % ';'.join(command_str) |
def _do_cross_validation(self, clf, data, task):
"""Run voxelwise cross validation based on correlation vectors.
clf: classification function
the classifier to be used in cross validation
data: 3D numpy array
If using sklearn.svm.SVC with precomputed kernel,
it is in shape [num_processed_voxels, num_epochs, num_epochs];
otherwise it is the input argument corr,
in shape [num_processed_voxels, num_epochs, num_voxels]
task: tuple (start_voxel_id, num_processed_voxels)
depicting the voxels assigned to compute
Returns
-------
results: list of tuple (voxel_id, accuracy)
the accuracy numbers of all voxels, in accuracy descending order
the length of array equals the number of assigned voxels
"""
time1 = time.time()
if isinstance(clf, sklearn.svm.SVC) and clf.kernel == 'precomputed'\
and self.use_multiprocessing:
inlist = [(clf, i + task[0], self.num_folds, data[i, :, :],
self.labels) for i in range(task[1])]
with multiprocessing.Pool(self.process_num) as pool:
results = list(pool.starmap(_cross_validation_for_one_voxel,
inlist))
else:
results = []
for i in range(task[1]):
result = _cross_validation_for_one_voxel(clf, i + task[0],
self.num_folds,
data[i, :, :],
self.labels)
results.append(result)
time2 = time.time()
logger.debug(
'cross validation for %d voxels, takes %.2f s' %
(task[1], (time2 - time1))
)
return results | def function[_do_cross_validation, parameter[self, clf, data, task]]:
constant[Run voxelwise cross validation based on correlation vectors.
clf: classification function
the classifier to be used in cross validation
data: 3D numpy array
If using sklearn.svm.SVC with precomputed kernel,
it is in shape [num_processed_voxels, num_epochs, num_epochs];
otherwise it is the input argument corr,
in shape [num_processed_voxels, num_epochs, num_voxels]
task: tuple (start_voxel_id, num_processed_voxels)
depicting the voxels assigned to compute
Returns
-------
results: list of tuple (voxel_id, accuracy)
the accuracy numbers of all voxels, in accuracy descending order
the length of array equals the number of assigned voxels
]
variable[time1] assign[=] call[name[time].time, parameter[]]
if <ast.BoolOp object at 0x7da18fe92440> begin[:]
variable[inlist] assign[=] <ast.ListComp object at 0x7da18fe90f70>
with call[name[multiprocessing].Pool, parameter[name[self].process_num]] begin[:]
variable[results] assign[=] call[name[list], parameter[call[name[pool].starmap, parameter[name[_cross_validation_for_one_voxel], name[inlist]]]]]
variable[time2] assign[=] call[name[time].time, parameter[]]
call[name[logger].debug, parameter[binary_operation[constant[cross validation for %d voxels, takes %.2f s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b07916c0>, <ast.BinOp object at 0x7da1b07912a0>]]]]]
return[name[results]] | keyword[def] identifier[_do_cross_validation] ( identifier[self] , identifier[clf] , identifier[data] , identifier[task] ):
literal[string]
identifier[time1] = identifier[time] . identifier[time] ()
keyword[if] identifier[isinstance] ( identifier[clf] , identifier[sklearn] . identifier[svm] . identifier[SVC] ) keyword[and] identifier[clf] . identifier[kernel] == literal[string] keyword[and] identifier[self] . identifier[use_multiprocessing] :
identifier[inlist] =[( identifier[clf] , identifier[i] + identifier[task] [ literal[int] ], identifier[self] . identifier[num_folds] , identifier[data] [ identifier[i] ,:,:],
identifier[self] . identifier[labels] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[task] [ literal[int] ])]
keyword[with] identifier[multiprocessing] . identifier[Pool] ( identifier[self] . identifier[process_num] ) keyword[as] identifier[pool] :
identifier[results] = identifier[list] ( identifier[pool] . identifier[starmap] ( identifier[_cross_validation_for_one_voxel] ,
identifier[inlist] ))
keyword[else] :
identifier[results] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[task] [ literal[int] ]):
identifier[result] = identifier[_cross_validation_for_one_voxel] ( identifier[clf] , identifier[i] + identifier[task] [ literal[int] ],
identifier[self] . identifier[num_folds] ,
identifier[data] [ identifier[i] ,:,:],
identifier[self] . identifier[labels] )
identifier[results] . identifier[append] ( identifier[result] )
identifier[time2] = identifier[time] . identifier[time] ()
identifier[logger] . identifier[debug] (
literal[string] %
( identifier[task] [ literal[int] ],( identifier[time2] - identifier[time1] ))
)
keyword[return] identifier[results] | def _do_cross_validation(self, clf, data, task):
"""Run voxelwise cross validation based on correlation vectors.
clf: classification function
the classifier to be used in cross validation
data: 3D numpy array
If using sklearn.svm.SVC with precomputed kernel,
it is in shape [num_processed_voxels, num_epochs, num_epochs];
otherwise it is the input argument corr,
in shape [num_processed_voxels, num_epochs, num_voxels]
task: tuple (start_voxel_id, num_processed_voxels)
depicting the voxels assigned to compute
Returns
-------
results: list of tuple (voxel_id, accuracy)
the accuracy numbers of all voxels, in accuracy descending order
the length of array equals the number of assigned voxels
"""
time1 = time.time()
if isinstance(clf, sklearn.svm.SVC) and clf.kernel == 'precomputed' and self.use_multiprocessing:
inlist = [(clf, i + task[0], self.num_folds, data[i, :, :], self.labels) for i in range(task[1])]
with multiprocessing.Pool(self.process_num) as pool:
results = list(pool.starmap(_cross_validation_for_one_voxel, inlist)) # depends on [control=['with'], data=['pool']] # depends on [control=['if'], data=[]]
else:
results = []
for i in range(task[1]):
result = _cross_validation_for_one_voxel(clf, i + task[0], self.num_folds, data[i, :, :], self.labels)
results.append(result) # depends on [control=['for'], data=['i']]
time2 = time.time()
logger.debug('cross validation for %d voxels, takes %.2f s' % (task[1], time2 - time1))
return results |
def extract(self, sampler, feature_extractor, number_of_examples_per_scale = (100, 100), similarity_thresholds = (0.5, 0.8), parallel = None, mirror = False, use_every_nth_negative_scale = 1):
"""Extracts features from **all** images in **all** scales and writes them to file.
This function iterates over all images that are present in the internally stored list, and extracts features using the given ``feature_extractor`` for every image patch that the given ``sampler`` returns.
The final features will be stored in the ``feature_directory`` that is set in the constructor.
For each image, the ``sampler`` samples patch locations, which cover the whole image in different scales.
For each patch locations is tested, how similar they are to the face bounding boxes that belong to that image, using the Jaccard :py:meth:`BoundingBox.similarity`.
The similarity is compared to the ``similarity_thresholds``.
If it is smaller than the first threshold, the patch is considered as background, when it is greater the the second threshold, it is considered as a face, otherwise it is rejected.
Depending on the image resolution and the number of bounding boxes, this will usually result in some positive and thousands of negative patches per image.
To limit the total amount of training data, for all scales, only up to a given number of positive and negative patches are kept.
Also, to further limit the number of negative samples, only every ``use_every_nth_negative_scale`` scale is considered (for the positives, always all scales are processed).
To increase the number (especially of positive) examples, features can also be extracted for horizontally mirrored images.
Simply set the ``mirror`` parameter to ``True``.
Furthermore, this function is designed to be run using several parallel processes, e.g., using the `GridTK <https://pypi.python.org/pypi/gridtk>`_.
Each of the processes will run on a particular subset of the images, which is defined by the ``SGE_TASK_ID`` environment variable.
The ``parallel`` parameter defines the total number of parallel processes that are used.
**Parameters:**
``sampler`` : :py:class:`Sampler`
The sampler to use to sample patches of the images. Please assure that the sampler is set up such that it samples patch locations which can overlap with the face locations.
``feature_extractor`` : :py:class:`FeatureExtractor`
The feature extractor to be used to extract features from image patches
``number_of_examples_per_scale`` : (int, int)
The maximum number of positive and negative examples to extract for each scale of the image
``similarity_thresholds`` : (float, float)
The Jaccard similarity threshold, below which patch locations are considered to be negative, and above which patch locations are considered to be positive examples.
``parallel`` : int or ``None``
If given, the total number of parallel processes, which are used to extract features (the current process index is read from the ``SGE_TASK_ID`` environment variable)
``mirror`` : bool
Extract positive and negative samples also from horizontally mirrored images?
``use_every_nth_negative_scale`` : int
Skip some negative scales to decrease the number of negative examples, i.e., only extract and store negative features, when ``scale_counter % use_every_nth_negative_scale == 0``
.. note::
The ``scale_counter`` is not reset between images, so that we might get features from different scales in subsequent images.
"""
feature_file = self._feature_file(parallel)
bob.io.base.create_directories_safe(self.feature_directory)
if parallel is None or "SGE_TASK_ID" not in os.environ or os.environ["SGE_TASK_ID"] == '1':
extractor_file = os.path.join(self.feature_directory, "Extractor.hdf5")
hdf5 = bob.io.base.HDF5File(extractor_file, "w")
feature_extractor.save(hdf5)
del hdf5
total_positives, total_negatives = 0, 0
indices = parallel_part(range(len(self)), parallel)
if not indices:
logger.warning("The index range for the current parallel thread is empty.")
else:
logger.info("Extracting features for images in range %d - %d of %d", indices[0], indices[-1], len(self))
hdf5 = bob.io.base.HDF5File(feature_file, "w")
for index in indices:
hdf5.create_group("Image-%d" % index)
hdf5.cd("Image-%d" % index)
logger.debug("Processing file %d of %d: %s", index+1, indices[-1]+1, self.image_paths[index])
# load image
image = bob.io.base.load(self.image_paths[index])
if image.ndim == 3:
image = bob.ip.color.rgb_to_gray(image)
# get ground_truth bounding boxes
ground_truth = self.bounding_boxes[index]
# collect image and GT for originally and mirrored image
images = [image] if not mirror else [image, bob.ip.base.flop(image)]
ground_truths = [ground_truth] if not mirror else [ground_truth, [gt.mirror_x(image.shape[1]) for gt in ground_truth]]
parts = "om"
# now, sample
scale_counter = -1
for image, ground_truth, part in zip(images, ground_truths, parts):
for scale, scaled_image_shape in sampler.scales(image):
scale_counter += 1
scaled_gt = [gt.scale(scale) for gt in ground_truth]
positives = []
negatives = []
# iterate over all possible positions in the image
for bb in sampler.sample_scaled(scaled_image_shape):
# check if the patch is a positive example
positive = False
negative = True
for gt in scaled_gt:
similarity = bb.similarity(gt)
if similarity > similarity_thresholds[1]:
positive = True
break
if similarity > similarity_thresholds[0]:
negative = False
break
if positive:
positives.append(bb)
elif negative and scale_counter % use_every_nth_negative_scale == 0:
negatives.append(bb)
# per scale, limit the number of positive and negative samples
positives = [positives[i] for i in quasi_random_indices(len(positives), number_of_examples_per_scale[0])]
negatives = [negatives[i] for i in quasi_random_indices(len(negatives), number_of_examples_per_scale[1])]
# extract features
feature_extractor.prepare(image, scale)
# .. negative features
if negatives:
negative_features = numpy.zeros((len(negatives), feature_extractor.number_of_features), numpy.uint16)
for i, bb in enumerate(negatives):
feature_extractor.extract_all(bb, negative_features, i)
hdf5.set("Negatives-%s-%.5f" % (part,scale), negative_features)
total_negatives += len(negatives)
# positive features
if positives:
positive_features = numpy.zeros((len(positives), feature_extractor.number_of_features), numpy.uint16)
for i, bb in enumerate(positives):
feature_extractor.extract_all(bb, positive_features, i)
hdf5.set("Positives-%s-%.5f" % (part,scale), positive_features)
total_positives += len(positives)
# cd backwards after each image
hdf5.cd("..")
hdf5.set("TotalPositives", total_positives)
hdf5.set("TotalNegatives", total_negatives) | def function[extract, parameter[self, sampler, feature_extractor, number_of_examples_per_scale, similarity_thresholds, parallel, mirror, use_every_nth_negative_scale]]:
constant[Extracts features from **all** images in **all** scales and writes them to file.
This function iterates over all images that are present in the internally stored list, and extracts features using the given ``feature_extractor`` for every image patch that the given ``sampler`` returns.
The final features will be stored in the ``feature_directory`` that is set in the constructor.
For each image, the ``sampler`` samples patch locations, which cover the whole image in different scales.
For each patch locations is tested, how similar they are to the face bounding boxes that belong to that image, using the Jaccard :py:meth:`BoundingBox.similarity`.
The similarity is compared to the ``similarity_thresholds``.
If it is smaller than the first threshold, the patch is considered as background, when it is greater the the second threshold, it is considered as a face, otherwise it is rejected.
Depending on the image resolution and the number of bounding boxes, this will usually result in some positive and thousands of negative patches per image.
To limit the total amount of training data, for all scales, only up to a given number of positive and negative patches are kept.
Also, to further limit the number of negative samples, only every ``use_every_nth_negative_scale`` scale is considered (for the positives, always all scales are processed).
To increase the number (especially of positive) examples, features can also be extracted for horizontally mirrored images.
Simply set the ``mirror`` parameter to ``True``.
Furthermore, this function is designed to be run using several parallel processes, e.g., using the `GridTK <https://pypi.python.org/pypi/gridtk>`_.
Each of the processes will run on a particular subset of the images, which is defined by the ``SGE_TASK_ID`` environment variable.
The ``parallel`` parameter defines the total number of parallel processes that are used.
**Parameters:**
``sampler`` : :py:class:`Sampler`
The sampler to use to sample patches of the images. Please assure that the sampler is set up such that it samples patch locations which can overlap with the face locations.
``feature_extractor`` : :py:class:`FeatureExtractor`
The feature extractor to be used to extract features from image patches
``number_of_examples_per_scale`` : (int, int)
The maximum number of positive and negative examples to extract for each scale of the image
``similarity_thresholds`` : (float, float)
The Jaccard similarity threshold, below which patch locations are considered to be negative, and above which patch locations are considered to be positive examples.
``parallel`` : int or ``None``
If given, the total number of parallel processes, which are used to extract features (the current process index is read from the ``SGE_TASK_ID`` environment variable)
``mirror`` : bool
Extract positive and negative samples also from horizontally mirrored images?
``use_every_nth_negative_scale`` : int
Skip some negative scales to decrease the number of negative examples, i.e., only extract and store negative features, when ``scale_counter % use_every_nth_negative_scale == 0``
.. note::
The ``scale_counter`` is not reset between images, so that we might get features from different scales in subsequent images.
]
variable[feature_file] assign[=] call[name[self]._feature_file, parameter[name[parallel]]]
call[name[bob].io.base.create_directories_safe, parameter[name[self].feature_directory]]
if <ast.BoolOp object at 0x7da207f03dc0> begin[:]
variable[extractor_file] assign[=] call[name[os].path.join, parameter[name[self].feature_directory, constant[Extractor.hdf5]]]
variable[hdf5] assign[=] call[name[bob].io.base.HDF5File, parameter[name[extractor_file], constant[w]]]
call[name[feature_extractor].save, parameter[name[hdf5]]]
<ast.Delete object at 0x7da207f01e70>
<ast.Tuple object at 0x7da207f030d0> assign[=] tuple[[<ast.Constant object at 0x7da207f028c0>, <ast.Constant object at 0x7da207f02ce0>]]
variable[indices] assign[=] call[name[parallel_part], parameter[call[name[range], parameter[call[name[len], parameter[name[self]]]]], name[parallel]]]
if <ast.UnaryOp object at 0x7da207f025f0> begin[:]
call[name[logger].warning, parameter[constant[The index range for the current parallel thread is empty.]]]
variable[hdf5] assign[=] call[name[bob].io.base.HDF5File, parameter[name[feature_file], constant[w]]]
for taget[name[index]] in starred[name[indices]] begin[:]
call[name[hdf5].create_group, parameter[binary_operation[constant[Image-%d] <ast.Mod object at 0x7da2590d6920> name[index]]]]
call[name[hdf5].cd, parameter[binary_operation[constant[Image-%d] <ast.Mod object at 0x7da2590d6920> name[index]]]]
call[name[logger].debug, parameter[constant[Processing file %d of %d: %s], binary_operation[name[index] + constant[1]], binary_operation[call[name[indices]][<ast.UnaryOp object at 0x7da207f01bd0>] + constant[1]], call[name[self].image_paths][name[index]]]]
variable[image] assign[=] call[name[bob].io.base.load, parameter[call[name[self].image_paths][name[index]]]]
if compare[name[image].ndim equal[==] constant[3]] begin[:]
variable[image] assign[=] call[name[bob].ip.color.rgb_to_gray, parameter[name[image]]]
variable[ground_truth] assign[=] call[name[self].bounding_boxes][name[index]]
variable[images] assign[=] <ast.IfExp object at 0x7da207f02f20>
variable[ground_truths] assign[=] <ast.IfExp object at 0x7da207f02590>
variable[parts] assign[=] constant[om]
variable[scale_counter] assign[=] <ast.UnaryOp object at 0x7da20e9b11e0>
for taget[tuple[[<ast.Name object at 0x7da20e9b3730>, <ast.Name object at 0x7da20e9b3be0>, <ast.Name object at 0x7da20e9b2380>]]] in starred[call[name[zip], parameter[name[images], name[ground_truths], name[parts]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da20e9b31c0>, <ast.Name object at 0x7da20e9b1420>]]] in starred[call[name[sampler].scales, parameter[name[image]]]] begin[:]
<ast.AugAssign object at 0x7da20e9b3eb0>
variable[scaled_gt] assign[=] <ast.ListComp object at 0x7da20e9b0f10>
variable[positives] assign[=] list[[]]
variable[negatives] assign[=] list[[]]
for taget[name[bb]] in starred[call[name[sampler].sample_scaled, parameter[name[scaled_image_shape]]]] begin[:]
variable[positive] assign[=] constant[False]
variable[negative] assign[=] constant[True]
for taget[name[gt]] in starred[name[scaled_gt]] begin[:]
variable[similarity] assign[=] call[name[bb].similarity, parameter[name[gt]]]
if compare[name[similarity] greater[>] call[name[similarity_thresholds]][constant[1]]] begin[:]
variable[positive] assign[=] constant[True]
break
if compare[name[similarity] greater[>] call[name[similarity_thresholds]][constant[0]]] begin[:]
variable[negative] assign[=] constant[False]
break
if name[positive] begin[:]
call[name[positives].append, parameter[name[bb]]]
variable[positives] assign[=] <ast.ListComp object at 0x7da18f09dcc0>
variable[negatives] assign[=] <ast.ListComp object at 0x7da18f09c2b0>
call[name[feature_extractor].prepare, parameter[name[image], name[scale]]]
if name[negatives] begin[:]
variable[negative_features] assign[=] call[name[numpy].zeros, parameter[tuple[[<ast.Call object at 0x7da18f09c1f0>, <ast.Attribute object at 0x7da18f09eaa0>]], name[numpy].uint16]]
for taget[tuple[[<ast.Name object at 0x7da18f09e890>, <ast.Name object at 0x7da18f09f130>]]] in starred[call[name[enumerate], parameter[name[negatives]]]] begin[:]
call[name[feature_extractor].extract_all, parameter[name[bb], name[negative_features], name[i]]]
call[name[hdf5].set, parameter[binary_operation[constant[Negatives-%s-%.5f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f09c340>, <ast.Name object at 0x7da18f09d570>]]], name[negative_features]]]
<ast.AugAssign object at 0x7da18f09ca30>
if name[positives] begin[:]
variable[positive_features] assign[=] call[name[numpy].zeros, parameter[tuple[[<ast.Call object at 0x7da18f09f010>, <ast.Attribute object at 0x7da18f09e260>]], name[numpy].uint16]]
for taget[tuple[[<ast.Name object at 0x7da18f09c730>, <ast.Name object at 0x7da18f09ea10>]]] in starred[call[name[enumerate], parameter[name[positives]]]] begin[:]
call[name[feature_extractor].extract_all, parameter[name[bb], name[positive_features], name[i]]]
call[name[hdf5].set, parameter[binary_operation[constant[Positives-%s-%.5f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f09ec20>, <ast.Name object at 0x7da18f09f1f0>]]], name[positive_features]]]
<ast.AugAssign object at 0x7da18f09c070>
call[name[hdf5].cd, parameter[constant[..]]]
call[name[hdf5].set, parameter[constant[TotalPositives], name[total_positives]]]
call[name[hdf5].set, parameter[constant[TotalNegatives], name[total_negatives]]] | keyword[def] identifier[extract] ( identifier[self] , identifier[sampler] , identifier[feature_extractor] , identifier[number_of_examples_per_scale] =( literal[int] , literal[int] ), identifier[similarity_thresholds] =( literal[int] , literal[int] ), identifier[parallel] = keyword[None] , identifier[mirror] = keyword[False] , identifier[use_every_nth_negative_scale] = literal[int] ):
literal[string]
identifier[feature_file] = identifier[self] . identifier[_feature_file] ( identifier[parallel] )
identifier[bob] . identifier[io] . identifier[base] . identifier[create_directories_safe] ( identifier[self] . identifier[feature_directory] )
keyword[if] identifier[parallel] keyword[is] keyword[None] keyword[or] literal[string] keyword[not] keyword[in] identifier[os] . identifier[environ] keyword[or] identifier[os] . identifier[environ] [ literal[string] ]== literal[string] :
identifier[extractor_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[feature_directory] , literal[string] )
identifier[hdf5] = identifier[bob] . identifier[io] . identifier[base] . identifier[HDF5File] ( identifier[extractor_file] , literal[string] )
identifier[feature_extractor] . identifier[save] ( identifier[hdf5] )
keyword[del] identifier[hdf5]
identifier[total_positives] , identifier[total_negatives] = literal[int] , literal[int]
identifier[indices] = identifier[parallel_part] ( identifier[range] ( identifier[len] ( identifier[self] )), identifier[parallel] )
keyword[if] keyword[not] identifier[indices] :
identifier[logger] . identifier[warning] ( literal[string] )
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] , identifier[indices] [ literal[int] ], identifier[indices] [- literal[int] ], identifier[len] ( identifier[self] ))
identifier[hdf5] = identifier[bob] . identifier[io] . identifier[base] . identifier[HDF5File] ( identifier[feature_file] , literal[string] )
keyword[for] identifier[index] keyword[in] identifier[indices] :
identifier[hdf5] . identifier[create_group] ( literal[string] % identifier[index] )
identifier[hdf5] . identifier[cd] ( literal[string] % identifier[index] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[index] + literal[int] , identifier[indices] [- literal[int] ]+ literal[int] , identifier[self] . identifier[image_paths] [ identifier[index] ])
identifier[image] = identifier[bob] . identifier[io] . identifier[base] . identifier[load] ( identifier[self] . identifier[image_paths] [ identifier[index] ])
keyword[if] identifier[image] . identifier[ndim] == literal[int] :
identifier[image] = identifier[bob] . identifier[ip] . identifier[color] . identifier[rgb_to_gray] ( identifier[image] )
identifier[ground_truth] = identifier[self] . identifier[bounding_boxes] [ identifier[index] ]
identifier[images] =[ identifier[image] ] keyword[if] keyword[not] identifier[mirror] keyword[else] [ identifier[image] , identifier[bob] . identifier[ip] . identifier[base] . identifier[flop] ( identifier[image] )]
identifier[ground_truths] =[ identifier[ground_truth] ] keyword[if] keyword[not] identifier[mirror] keyword[else] [ identifier[ground_truth] ,[ identifier[gt] . identifier[mirror_x] ( identifier[image] . identifier[shape] [ literal[int] ]) keyword[for] identifier[gt] keyword[in] identifier[ground_truth] ]]
identifier[parts] = literal[string]
identifier[scale_counter] =- literal[int]
keyword[for] identifier[image] , identifier[ground_truth] , identifier[part] keyword[in] identifier[zip] ( identifier[images] , identifier[ground_truths] , identifier[parts] ):
keyword[for] identifier[scale] , identifier[scaled_image_shape] keyword[in] identifier[sampler] . identifier[scales] ( identifier[image] ):
identifier[scale_counter] += literal[int]
identifier[scaled_gt] =[ identifier[gt] . identifier[scale] ( identifier[scale] ) keyword[for] identifier[gt] keyword[in] identifier[ground_truth] ]
identifier[positives] =[]
identifier[negatives] =[]
keyword[for] identifier[bb] keyword[in] identifier[sampler] . identifier[sample_scaled] ( identifier[scaled_image_shape] ):
identifier[positive] = keyword[False]
identifier[negative] = keyword[True]
keyword[for] identifier[gt] keyword[in] identifier[scaled_gt] :
identifier[similarity] = identifier[bb] . identifier[similarity] ( identifier[gt] )
keyword[if] identifier[similarity] > identifier[similarity_thresholds] [ literal[int] ]:
identifier[positive] = keyword[True]
keyword[break]
keyword[if] identifier[similarity] > identifier[similarity_thresholds] [ literal[int] ]:
identifier[negative] = keyword[False]
keyword[break]
keyword[if] identifier[positive] :
identifier[positives] . identifier[append] ( identifier[bb] )
keyword[elif] identifier[negative] keyword[and] identifier[scale_counter] % identifier[use_every_nth_negative_scale] == literal[int] :
identifier[negatives] . identifier[append] ( identifier[bb] )
identifier[positives] =[ identifier[positives] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[quasi_random_indices] ( identifier[len] ( identifier[positives] ), identifier[number_of_examples_per_scale] [ literal[int] ])]
identifier[negatives] =[ identifier[negatives] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[quasi_random_indices] ( identifier[len] ( identifier[negatives] ), identifier[number_of_examples_per_scale] [ literal[int] ])]
identifier[feature_extractor] . identifier[prepare] ( identifier[image] , identifier[scale] )
keyword[if] identifier[negatives] :
identifier[negative_features] = identifier[numpy] . identifier[zeros] (( identifier[len] ( identifier[negatives] ), identifier[feature_extractor] . identifier[number_of_features] ), identifier[numpy] . identifier[uint16] )
keyword[for] identifier[i] , identifier[bb] keyword[in] identifier[enumerate] ( identifier[negatives] ):
identifier[feature_extractor] . identifier[extract_all] ( identifier[bb] , identifier[negative_features] , identifier[i] )
identifier[hdf5] . identifier[set] ( literal[string] %( identifier[part] , identifier[scale] ), identifier[negative_features] )
identifier[total_negatives] += identifier[len] ( identifier[negatives] )
keyword[if] identifier[positives] :
identifier[positive_features] = identifier[numpy] . identifier[zeros] (( identifier[len] ( identifier[positives] ), identifier[feature_extractor] . identifier[number_of_features] ), identifier[numpy] . identifier[uint16] )
keyword[for] identifier[i] , identifier[bb] keyword[in] identifier[enumerate] ( identifier[positives] ):
identifier[feature_extractor] . identifier[extract_all] ( identifier[bb] , identifier[positive_features] , identifier[i] )
identifier[hdf5] . identifier[set] ( literal[string] %( identifier[part] , identifier[scale] ), identifier[positive_features] )
identifier[total_positives] += identifier[len] ( identifier[positives] )
identifier[hdf5] . identifier[cd] ( literal[string] )
identifier[hdf5] . identifier[set] ( literal[string] , identifier[total_positives] )
identifier[hdf5] . identifier[set] ( literal[string] , identifier[total_negatives] ) | def extract(self, sampler, feature_extractor, number_of_examples_per_scale=(100, 100), similarity_thresholds=(0.5, 0.8), parallel=None, mirror=False, use_every_nth_negative_scale=1):
"""Extracts features from **all** images in **all** scales and writes them to file.
This function iterates over all images that are present in the internally stored list, and extracts features using the given ``feature_extractor`` for every image patch that the given ``sampler`` returns.
The final features will be stored in the ``feature_directory`` that is set in the constructor.
For each image, the ``sampler`` samples patch locations, which cover the whole image in different scales.
For each patch locations is tested, how similar they are to the face bounding boxes that belong to that image, using the Jaccard :py:meth:`BoundingBox.similarity`.
The similarity is compared to the ``similarity_thresholds``.
If it is smaller than the first threshold, the patch is considered as background, when it is greater the the second threshold, it is considered as a face, otherwise it is rejected.
Depending on the image resolution and the number of bounding boxes, this will usually result in some positive and thousands of negative patches per image.
To limit the total amount of training data, for all scales, only up to a given number of positive and negative patches are kept.
Also, to further limit the number of negative samples, only every ``use_every_nth_negative_scale`` scale is considered (for the positives, always all scales are processed).
To increase the number (especially of positive) examples, features can also be extracted for horizontally mirrored images.
Simply set the ``mirror`` parameter to ``True``.
Furthermore, this function is designed to be run using several parallel processes, e.g., using the `GridTK <https://pypi.python.org/pypi/gridtk>`_.
Each of the processes will run on a particular subset of the images, which is defined by the ``SGE_TASK_ID`` environment variable.
The ``parallel`` parameter defines the total number of parallel processes that are used.
**Parameters:**
``sampler`` : :py:class:`Sampler`
The sampler to use to sample patches of the images. Please assure that the sampler is set up such that it samples patch locations which can overlap with the face locations.
``feature_extractor`` : :py:class:`FeatureExtractor`
The feature extractor to be used to extract features from image patches
``number_of_examples_per_scale`` : (int, int)
The maximum number of positive and negative examples to extract for each scale of the image
``similarity_thresholds`` : (float, float)
The Jaccard similarity threshold, below which patch locations are considered to be negative, and above which patch locations are considered to be positive examples.
``parallel`` : int or ``None``
If given, the total number of parallel processes, which are used to extract features (the current process index is read from the ``SGE_TASK_ID`` environment variable)
``mirror`` : bool
Extract positive and negative samples also from horizontally mirrored images?
``use_every_nth_negative_scale`` : int
Skip some negative scales to decrease the number of negative examples, i.e., only extract and store negative features, when ``scale_counter % use_every_nth_negative_scale == 0``
.. note::
The ``scale_counter`` is not reset between images, so that we might get features from different scales in subsequent images.
"""
feature_file = self._feature_file(parallel)
bob.io.base.create_directories_safe(self.feature_directory)
if parallel is None or 'SGE_TASK_ID' not in os.environ or os.environ['SGE_TASK_ID'] == '1':
extractor_file = os.path.join(self.feature_directory, 'Extractor.hdf5')
hdf5 = bob.io.base.HDF5File(extractor_file, 'w')
feature_extractor.save(hdf5)
del hdf5 # depends on [control=['if'], data=[]]
(total_positives, total_negatives) = (0, 0)
indices = parallel_part(range(len(self)), parallel)
if not indices:
logger.warning('The index range for the current parallel thread is empty.') # depends on [control=['if'], data=[]]
else:
logger.info('Extracting features for images in range %d - %d of %d', indices[0], indices[-1], len(self))
hdf5 = bob.io.base.HDF5File(feature_file, 'w')
for index in indices:
hdf5.create_group('Image-%d' % index)
hdf5.cd('Image-%d' % index)
logger.debug('Processing file %d of %d: %s', index + 1, indices[-1] + 1, self.image_paths[index])
# load image
image = bob.io.base.load(self.image_paths[index])
if image.ndim == 3:
image = bob.ip.color.rgb_to_gray(image) # depends on [control=['if'], data=[]]
# get ground_truth bounding boxes
ground_truth = self.bounding_boxes[index]
# collect image and GT for originally and mirrored image
images = [image] if not mirror else [image, bob.ip.base.flop(image)]
ground_truths = [ground_truth] if not mirror else [ground_truth, [gt.mirror_x(image.shape[1]) for gt in ground_truth]]
parts = 'om'
# now, sample
scale_counter = -1
for (image, ground_truth, part) in zip(images, ground_truths, parts):
for (scale, scaled_image_shape) in sampler.scales(image):
scale_counter += 1
scaled_gt = [gt.scale(scale) for gt in ground_truth]
positives = []
negatives = []
# iterate over all possible positions in the image
for bb in sampler.sample_scaled(scaled_image_shape):
# check if the patch is a positive example
positive = False
negative = True
for gt in scaled_gt:
similarity = bb.similarity(gt)
if similarity > similarity_thresholds[1]:
positive = True
break # depends on [control=['if'], data=[]]
if similarity > similarity_thresholds[0]:
negative = False
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['gt']]
if positive:
positives.append(bb) # depends on [control=['if'], data=[]]
elif negative and scale_counter % use_every_nth_negative_scale == 0:
negatives.append(bb) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['bb']]
# per scale, limit the number of positive and negative samples
positives = [positives[i] for i in quasi_random_indices(len(positives), number_of_examples_per_scale[0])]
negatives = [negatives[i] for i in quasi_random_indices(len(negatives), number_of_examples_per_scale[1])]
# extract features
feature_extractor.prepare(image, scale)
# .. negative features
if negatives:
negative_features = numpy.zeros((len(negatives), feature_extractor.number_of_features), numpy.uint16)
for (i, bb) in enumerate(negatives):
feature_extractor.extract_all(bb, negative_features, i) # depends on [control=['for'], data=[]]
hdf5.set('Negatives-%s-%.5f' % (part, scale), negative_features)
total_negatives += len(negatives) # depends on [control=['if'], data=[]]
# positive features
if positives:
positive_features = numpy.zeros((len(positives), feature_extractor.number_of_features), numpy.uint16)
for (i, bb) in enumerate(positives):
feature_extractor.extract_all(bb, positive_features, i) # depends on [control=['for'], data=[]]
hdf5.set('Positives-%s-%.5f' % (part, scale), positive_features)
total_positives += len(positives) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
# cd backwards after each image
hdf5.cd('..') # depends on [control=['for'], data=['index']]
hdf5.set('TotalPositives', total_positives)
hdf5.set('TotalNegatives', total_negatives) |
def getEthernetStatistic(self, lanInterfaceId=1, timeout=1):
"""Execute GetStatistics action to get statistics of the Ethernet interface.
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: statisticss of the Ethernet interface.
:rtype: EthernetStatistic
"""
namespace = Lan.getServiceType("getEthernetStatistic") + str(lanInterfaceId)
uri = self.getControlURL(namespace)
results = self.execute(uri, namespace, "GetStatistics", timeout=timeout)
return EthernetStatistic(results) | def function[getEthernetStatistic, parameter[self, lanInterfaceId, timeout]]:
constant[Execute GetStatistics action to get statistics of the Ethernet interface.
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: statisticss of the Ethernet interface.
:rtype: EthernetStatistic
]
variable[namespace] assign[=] binary_operation[call[name[Lan].getServiceType, parameter[constant[getEthernetStatistic]]] + call[name[str], parameter[name[lanInterfaceId]]]]
variable[uri] assign[=] call[name[self].getControlURL, parameter[name[namespace]]]
variable[results] assign[=] call[name[self].execute, parameter[name[uri], name[namespace], constant[GetStatistics]]]
return[call[name[EthernetStatistic], parameter[name[results]]]] | keyword[def] identifier[getEthernetStatistic] ( identifier[self] , identifier[lanInterfaceId] = literal[int] , identifier[timeout] = literal[int] ):
literal[string]
identifier[namespace] = identifier[Lan] . identifier[getServiceType] ( literal[string] )+ identifier[str] ( identifier[lanInterfaceId] )
identifier[uri] = identifier[self] . identifier[getControlURL] ( identifier[namespace] )
identifier[results] = identifier[self] . identifier[execute] ( identifier[uri] , identifier[namespace] , literal[string] , identifier[timeout] = identifier[timeout] )
keyword[return] identifier[EthernetStatistic] ( identifier[results] ) | def getEthernetStatistic(self, lanInterfaceId=1, timeout=1):
"""Execute GetStatistics action to get statistics of the Ethernet interface.
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: statisticss of the Ethernet interface.
:rtype: EthernetStatistic
"""
namespace = Lan.getServiceType('getEthernetStatistic') + str(lanInterfaceId)
uri = self.getControlURL(namespace)
results = self.execute(uri, namespace, 'GetStatistics', timeout=timeout)
return EthernetStatistic(results) |
def create_lb(kwargs=None, call=None):
'''
Create a load-balancer configuration.
CLI Example:
.. code-block:: bash
salt-cloud -f create_lb gce name=lb region=us-central1 ports=80
'''
if call != 'function':
raise SaltCloudSystemExit(
'The create_lb function must be called with -f or --function.'
)
if not kwargs or 'name' not in kwargs:
log.error(
'A name must be specified when creating a health check.'
)
return False
if 'ports' not in kwargs:
log.error(
'A port or port-range must be specified for the load-balancer.'
)
return False
if 'region' not in kwargs:
log.error(
'A region must be specified for the load-balancer.'
)
return False
if 'members' not in kwargs:
log.error(
'A comma-separated list of members must be specified.'
)
return False
name = kwargs['name']
ports = kwargs['ports']
ex_region = kwargs['region']
members = kwargs.get('members').split(',')
protocol = kwargs.get('protocol', 'tcp')
algorithm = kwargs.get('algorithm', None)
ex_healthchecks = kwargs.get('healthchecks', None)
# pylint: disable=W0511
conn = get_conn()
lb_conn = get_lb_conn(conn)
ex_address = kwargs.get('address', None)
if ex_address is not None:
ex_address = __create_orget_address(conn, ex_address, ex_region)
if ex_healthchecks:
ex_healthchecks = ex_healthchecks.split(',')
__utils__['cloud.fire_event'](
'event',
'create load_balancer',
'salt/cloud/loadbalancer/creating',
args=kwargs,
sock_dir=__opts__['sock_dir'],
transport=__opts__['transport']
)
lb = lb_conn.create_balancer(
name, ports, protocol, algorithm, members,
ex_region=ex_region, ex_healthchecks=ex_healthchecks,
ex_address=ex_address
)
__utils__['cloud.fire_event'](
'event',
'created load_balancer',
'salt/cloud/loadbalancer/created',
args=kwargs,
sock_dir=__opts__['sock_dir'],
transport=__opts__['transport']
)
return _expand_balancer(lb) | def function[create_lb, parameter[kwargs, call]]:
constant[
Create a load-balancer configuration.
CLI Example:
.. code-block:: bash
salt-cloud -f create_lb gce name=lb region=us-central1 ports=80
]
if compare[name[call] not_equal[!=] constant[function]] begin[:]
<ast.Raise object at 0x7da207f022c0>
if <ast.BoolOp object at 0x7da207f008e0> begin[:]
call[name[log].error, parameter[constant[A name must be specified when creating a health check.]]]
return[constant[False]]
if compare[constant[ports] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[log].error, parameter[constant[A port or port-range must be specified for the load-balancer.]]]
return[constant[False]]
if compare[constant[region] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[log].error, parameter[constant[A region must be specified for the load-balancer.]]]
return[constant[False]]
if compare[constant[members] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[log].error, parameter[constant[A comma-separated list of members must be specified.]]]
return[constant[False]]
variable[name] assign[=] call[name[kwargs]][constant[name]]
variable[ports] assign[=] call[name[kwargs]][constant[ports]]
variable[ex_region] assign[=] call[name[kwargs]][constant[region]]
variable[members] assign[=] call[call[name[kwargs].get, parameter[constant[members]]].split, parameter[constant[,]]]
variable[protocol] assign[=] call[name[kwargs].get, parameter[constant[protocol], constant[tcp]]]
variable[algorithm] assign[=] call[name[kwargs].get, parameter[constant[algorithm], constant[None]]]
variable[ex_healthchecks] assign[=] call[name[kwargs].get, parameter[constant[healthchecks], constant[None]]]
variable[conn] assign[=] call[name[get_conn], parameter[]]
variable[lb_conn] assign[=] call[name[get_lb_conn], parameter[name[conn]]]
variable[ex_address] assign[=] call[name[kwargs].get, parameter[constant[address], constant[None]]]
if compare[name[ex_address] is_not constant[None]] begin[:]
variable[ex_address] assign[=] call[name[__create_orget_address], parameter[name[conn], name[ex_address], name[ex_region]]]
if name[ex_healthchecks] begin[:]
variable[ex_healthchecks] assign[=] call[name[ex_healthchecks].split, parameter[constant[,]]]
call[call[name[__utils__]][constant[cloud.fire_event]], parameter[constant[event], constant[create load_balancer], constant[salt/cloud/loadbalancer/creating]]]
variable[lb] assign[=] call[name[lb_conn].create_balancer, parameter[name[name], name[ports], name[protocol], name[algorithm], name[members]]]
call[call[name[__utils__]][constant[cloud.fire_event]], parameter[constant[event], constant[created load_balancer], constant[salt/cloud/loadbalancer/created]]]
return[call[name[_expand_balancer], parameter[name[lb]]]] | keyword[def] identifier[create_lb] ( identifier[kwargs] = keyword[None] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
)
keyword[if] keyword[not] identifier[kwargs] keyword[or] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[log] . identifier[error] (
literal[string]
)
keyword[return] keyword[False]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[log] . identifier[error] (
literal[string]
)
keyword[return] keyword[False]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[log] . identifier[error] (
literal[string]
)
keyword[return] keyword[False]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[log] . identifier[error] (
literal[string]
)
keyword[return] keyword[False]
identifier[name] = identifier[kwargs] [ literal[string] ]
identifier[ports] = identifier[kwargs] [ literal[string] ]
identifier[ex_region] = identifier[kwargs] [ literal[string] ]
identifier[members] = identifier[kwargs] . identifier[get] ( literal[string] ). identifier[split] ( literal[string] )
identifier[protocol] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[algorithm] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[ex_healthchecks] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[conn] = identifier[get_conn] ()
identifier[lb_conn] = identifier[get_lb_conn] ( identifier[conn] )
identifier[ex_address] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[ex_address] keyword[is] keyword[not] keyword[None] :
identifier[ex_address] = identifier[__create_orget_address] ( identifier[conn] , identifier[ex_address] , identifier[ex_region] )
keyword[if] identifier[ex_healthchecks] :
identifier[ex_healthchecks] = identifier[ex_healthchecks] . identifier[split] ( literal[string] )
identifier[__utils__] [ literal[string] ](
literal[string] ,
literal[string] ,
literal[string] ,
identifier[args] = identifier[kwargs] ,
identifier[sock_dir] = identifier[__opts__] [ literal[string] ],
identifier[transport] = identifier[__opts__] [ literal[string] ]
)
identifier[lb] = identifier[lb_conn] . identifier[create_balancer] (
identifier[name] , identifier[ports] , identifier[protocol] , identifier[algorithm] , identifier[members] ,
identifier[ex_region] = identifier[ex_region] , identifier[ex_healthchecks] = identifier[ex_healthchecks] ,
identifier[ex_address] = identifier[ex_address]
)
identifier[__utils__] [ literal[string] ](
literal[string] ,
literal[string] ,
literal[string] ,
identifier[args] = identifier[kwargs] ,
identifier[sock_dir] = identifier[__opts__] [ literal[string] ],
identifier[transport] = identifier[__opts__] [ literal[string] ]
)
keyword[return] identifier[_expand_balancer] ( identifier[lb] ) | def create_lb(kwargs=None, call=None):
"""
Create a load-balancer configuration.
CLI Example:
.. code-block:: bash
salt-cloud -f create_lb gce name=lb region=us-central1 ports=80
"""
if call != 'function':
raise SaltCloudSystemExit('The create_lb function must be called with -f or --function.') # depends on [control=['if'], data=[]]
if not kwargs or 'name' not in kwargs:
log.error('A name must be specified when creating a health check.')
return False # depends on [control=['if'], data=[]]
if 'ports' not in kwargs:
log.error('A port or port-range must be specified for the load-balancer.')
return False # depends on [control=['if'], data=[]]
if 'region' not in kwargs:
log.error('A region must be specified for the load-balancer.')
return False # depends on [control=['if'], data=[]]
if 'members' not in kwargs:
log.error('A comma-separated list of members must be specified.')
return False # depends on [control=['if'], data=[]]
name = kwargs['name']
ports = kwargs['ports']
ex_region = kwargs['region']
members = kwargs.get('members').split(',')
protocol = kwargs.get('protocol', 'tcp')
algorithm = kwargs.get('algorithm', None)
ex_healthchecks = kwargs.get('healthchecks', None)
# pylint: disable=W0511
conn = get_conn()
lb_conn = get_lb_conn(conn)
ex_address = kwargs.get('address', None)
if ex_address is not None:
ex_address = __create_orget_address(conn, ex_address, ex_region) # depends on [control=['if'], data=['ex_address']]
if ex_healthchecks:
ex_healthchecks = ex_healthchecks.split(',') # depends on [control=['if'], data=[]]
__utils__['cloud.fire_event']('event', 'create load_balancer', 'salt/cloud/loadbalancer/creating', args=kwargs, sock_dir=__opts__['sock_dir'], transport=__opts__['transport'])
lb = lb_conn.create_balancer(name, ports, protocol, algorithm, members, ex_region=ex_region, ex_healthchecks=ex_healthchecks, ex_address=ex_address)
__utils__['cloud.fire_event']('event', 'created load_balancer', 'salt/cloud/loadbalancer/created', args=kwargs, sock_dir=__opts__['sock_dir'], transport=__opts__['transport'])
return _expand_balancer(lb) |
def _subprocess(cmd):
'''
Function to standardize the subprocess call
'''
log.debug('Running: "%s"', ' '.join(cmd))
try:
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
ret = salt.utils.stringutils.to_unicode(proc.communicate()[0]).strip()
retcode = proc.wait()
if ret:
return ret
elif retcode != 1:
return True
else:
return False
except OSError as err:
log.error(err)
return False | def function[_subprocess, parameter[cmd]]:
constant[
Function to standardize the subprocess call
]
call[name[log].debug, parameter[constant[Running: "%s"], call[constant[ ].join, parameter[name[cmd]]]]]
<ast.Try object at 0x7da1b208aa70> | keyword[def] identifier[_subprocess] ( identifier[cmd] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] , literal[string] . identifier[join] ( identifier[cmd] ))
keyword[try] :
identifier[proc] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] )
identifier[ret] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] ( identifier[proc] . identifier[communicate] ()[ literal[int] ]). identifier[strip] ()
identifier[retcode] = identifier[proc] . identifier[wait] ()
keyword[if] identifier[ret] :
keyword[return] identifier[ret]
keyword[elif] identifier[retcode] != literal[int] :
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False]
keyword[except] identifier[OSError] keyword[as] identifier[err] :
identifier[log] . identifier[error] ( identifier[err] )
keyword[return] keyword[False] | def _subprocess(cmd):
"""
Function to standardize the subprocess call
"""
log.debug('Running: "%s"', ' '.join(cmd))
try:
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
ret = salt.utils.stringutils.to_unicode(proc.communicate()[0]).strip()
retcode = proc.wait()
if ret:
return ret # depends on [control=['if'], data=[]]
elif retcode != 1:
return True # depends on [control=['if'], data=[]]
else:
return False # depends on [control=['try'], data=[]]
except OSError as err:
log.error(err)
return False # depends on [control=['except'], data=['err']] |
def _get_to_many_relationship_value(self, obj, column):
"""
Get the resulting datas for a One To many or a many to many relationship
:param obj obj: The instance we manage
:param dict column: The column description dictionnary
:returns: The associated value
"""
related_key = column.get('related_key', None)
related = getattr(obj, column['__col__'].key)
value = {}
if related:
total = len(related)
for index, rel_obj in enumerate(related):
if related_key:
compiled_res = self._get_formatted_val(
rel_obj, related_key, column
)
else:
compiled_res = column['__prop__'].compile_obj(
rel_obj
)
value['item_%d' % index] = compiled_res
value[str(index)] = compiled_res
value["_" + str(index)] = compiled_res
if index == 0:
value['first'] = compiled_res
if index == total - 1:
value['last'] = compiled_res
return value | def function[_get_to_many_relationship_value, parameter[self, obj, column]]:
constant[
Get the resulting datas for a One To many or a many to many relationship
:param obj obj: The instance we manage
:param dict column: The column description dictionnary
:returns: The associated value
]
variable[related_key] assign[=] call[name[column].get, parameter[constant[related_key], constant[None]]]
variable[related] assign[=] call[name[getattr], parameter[name[obj], call[name[column]][constant[__col__]].key]]
variable[value] assign[=] dictionary[[], []]
if name[related] begin[:]
variable[total] assign[=] call[name[len], parameter[name[related]]]
for taget[tuple[[<ast.Name object at 0x7da18f812a10>, <ast.Name object at 0x7da18f8122f0>]]] in starred[call[name[enumerate], parameter[name[related]]]] begin[:]
if name[related_key] begin[:]
variable[compiled_res] assign[=] call[name[self]._get_formatted_val, parameter[name[rel_obj], name[related_key], name[column]]]
call[name[value]][binary_operation[constant[item_%d] <ast.Mod object at 0x7da2590d6920> name[index]]] assign[=] name[compiled_res]
call[name[value]][call[name[str], parameter[name[index]]]] assign[=] name[compiled_res]
call[name[value]][binary_operation[constant[_] + call[name[str], parameter[name[index]]]]] assign[=] name[compiled_res]
if compare[name[index] equal[==] constant[0]] begin[:]
call[name[value]][constant[first]] assign[=] name[compiled_res]
if compare[name[index] equal[==] binary_operation[name[total] - constant[1]]] begin[:]
call[name[value]][constant[last]] assign[=] name[compiled_res]
return[name[value]] | keyword[def] identifier[_get_to_many_relationship_value] ( identifier[self] , identifier[obj] , identifier[column] ):
literal[string]
identifier[related_key] = identifier[column] . identifier[get] ( literal[string] , keyword[None] )
identifier[related] = identifier[getattr] ( identifier[obj] , identifier[column] [ literal[string] ]. identifier[key] )
identifier[value] ={}
keyword[if] identifier[related] :
identifier[total] = identifier[len] ( identifier[related] )
keyword[for] identifier[index] , identifier[rel_obj] keyword[in] identifier[enumerate] ( identifier[related] ):
keyword[if] identifier[related_key] :
identifier[compiled_res] = identifier[self] . identifier[_get_formatted_val] (
identifier[rel_obj] , identifier[related_key] , identifier[column]
)
keyword[else] :
identifier[compiled_res] = identifier[column] [ literal[string] ]. identifier[compile_obj] (
identifier[rel_obj]
)
identifier[value] [ literal[string] % identifier[index] ]= identifier[compiled_res]
identifier[value] [ identifier[str] ( identifier[index] )]= identifier[compiled_res]
identifier[value] [ literal[string] + identifier[str] ( identifier[index] )]= identifier[compiled_res]
keyword[if] identifier[index] == literal[int] :
identifier[value] [ literal[string] ]= identifier[compiled_res]
keyword[if] identifier[index] == identifier[total] - literal[int] :
identifier[value] [ literal[string] ]= identifier[compiled_res]
keyword[return] identifier[value] | def _get_to_many_relationship_value(self, obj, column):
"""
Get the resulting datas for a One To many or a many to many relationship
:param obj obj: The instance we manage
:param dict column: The column description dictionnary
:returns: The associated value
"""
related_key = column.get('related_key', None)
related = getattr(obj, column['__col__'].key)
value = {}
if related:
total = len(related)
for (index, rel_obj) in enumerate(related):
if related_key:
compiled_res = self._get_formatted_val(rel_obj, related_key, column) # depends on [control=['if'], data=[]]
else:
compiled_res = column['__prop__'].compile_obj(rel_obj)
value['item_%d' % index] = compiled_res
value[str(index)] = compiled_res
value['_' + str(index)] = compiled_res
if index == 0:
value['first'] = compiled_res # depends on [control=['if'], data=[]]
if index == total - 1:
value['last'] = compiled_res # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
return value |
def offset(self, offset, unit=None):
    """Increment (or decrement) the given period with offset units.

    When ``unit`` is omitted, the period's own unit is used.  ``offset``
    may also be the special strings ``'first-of'`` / ``'last-of'`` (see
    the examples below).

    >>> period('day', 2014).offset(1)
    Period(('day', Instant((2014, 1, 2)), 365))
    >>> period('day', 2014).offset(1, 'day')
    Period(('day', Instant((2014, 1, 2)), 365))
    >>> period('day', 2014).offset(1, 'month')
    Period(('day', Instant((2014, 2, 1)), 365))
    >>> period('day', 2014).offset(1, 'year')
    Period(('day', Instant((2015, 1, 1)), 365))
    >>> period('month', 2014).offset(1)
    Period(('month', Instant((2014, 2, 1)), 12))
    >>> period('month', 2014).offset(1, 'day')
    Period(('month', Instant((2014, 1, 2)), 12))
    >>> period('month', 2014).offset(1, 'month')
    Period(('month', Instant((2014, 2, 1)), 12))
    >>> period('month', 2014).offset(1, 'year')
    Period(('month', Instant((2015, 1, 1)), 12))
    >>> period('year', 2014).offset(1)
    Period(('year', Instant((2015, 1, 1)), 1))
    >>> period('year', 2014).offset(1, 'day')
    Period(('year', Instant((2014, 1, 2)), 1))
    >>> period('year', 2014).offset(1, 'month')
    Period(('year', Instant((2014, 2, 1)), 1))
    >>> period('year', 2014).offset(1, 'year')
    Period(('year', Instant((2015, 1, 1)), 1))
    >>> period('day', '2011-2-28').offset(1)
    Period(('day', Instant((2011, 3, 1)), 1))
    >>> period('month', '2011-2-28').offset(1)
    Period(('month', Instant((2011, 3, 28)), 1))
    >>> period('year', '2011-2-28').offset(1)
    Period(('year', Instant((2012, 2, 28)), 1))
    >>> period('day', '2011-3-1').offset(-1)
    Period(('day', Instant((2011, 2, 28)), 1))
    >>> period('month', '2011-3-1').offset(-1)
    Period(('month', Instant((2011, 2, 1)), 1))
    >>> period('year', '2011-3-1').offset(-1)
    Period(('year', Instant((2010, 3, 1)), 1))
    >>> period('day', '2014-1-30').offset(3)
    Period(('day', Instant((2014, 2, 2)), 1))
    >>> period('month', '2014-1-30').offset(3)
    Period(('month', Instant((2014, 4, 30)), 1))
    >>> period('year', '2014-1-30').offset(3)
    Period(('year', Instant((2017, 1, 30)), 1))
    >>> period('day', 2014).offset(-3)
    Period(('day', Instant((2013, 12, 29)), 365))
    >>> period('month', 2014).offset(-3)
    Period(('month', Instant((2013, 10, 1)), 12))
    >>> period('year', 2014).offset(-3)
    Period(('year', Instant((2011, 1, 1)), 1))
    >>> period('day', '2014-2-3').offset('first-of', 'month')
    Period(('day', Instant((2014, 2, 1)), 1))
    >>> period('day', '2014-2-3').offset('first-of', 'year')
    Period(('day', Instant((2014, 1, 1)), 1))
    >>> period('day', '2014-2-3', 4).offset('first-of', 'month')
    Period(('day', Instant((2014, 2, 1)), 4))
    >>> period('day', '2014-2-3', 4).offset('first-of', 'year')
    Period(('day', Instant((2014, 1, 1)), 4))
    >>> period('month', '2014-2-3').offset('first-of')
    Period(('month', Instant((2014, 2, 1)), 1))
    >>> period('month', '2014-2-3').offset('first-of', 'month')
    Period(('month', Instant((2014, 2, 1)), 1))
    >>> period('month', '2014-2-3').offset('first-of', 'year')
    Period(('month', Instant((2014, 1, 1)), 1))
    >>> period('month', '2014-2-3', 4).offset('first-of')
    Period(('month', Instant((2014, 2, 1)), 4))
    >>> period('month', '2014-2-3', 4).offset('first-of', 'month')
    Period(('month', Instant((2014, 2, 1)), 4))
    >>> period('month', '2014-2-3', 4).offset('first-of', 'year')
    Period(('month', Instant((2014, 1, 1)), 4))
    >>> period('year', 2014).offset('first-of')
    Period(('year', Instant((2014, 1, 1)), 1))
    >>> period('year', 2014).offset('first-of', 'month')
    Period(('year', Instant((2014, 1, 1)), 1))
    >>> period('year', 2014).offset('first-of', 'year')
    Period(('year', Instant((2014, 1, 1)), 1))
    >>> period('year', '2014-2-3').offset('first-of')
    Period(('year', Instant((2014, 1, 1)), 1))
    >>> period('year', '2014-2-3').offset('first-of', 'month')
    Period(('year', Instant((2014, 2, 1)), 1))
    >>> period('year', '2014-2-3').offset('first-of', 'year')
    Period(('year', Instant((2014, 1, 1)), 1))
    >>> period('day', '2014-2-3').offset('last-of', 'month')
    Period(('day', Instant((2014, 2, 28)), 1))
    >>> period('day', '2014-2-3').offset('last-of', 'year')
    Period(('day', Instant((2014, 12, 31)), 1))
    >>> period('day', '2014-2-3', 4).offset('last-of', 'month')
    Period(('day', Instant((2014, 2, 28)), 4))
    >>> period('day', '2014-2-3', 4).offset('last-of', 'year')
    Period(('day', Instant((2014, 12, 31)), 4))
    >>> period('month', '2014-2-3').offset('last-of')
    Period(('month', Instant((2014, 2, 28)), 1))
    >>> period('month', '2014-2-3').offset('last-of', 'month')
    Period(('month', Instant((2014, 2, 28)), 1))
    >>> period('month', '2014-2-3').offset('last-of', 'year')
    Period(('month', Instant((2014, 12, 31)), 1))
    >>> period('month', '2014-2-3', 4).offset('last-of')
    Period(('month', Instant((2014, 2, 28)), 4))
    >>> period('month', '2014-2-3', 4).offset('last-of', 'month')
    Period(('month', Instant((2014, 2, 28)), 4))
    >>> period('month', '2014-2-3', 4).offset('last-of', 'year')
    Period(('month', Instant((2014, 12, 31)), 4))
    >>> period('year', 2014).offset('last-of')
    Period(('year', Instant((2014, 12, 31)), 1))
    >>> period('year', 2014).offset('last-of', 'month')
    Period(('year', Instant((2014, 1, 31)), 1))
    >>> period('year', 2014).offset('last-of', 'year')
    Period(('year', Instant((2014, 12, 31)), 1))
    >>> period('year', '2014-2-3').offset('last-of')
    Period(('year', Instant((2014, 12, 31)), 1))
    >>> period('year', '2014-2-3').offset('last-of', 'month')
    Period(('year', Instant((2014, 2, 28)), 1))
    >>> period('year', '2014-2-3').offset('last-of', 'year')
    Period(('year', Instant((2014, 12, 31)), 1))
    """
    # Default the offset unit to this period's own unit, shift the start
    # instant, and rebuild a period of the same class keeping unit and size.
    offset_unit = self[0] if unit is None else unit
    shifted_start = self[1].offset(offset, offset_unit)
    return self.__class__((self[0], shifted_start, self[2]))
constant[Increment (or decrement) the given period with offset units.
>>> period('day', 2014).offset(1)
Period(('day', Instant((2014, 1, 2)), 365))
>>> period('day', 2014).offset(1, 'day')
Period(('day', Instant((2014, 1, 2)), 365))
>>> period('day', 2014).offset(1, 'month')
Period(('day', Instant((2014, 2, 1)), 365))
>>> period('day', 2014).offset(1, 'year')
Period(('day', Instant((2015, 1, 1)), 365))
>>> period('month', 2014).offset(1)
Period(('month', Instant((2014, 2, 1)), 12))
>>> period('month', 2014).offset(1, 'day')
Period(('month', Instant((2014, 1, 2)), 12))
>>> period('month', 2014).offset(1, 'month')
Period(('month', Instant((2014, 2, 1)), 12))
>>> period('month', 2014).offset(1, 'year')
Period(('month', Instant((2015, 1, 1)), 12))
>>> period('year', 2014).offset(1)
Period(('year', Instant((2015, 1, 1)), 1))
>>> period('year', 2014).offset(1, 'day')
Period(('year', Instant((2014, 1, 2)), 1))
>>> period('year', 2014).offset(1, 'month')
Period(('year', Instant((2014, 2, 1)), 1))
>>> period('year', 2014).offset(1, 'year')
Period(('year', Instant((2015, 1, 1)), 1))
>>> period('day', '2011-2-28').offset(1)
Period(('day', Instant((2011, 3, 1)), 1))
>>> period('month', '2011-2-28').offset(1)
Period(('month', Instant((2011, 3, 28)), 1))
>>> period('year', '2011-2-28').offset(1)
Period(('year', Instant((2012, 2, 28)), 1))
>>> period('day', '2011-3-1').offset(-1)
Period(('day', Instant((2011, 2, 28)), 1))
>>> period('month', '2011-3-1').offset(-1)
Period(('month', Instant((2011, 2, 1)), 1))
>>> period('year', '2011-3-1').offset(-1)
Period(('year', Instant((2010, 3, 1)), 1))
>>> period('day', '2014-1-30').offset(3)
Period(('day', Instant((2014, 2, 2)), 1))
>>> period('month', '2014-1-30').offset(3)
Period(('month', Instant((2014, 4, 30)), 1))
>>> period('year', '2014-1-30').offset(3)
Period(('year', Instant((2017, 1, 30)), 1))
>>> period('day', 2014).offset(-3)
Period(('day', Instant((2013, 12, 29)), 365))
>>> period('month', 2014).offset(-3)
Period(('month', Instant((2013, 10, 1)), 12))
>>> period('year', 2014).offset(-3)
Period(('year', Instant((2011, 1, 1)), 1))
>>> period('day', '2014-2-3').offset('first-of', 'month')
Period(('day', Instant((2014, 2, 1)), 1))
>>> period('day', '2014-2-3').offset('first-of', 'year')
Period(('day', Instant((2014, 1, 1)), 1))
>>> period('day', '2014-2-3', 4).offset('first-of', 'month')
Period(('day', Instant((2014, 2, 1)), 4))
>>> period('day', '2014-2-3', 4).offset('first-of', 'year')
Period(('day', Instant((2014, 1, 1)), 4))
>>> period('month', '2014-2-3').offset('first-of')
Period(('month', Instant((2014, 2, 1)), 1))
>>> period('month', '2014-2-3').offset('first-of', 'month')
Period(('month', Instant((2014, 2, 1)), 1))
>>> period('month', '2014-2-3').offset('first-of', 'year')
Period(('month', Instant((2014, 1, 1)), 1))
>>> period('month', '2014-2-3', 4).offset('first-of')
Period(('month', Instant((2014, 2, 1)), 4))
>>> period('month', '2014-2-3', 4).offset('first-of', 'month')
Period(('month', Instant((2014, 2, 1)), 4))
>>> period('month', '2014-2-3', 4).offset('first-of', 'year')
Period(('month', Instant((2014, 1, 1)), 4))
>>> period('year', 2014).offset('first-of')
Period(('year', Instant((2014, 1, 1)), 1))
>>> period('year', 2014).offset('first-of', 'month')
Period(('year', Instant((2014, 1, 1)), 1))
>>> period('year', 2014).offset('first-of', 'year')
Period(('year', Instant((2014, 1, 1)), 1))
>>> period('year', '2014-2-3').offset('first-of')
Period(('year', Instant((2014, 1, 1)), 1))
>>> period('year', '2014-2-3').offset('first-of', 'month')
Period(('year', Instant((2014, 2, 1)), 1))
>>> period('year', '2014-2-3').offset('first-of', 'year')
Period(('year', Instant((2014, 1, 1)), 1))
>>> period('day', '2014-2-3').offset('last-of', 'month')
Period(('day', Instant((2014, 2, 28)), 1))
>>> period('day', '2014-2-3').offset('last-of', 'year')
Period(('day', Instant((2014, 12, 31)), 1))
>>> period('day', '2014-2-3', 4).offset('last-of', 'month')
Period(('day', Instant((2014, 2, 28)), 4))
>>> period('day', '2014-2-3', 4).offset('last-of', 'year')
Period(('day', Instant((2014, 12, 31)), 4))
>>> period('month', '2014-2-3').offset('last-of')
Period(('month', Instant((2014, 2, 28)), 1))
>>> period('month', '2014-2-3').offset('last-of', 'month')
Period(('month', Instant((2014, 2, 28)), 1))
>>> period('month', '2014-2-3').offset('last-of', 'year')
Period(('month', Instant((2014, 12, 31)), 1))
>>> period('month', '2014-2-3', 4).offset('last-of')
Period(('month', Instant((2014, 2, 28)), 4))
>>> period('month', '2014-2-3', 4).offset('last-of', 'month')
Period(('month', Instant((2014, 2, 28)), 4))
>>> period('month', '2014-2-3', 4).offset('last-of', 'year')
Period(('month', Instant((2014, 12, 31)), 4))
>>> period('year', 2014).offset('last-of')
Period(('year', Instant((2014, 12, 31)), 1))
>>> period('year', 2014).offset('last-of', 'month')
Period(('year', Instant((2014, 1, 31)), 1))
>>> period('year', 2014).offset('last-of', 'year')
Period(('year', Instant((2014, 12, 31)), 1))
>>> period('year', '2014-2-3').offset('last-of')
Period(('year', Instant((2014, 12, 31)), 1))
>>> period('year', '2014-2-3').offset('last-of', 'month')
Period(('year', Instant((2014, 2, 28)), 1))
>>> period('year', '2014-2-3').offset('last-of', 'year')
Period(('year', Instant((2014, 12, 31)), 1))
]
return[call[name[self].__class__, parameter[tuple[[<ast.Subscript object at 0x7da20c6e5030>, <ast.Call object at 0x7da20c6e6f80>, <ast.Subscript object at 0x7da20c7cb130>]]]]] | keyword[def] identifier[offset] ( identifier[self] , identifier[offset] , identifier[unit] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[__class__] (( identifier[self] [ literal[int] ], identifier[self] [ literal[int] ]. identifier[offset] ( identifier[offset] , identifier[self] [ literal[int] ] keyword[if] identifier[unit] keyword[is] keyword[None] keyword[else] identifier[unit] ), identifier[self] [ literal[int] ])) | def offset(self, offset, unit=None):
"""Increment (or decrement) the given period with offset units.
>>> period('day', 2014).offset(1)
Period(('day', Instant((2014, 1, 2)), 365))
>>> period('day', 2014).offset(1, 'day')
Period(('day', Instant((2014, 1, 2)), 365))
>>> period('day', 2014).offset(1, 'month')
Period(('day', Instant((2014, 2, 1)), 365))
>>> period('day', 2014).offset(1, 'year')
Period(('day', Instant((2015, 1, 1)), 365))
>>> period('month', 2014).offset(1)
Period(('month', Instant((2014, 2, 1)), 12))
>>> period('month', 2014).offset(1, 'day')
Period(('month', Instant((2014, 1, 2)), 12))
>>> period('month', 2014).offset(1, 'month')
Period(('month', Instant((2014, 2, 1)), 12))
>>> period('month', 2014).offset(1, 'year')
Period(('month', Instant((2015, 1, 1)), 12))
>>> period('year', 2014).offset(1)
Period(('year', Instant((2015, 1, 1)), 1))
>>> period('year', 2014).offset(1, 'day')
Period(('year', Instant((2014, 1, 2)), 1))
>>> period('year', 2014).offset(1, 'month')
Period(('year', Instant((2014, 2, 1)), 1))
>>> period('year', 2014).offset(1, 'year')
Period(('year', Instant((2015, 1, 1)), 1))
>>> period('day', '2011-2-28').offset(1)
Period(('day', Instant((2011, 3, 1)), 1))
>>> period('month', '2011-2-28').offset(1)
Period(('month', Instant((2011, 3, 28)), 1))
>>> period('year', '2011-2-28').offset(1)
Period(('year', Instant((2012, 2, 28)), 1))
>>> period('day', '2011-3-1').offset(-1)
Period(('day', Instant((2011, 2, 28)), 1))
>>> period('month', '2011-3-1').offset(-1)
Period(('month', Instant((2011, 2, 1)), 1))
>>> period('year', '2011-3-1').offset(-1)
Period(('year', Instant((2010, 3, 1)), 1))
>>> period('day', '2014-1-30').offset(3)
Period(('day', Instant((2014, 2, 2)), 1))
>>> period('month', '2014-1-30').offset(3)
Period(('month', Instant((2014, 4, 30)), 1))
>>> period('year', '2014-1-30').offset(3)
Period(('year', Instant((2017, 1, 30)), 1))
>>> period('day', 2014).offset(-3)
Period(('day', Instant((2013, 12, 29)), 365))
>>> period('month', 2014).offset(-3)
Period(('month', Instant((2013, 10, 1)), 12))
>>> period('year', 2014).offset(-3)
Period(('year', Instant((2011, 1, 1)), 1))
>>> period('day', '2014-2-3').offset('first-of', 'month')
Period(('day', Instant((2014, 2, 1)), 1))
>>> period('day', '2014-2-3').offset('first-of', 'year')
Period(('day', Instant((2014, 1, 1)), 1))
>>> period('day', '2014-2-3', 4).offset('first-of', 'month')
Period(('day', Instant((2014, 2, 1)), 4))
>>> period('day', '2014-2-3', 4).offset('first-of', 'year')
Period(('day', Instant((2014, 1, 1)), 4))
>>> period('month', '2014-2-3').offset('first-of')
Period(('month', Instant((2014, 2, 1)), 1))
>>> period('month', '2014-2-3').offset('first-of', 'month')
Period(('month', Instant((2014, 2, 1)), 1))
>>> period('month', '2014-2-3').offset('first-of', 'year')
Period(('month', Instant((2014, 1, 1)), 1))
>>> period('month', '2014-2-3', 4).offset('first-of')
Period(('month', Instant((2014, 2, 1)), 4))
>>> period('month', '2014-2-3', 4).offset('first-of', 'month')
Period(('month', Instant((2014, 2, 1)), 4))
>>> period('month', '2014-2-3', 4).offset('first-of', 'year')
Period(('month', Instant((2014, 1, 1)), 4))
>>> period('year', 2014).offset('first-of')
Period(('year', Instant((2014, 1, 1)), 1))
>>> period('year', 2014).offset('first-of', 'month')
Period(('year', Instant((2014, 1, 1)), 1))
>>> period('year', 2014).offset('first-of', 'year')
Period(('year', Instant((2014, 1, 1)), 1))
>>> period('year', '2014-2-3').offset('first-of')
Period(('year', Instant((2014, 1, 1)), 1))
>>> period('year', '2014-2-3').offset('first-of', 'month')
Period(('year', Instant((2014, 2, 1)), 1))
>>> period('year', '2014-2-3').offset('first-of', 'year')
Period(('year', Instant((2014, 1, 1)), 1))
>>> period('day', '2014-2-3').offset('last-of', 'month')
Period(('day', Instant((2014, 2, 28)), 1))
>>> period('day', '2014-2-3').offset('last-of', 'year')
Period(('day', Instant((2014, 12, 31)), 1))
>>> period('day', '2014-2-3', 4).offset('last-of', 'month')
Period(('day', Instant((2014, 2, 28)), 4))
>>> period('day', '2014-2-3', 4).offset('last-of', 'year')
Period(('day', Instant((2014, 12, 31)), 4))
>>> period('month', '2014-2-3').offset('last-of')
Period(('month', Instant((2014, 2, 28)), 1))
>>> period('month', '2014-2-3').offset('last-of', 'month')
Period(('month', Instant((2014, 2, 28)), 1))
>>> period('month', '2014-2-3').offset('last-of', 'year')
Period(('month', Instant((2014, 12, 31)), 1))
>>> period('month', '2014-2-3', 4).offset('last-of')
Period(('month', Instant((2014, 2, 28)), 4))
>>> period('month', '2014-2-3', 4).offset('last-of', 'month')
Period(('month', Instant((2014, 2, 28)), 4))
>>> period('month', '2014-2-3', 4).offset('last-of', 'year')
Period(('month', Instant((2014, 12, 31)), 4))
>>> period('year', 2014).offset('last-of')
Period(('year', Instant((2014, 12, 31)), 1))
>>> period('year', 2014).offset('last-of', 'month')
Period(('year', Instant((2014, 1, 31)), 1))
>>> period('year', 2014).offset('last-of', 'year')
Period(('year', Instant((2014, 12, 31)), 1))
>>> period('year', '2014-2-3').offset('last-of')
Period(('year', Instant((2014, 12, 31)), 1))
>>> period('year', '2014-2-3').offset('last-of', 'month')
Period(('year', Instant((2014, 2, 28)), 1))
>>> period('year', '2014-2-3').offset('last-of', 'year')
Period(('year', Instant((2014, 12, 31)), 1))
"""
return self.__class__((self[0], self[1].offset(offset, self[0] if unit is None else unit), self[2])) |
def _shutdown(self):
    """Private method. Reset to non-piped spawn.

    Closes the log stream (unless in dry-run mode), removes the
    SConfSourceBuilder from the environment, clears the module-level
    ``sconf_global``, records the config header text, and restores the
    environment's original filesystem object.

    Raises SCons.Errors.UserError if called more than once.
    """
    global sconf_global, _ac_config_hs
    if not self.active:
        raise SCons.Errors.UserError("Finish may be called only once!")
    if self.logstream is not None and not dryrun:
        self.logstream.write("\n")
        self.logstream.close()
        self.logstream = None
    # Remove the SConfSourceBuilder from the environment.
    blds = self.env['BUILDERS']
    del blds['SConfSourceBuilder']
    self.env.Replace(BUILDERS=blds)
    self.active = 0
    sconf_global = None
    # Idiomatic None check (was: "not self.config_h is None").
    if self.config_h is not None:
        _ac_config_hs[self.config_h] = self.config_h_text
    self.env.fs = self.lastEnvFs
constant[Private method. Reset to non-piped spawn]
<ast.Global object at 0x7da20c6c5300>
if <ast.UnaryOp object at 0x7da20c6c6f80> begin[:]
<ast.Raise object at 0x7da20c6c7280>
if <ast.BoolOp object at 0x7da20c6c50f0> begin[:]
call[name[self].logstream.write, parameter[constant[
]]]
call[name[self].logstream.close, parameter[]]
name[self].logstream assign[=] constant[None]
variable[blds] assign[=] call[name[self].env][constant[BUILDERS]]
<ast.Delete object at 0x7da20c6c6110>
call[name[self].env.Replace, parameter[]]
name[self].active assign[=] constant[0]
variable[sconf_global] assign[=] constant[None]
if <ast.UnaryOp object at 0x7da20c6c4a00> begin[:]
call[name[_ac_config_hs]][name[self].config_h] assign[=] name[self].config_h_text
name[self].env.fs assign[=] name[self].lastEnvFs | keyword[def] identifier[_shutdown] ( identifier[self] ):
literal[string]
keyword[global] identifier[sconf_global] , identifier[_ac_config_hs]
keyword[if] keyword[not] identifier[self] . identifier[active] :
keyword[raise] identifier[SCons] . identifier[Errors] . identifier[UserError] ( literal[string] )
keyword[if] identifier[self] . identifier[logstream] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[dryrun] :
identifier[self] . identifier[logstream] . identifier[write] ( literal[string] )
identifier[self] . identifier[logstream] . identifier[close] ()
identifier[self] . identifier[logstream] = keyword[None]
identifier[blds] = identifier[self] . identifier[env] [ literal[string] ]
keyword[del] identifier[blds] [ literal[string] ]
identifier[self] . identifier[env] . identifier[Replace] ( identifier[BUILDERS] = identifier[blds] )
identifier[self] . identifier[active] = literal[int]
identifier[sconf_global] = keyword[None]
keyword[if] keyword[not] identifier[self] . identifier[config_h] keyword[is] keyword[None] :
identifier[_ac_config_hs] [ identifier[self] . identifier[config_h] ]= identifier[self] . identifier[config_h_text]
identifier[self] . identifier[env] . identifier[fs] = identifier[self] . identifier[lastEnvFs] | def _shutdown(self):
"""Private method. Reset to non-piped spawn"""
global sconf_global, _ac_config_hs
if not self.active:
raise SCons.Errors.UserError('Finish may be called only once!') # depends on [control=['if'], data=[]]
if self.logstream is not None and (not dryrun):
self.logstream.write('\n')
self.logstream.close()
self.logstream = None # depends on [control=['if'], data=[]]
# remove the SConfSourceBuilder from the environment
blds = self.env['BUILDERS']
del blds['SConfSourceBuilder']
self.env.Replace(BUILDERS=blds)
self.active = 0
sconf_global = None
if not self.config_h is None:
_ac_config_hs[self.config_h] = self.config_h_text # depends on [control=['if'], data=[]]
self.env.fs = self.lastEnvFs |
def update(self, *args, **kwargs):
    """Update only drafts.

    Status required: ``'draft'``.
    Meta information inside `_deposit` are preserved.
    """
    # Delegate to the parent class; all arguments are passed through as-is.
    parent = super(Deposit, self)
    parent.update(*args, **kwargs)
constant[Update only drafts.
Status required: ``'draft'``.
Meta information inside `_deposit` are preserved.
]
call[call[name[super], parameter[name[Deposit], name[self]]].update, parameter[<ast.Starred object at 0x7da1aff1eb00>]] | keyword[def] identifier[update] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[super] ( identifier[Deposit] , identifier[self] ). identifier[update] (* identifier[args] ,** identifier[kwargs] ) | def update(self, *args, **kwargs):
"""Update only drafts.
Status required: ``'draft'``.
Meta information inside `_deposit` are preserved.
"""
super(Deposit, self).update(*args, **kwargs) |
def calc_weighted_event_var(self, D, weights, event_pat):
    """Computes normalized weighted variance around event pattern

    Utility helper for a training set of weighted event examples.  For
    each event, the squared distance of every (z-scored) timepoint from
    the event's mean pattern is accumulated, with the per-timepoint
    weights deciding how much each distance contributes; the result is
    normalized by the number of voxels.

    Parameters
    ----------
    D : timepoint by voxel ndarray
        fMRI data for which to compute event variances
    weights : timepoint by event ndarray
        specifies relative weights of timepoints for each event
    event_pat : voxel by event ndarray
        mean event patterns to compute variance around

    Returns
    -------
    ev_var : ndarray of variances for each event
    """
    Dz = stats.zscore(D, axis=1, ddof=1)
    n_events = event_pat.shape[1]
    ev_var = np.empty(n_events)
    for ev in range(n_events):
        w = weights[:, ev]
        # Only consider timepoints with weight > 0.1% of the max weight.
        keep = w > np.max(w) / 1000
        w_kept = w[keep]
        sq_dist = np.sum(np.square(Dz[keep, :] - event_pat[:, ev]), axis=1)
        weighted_sumsq = np.dot(w_kept, sq_dist)
        w_total = np.sum(w_kept)
        # Denominator applies the weighted-sample correction:
        # sum(w) - sum(w^2) / sum(w).
        ev_var[ev] = weighted_sumsq / (
            w_total - np.sum(np.square(w_kept)) / w_total)
    return ev_var / D.shape[1]
constant[Computes normalized weighted variance around event pattern
Utility function for computing variance in a training set of weighted
event examples. For each event, the sum of squared differences for all
timepoints from the event pattern is computed, and then the weights
specify how much each of these differences contributes to the
variance (normalized by the number of voxels).
Parameters
----------
D : timepoint by voxel ndarray
fMRI data for which to compute event variances
weights : timepoint by event ndarray
specifies relative weights of timepoints for each event
event_pat : voxel by event ndarray
mean event patterns to compute variance around
Returns
-------
ev_var : ndarray of variances for each event
]
variable[Dz] assign[=] call[name[stats].zscore, parameter[name[D]]]
variable[ev_var] assign[=] call[name[np].empty, parameter[call[name[event_pat].shape][constant[1]]]]
for taget[name[e]] in starred[call[name[range], parameter[call[name[event_pat].shape][constant[1]]]]] begin[:]
variable[nz] assign[=] compare[call[name[weights]][tuple[[<ast.Slice object at 0x7da2047e9000>, <ast.Name object at 0x7da2047ea230>]]] greater[>] binary_operation[call[name[np].max, parameter[call[name[weights]][tuple[[<ast.Slice object at 0x7da2047e81f0>, <ast.Name object at 0x7da2047ea140>]]]]] / constant[1000]]]
variable[sumsq] assign[=] call[name[np].dot, parameter[call[name[weights]][tuple[[<ast.Name object at 0x7da2047eac80>, <ast.Name object at 0x7da2047e9db0>]]], call[name[np].sum, parameter[call[name[np].square, parameter[binary_operation[call[name[Dz]][tuple[[<ast.Name object at 0x7da1b0790e20>, <ast.Slice object at 0x7da1b0792d70>]]] - call[name[event_pat]][tuple[[<ast.Slice object at 0x7da1b07920b0>, <ast.Name object at 0x7da1b07910f0>]]]]]]]]]]
call[name[ev_var]][name[e]] assign[=] binary_operation[name[sumsq] / binary_operation[call[name[np].sum, parameter[call[name[weights]][tuple[[<ast.Name object at 0x7da1b0793fa0>, <ast.Name object at 0x7da1b0791b40>]]]]] - binary_operation[call[name[np].sum, parameter[call[name[np].square, parameter[call[name[weights]][tuple[[<ast.Name object at 0x7da1b0792bf0>, <ast.Name object at 0x7da1b0792770>]]]]]]] / call[name[np].sum, parameter[call[name[weights]][tuple[[<ast.Name object at 0x7da1b07915d0>, <ast.Name object at 0x7da1b0790820>]]]]]]]]
variable[ev_var] assign[=] binary_operation[name[ev_var] / call[name[D].shape][constant[1]]]
return[name[ev_var]] | keyword[def] identifier[calc_weighted_event_var] ( identifier[self] , identifier[D] , identifier[weights] , identifier[event_pat] ):
literal[string]
identifier[Dz] = identifier[stats] . identifier[zscore] ( identifier[D] , identifier[axis] = literal[int] , identifier[ddof] = literal[int] )
identifier[ev_var] = identifier[np] . identifier[empty] ( identifier[event_pat] . identifier[shape] [ literal[int] ])
keyword[for] identifier[e] keyword[in] identifier[range] ( identifier[event_pat] . identifier[shape] [ literal[int] ]):
identifier[nz] = identifier[weights] [:, identifier[e] ]> identifier[np] . identifier[max] ( identifier[weights] [:, identifier[e] ])/ literal[int]
identifier[sumsq] = identifier[np] . identifier[dot] ( identifier[weights] [ identifier[nz] , identifier[e] ],
identifier[np] . identifier[sum] ( identifier[np] . identifier[square] ( identifier[Dz] [ identifier[nz] ,:]-
identifier[event_pat] [:, identifier[e] ]), identifier[axis] = literal[int] ))
identifier[ev_var] [ identifier[e] ]= identifier[sumsq] /( identifier[np] . identifier[sum] ( identifier[weights] [ identifier[nz] , identifier[e] ])-
identifier[np] . identifier[sum] ( identifier[np] . identifier[square] ( identifier[weights] [ identifier[nz] , identifier[e] ]))/
identifier[np] . identifier[sum] ( identifier[weights] [ identifier[nz] , identifier[e] ]))
identifier[ev_var] = identifier[ev_var] / identifier[D] . identifier[shape] [ literal[int] ]
keyword[return] identifier[ev_var] | def calc_weighted_event_var(self, D, weights, event_pat):
"""Computes normalized weighted variance around event pattern
Utility function for computing variance in a training set of weighted
event examples. For each event, the sum of squared differences for all
timepoints from the event pattern is computed, and then the weights
specify how much each of these differences contributes to the
variance (normalized by the number of voxels).
Parameters
----------
D : timepoint by voxel ndarray
fMRI data for which to compute event variances
weights : timepoint by event ndarray
specifies relative weights of timepoints for each event
event_pat : voxel by event ndarray
mean event patterns to compute variance around
Returns
-------
ev_var : ndarray of variances for each event
"""
Dz = stats.zscore(D, axis=1, ddof=1)
ev_var = np.empty(event_pat.shape[1])
for e in range(event_pat.shape[1]):
# Only compute variances for weights > 0.1% of max weight
nz = weights[:, e] > np.max(weights[:, e]) / 1000
sumsq = np.dot(weights[nz, e], np.sum(np.square(Dz[nz, :] - event_pat[:, e]), axis=1))
ev_var[e] = sumsq / (np.sum(weights[nz, e]) - np.sum(np.square(weights[nz, e])) / np.sum(weights[nz, e])) # depends on [control=['for'], data=['e']]
ev_var = ev_var / D.shape[1]
return ev_var |
def getTimestampUTC():
    """getTimestampUTC() -> (ts_sec, ts_usec)

    Return the current UTC time as a (seconds, microseconds) pair: the
    POSIX timestamp of the current instant (via calendar.timegm) and the
    sub-second component of that same instant.
    """
    now = datetime.datetime.utcnow()
    seconds = calendar.timegm(now.timetuple())
    return seconds, now.microsecond
constant[getTimestampUTC() -> (ts_sec, ts_usec)
Returns the current UTC time in seconds and microseconds.
]
variable[utc] assign[=] call[name[datetime].datetime.utcnow, parameter[]]
variable[ts_sec] assign[=] call[name[calendar].timegm, parameter[call[name[utc].timetuple, parameter[]]]]
variable[ts_usec] assign[=] name[utc].microsecond
return[tuple[[<ast.Name object at 0x7da18eb57310>, <ast.Name object at 0x7da18eb542e0>]]] | keyword[def] identifier[getTimestampUTC] ():
literal[string]
identifier[utc] = identifier[datetime] . identifier[datetime] . identifier[utcnow] ()
identifier[ts_sec] = identifier[calendar] . identifier[timegm] ( identifier[utc] . identifier[timetuple] ())
identifier[ts_usec] = identifier[utc] . identifier[microsecond]
keyword[return] identifier[ts_sec] , identifier[ts_usec] | def getTimestampUTC():
"""getTimestampUTC() -> (ts_sec, ts_usec)
Returns the current UTC time in seconds and microseconds.
"""
utc = datetime.datetime.utcnow()
ts_sec = calendar.timegm(utc.timetuple())
ts_usec = utc.microsecond
return (ts_sec, ts_usec) |
def opens(self, tag=None, fromdate=None, todate=None):
"""
Gets total counts of recipients who opened your emails.
This is only recorded when open tracking is enabled for that email.
"""
return self.call("GET", "/stats/outbound/opens", tag=tag, fromdate=fromdate, todate=todate) | def function[opens, parameter[self, tag, fromdate, todate]]:
constant[
Gets total counts of recipients who opened your emails.
This is only recorded when open tracking is enabled for that email.
]
return[call[name[self].call, parameter[constant[GET], constant[/stats/outbound/opens]]]] | keyword[def] identifier[opens] ( identifier[self] , identifier[tag] = keyword[None] , identifier[fromdate] = keyword[None] , identifier[todate] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[call] ( literal[string] , literal[string] , identifier[tag] = identifier[tag] , identifier[fromdate] = identifier[fromdate] , identifier[todate] = identifier[todate] ) | def opens(self, tag=None, fromdate=None, todate=None):
"""
Gets total counts of recipients who opened your emails.
This is only recorded when open tracking is enabled for that email.
"""
return self.call('GET', '/stats/outbound/opens', tag=tag, fromdate=fromdate, todate=todate) |
def build_queue(action, action_space, build_queue_id):
"""Cancel a unit in the build queue."""
del action_space
action.action_ui.production_panel.unit_index = build_queue_id | def function[build_queue, parameter[action, action_space, build_queue_id]]:
constant[Cancel a unit in the build queue.]
<ast.Delete object at 0x7da18f00fcd0>
name[action].action_ui.production_panel.unit_index assign[=] name[build_queue_id] | keyword[def] identifier[build_queue] ( identifier[action] , identifier[action_space] , identifier[build_queue_id] ):
literal[string]
keyword[del] identifier[action_space]
identifier[action] . identifier[action_ui] . identifier[production_panel] . identifier[unit_index] = identifier[build_queue_id] | def build_queue(action, action_space, build_queue_id):
"""Cancel a unit in the build queue."""
del action_space
action.action_ui.production_panel.unit_index = build_queue_id |
def constants(self):
"""
Gets the constants from the class that acts like a namespace for constants.
:rtype: dict<str,*>
"""
ret = {}
name = self.__class_name.split('.')[-1]
constant_class = getattr(self.__module, name)
for name, value in constant_class.__dict__.items():
if re.match(r'^[A-Z][A-Z0-9_]*$', name):
ret[name] = value
return ret | def function[constants, parameter[self]]:
constant[
Gets the constants from the class that acts like a namespace for constants.
:rtype: dict<str,*>
]
variable[ret] assign[=] dictionary[[], []]
variable[name] assign[=] call[call[name[self].__class_name.split, parameter[constant[.]]]][<ast.UnaryOp object at 0x7da1b18baf80>]
variable[constant_class] assign[=] call[name[getattr], parameter[name[self].__module, name[name]]]
for taget[tuple[[<ast.Name object at 0x7da1b18b91b0>, <ast.Name object at 0x7da1b18bad70>]]] in starred[call[name[constant_class].__dict__.items, parameter[]]] begin[:]
if call[name[re].match, parameter[constant[^[A-Z][A-Z0-9_]*$], name[name]]] begin[:]
call[name[ret]][name[name]] assign[=] name[value]
return[name[ret]] | keyword[def] identifier[constants] ( identifier[self] ):
literal[string]
identifier[ret] ={}
identifier[name] = identifier[self] . identifier[__class_name] . identifier[split] ( literal[string] )[- literal[int] ]
identifier[constant_class] = identifier[getattr] ( identifier[self] . identifier[__module] , identifier[name] )
keyword[for] identifier[name] , identifier[value] keyword[in] identifier[constant_class] . identifier[__dict__] . identifier[items] ():
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[name] ):
identifier[ret] [ identifier[name] ]= identifier[value]
keyword[return] identifier[ret] | def constants(self):
"""
Gets the constants from the class that acts like a namespace for constants.
:rtype: dict<str,*>
"""
ret = {}
name = self.__class_name.split('.')[-1]
constant_class = getattr(self.__module, name)
for (name, value) in constant_class.__dict__.items():
if re.match('^[A-Z][A-Z0-9_]*$', name):
ret[name] = value # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return ret |
def unindent_tree(element):
"""
Removes the outermost indent. For example, the tree
"<div>A<blockqote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
is transformed to
"<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>"
"""
for el in element.iter():
if is_indentation_element(el):
el.attrib.clear()
el.tag = 'div'
return | def function[unindent_tree, parameter[element]]:
constant[
Removes the outermost indent. For example, the tree
"<div>A<blockqote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
is transformed to
"<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>"
]
for taget[name[el]] in starred[call[name[element].iter, parameter[]]] begin[:]
if call[name[is_indentation_element], parameter[name[el]]] begin[:]
call[name[el].attrib.clear, parameter[]]
name[el].tag assign[=] constant[div]
return[None] | keyword[def] identifier[unindent_tree] ( identifier[element] ):
literal[string]
keyword[for] identifier[el] keyword[in] identifier[element] . identifier[iter] ():
keyword[if] identifier[is_indentation_element] ( identifier[el] ):
identifier[el] . identifier[attrib] . identifier[clear] ()
identifier[el] . identifier[tag] = literal[string]
keyword[return] | def unindent_tree(element):
"""
Removes the outermost indent. For example, the tree
"<div>A<blockqote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
is transformed to
"<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>"
"""
for el in element.iter():
if is_indentation_element(el):
el.attrib.clear()
el.tag = 'div'
return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['el']] |
def crud_permission_name(model, action, convert=True):
"""Returns permission name using Django naming convention: app_label.action_object.
If `convert` is True, `create` and `update` actions would be renamed
to `add` and `change`.
"""
app_label = model._meta.app_label
model_lower = model.__name__.lower()
if convert:
action = MAP_PERMISSION_ACTIONS.get(action, action)
return '%s.%s_%s' % (
app_label,
action,
model_lower
) | def function[crud_permission_name, parameter[model, action, convert]]:
constant[Returns permission name using Django naming convention: app_label.action_object.
If `convert` is True, `create` and `update` actions would be renamed
to `add` and `change`.
]
variable[app_label] assign[=] name[model]._meta.app_label
variable[model_lower] assign[=] call[name[model].__name__.lower, parameter[]]
if name[convert] begin[:]
variable[action] assign[=] call[name[MAP_PERMISSION_ACTIONS].get, parameter[name[action], name[action]]]
return[binary_operation[constant[%s.%s_%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bc73940>, <ast.Name object at 0x7da18bc73340>, <ast.Name object at 0x7da18bc72d40>]]]] | keyword[def] identifier[crud_permission_name] ( identifier[model] , identifier[action] , identifier[convert] = keyword[True] ):
literal[string]
identifier[app_label] = identifier[model] . identifier[_meta] . identifier[app_label]
identifier[model_lower] = identifier[model] . identifier[__name__] . identifier[lower] ()
keyword[if] identifier[convert] :
identifier[action] = identifier[MAP_PERMISSION_ACTIONS] . identifier[get] ( identifier[action] , identifier[action] )
keyword[return] literal[string] %(
identifier[app_label] ,
identifier[action] ,
identifier[model_lower]
) | def crud_permission_name(model, action, convert=True):
"""Returns permission name using Django naming convention: app_label.action_object.
If `convert` is True, `create` and `update` actions would be renamed
to `add` and `change`.
"""
app_label = model._meta.app_label
model_lower = model.__name__.lower()
if convert:
action = MAP_PERMISSION_ACTIONS.get(action, action) # depends on [control=['if'], data=[]]
return '%s.%s_%s' % (app_label, action, model_lower) |
def type(self, value):
"""
Sets the type of the message.
:type value: Types
:param value: the type
:raise AttributeError: if value is not a valid type
"""
if value not in list(defines.Types.values()):
raise AttributeError
self._type = value | def function[type, parameter[self, value]]:
constant[
Sets the type of the message.
:type value: Types
:param value: the type
:raise AttributeError: if value is not a valid type
]
if compare[name[value] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[defines].Types.values, parameter[]]]]] begin[:]
<ast.Raise object at 0x7da204347310>
name[self]._type assign[=] name[value] | keyword[def] identifier[type] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[not] keyword[in] identifier[list] ( identifier[defines] . identifier[Types] . identifier[values] ()):
keyword[raise] identifier[AttributeError]
identifier[self] . identifier[_type] = identifier[value] | def type(self, value):
"""
Sets the type of the message.
:type value: Types
:param value: the type
:raise AttributeError: if value is not a valid type
"""
if value not in list(defines.Types.values()):
raise AttributeError # depends on [control=['if'], data=[]]
self._type = value |
def mutate_list_of_nodes(node, context):
"""
:type context: Context
"""
return_annotation_started = False
for child_node in node.children:
if child_node.type == 'operator' and child_node.value == '->':
return_annotation_started = True
if return_annotation_started and child_node.type == 'operator' and child_node.value == ':':
return_annotation_started = False
if return_annotation_started:
continue
mutate_node(child_node, context=context)
# this is just an optimization to stop early
if context.number_of_performed_mutations and context.mutation_id != ALL:
return | def function[mutate_list_of_nodes, parameter[node, context]]:
constant[
:type context: Context
]
variable[return_annotation_started] assign[=] constant[False]
for taget[name[child_node]] in starred[name[node].children] begin[:]
if <ast.BoolOp object at 0x7da20e955720> begin[:]
variable[return_annotation_started] assign[=] constant[True]
if <ast.BoolOp object at 0x7da20e954dc0> begin[:]
variable[return_annotation_started] assign[=] constant[False]
if name[return_annotation_started] begin[:]
continue
call[name[mutate_node], parameter[name[child_node]]]
if <ast.BoolOp object at 0x7da204961870> begin[:]
return[None] | keyword[def] identifier[mutate_list_of_nodes] ( identifier[node] , identifier[context] ):
literal[string]
identifier[return_annotation_started] = keyword[False]
keyword[for] identifier[child_node] keyword[in] identifier[node] . identifier[children] :
keyword[if] identifier[child_node] . identifier[type] == literal[string] keyword[and] identifier[child_node] . identifier[value] == literal[string] :
identifier[return_annotation_started] = keyword[True]
keyword[if] identifier[return_annotation_started] keyword[and] identifier[child_node] . identifier[type] == literal[string] keyword[and] identifier[child_node] . identifier[value] == literal[string] :
identifier[return_annotation_started] = keyword[False]
keyword[if] identifier[return_annotation_started] :
keyword[continue]
identifier[mutate_node] ( identifier[child_node] , identifier[context] = identifier[context] )
keyword[if] identifier[context] . identifier[number_of_performed_mutations] keyword[and] identifier[context] . identifier[mutation_id] != identifier[ALL] :
keyword[return] | def mutate_list_of_nodes(node, context):
"""
:type context: Context
"""
return_annotation_started = False
for child_node in node.children:
if child_node.type == 'operator' and child_node.value == '->':
return_annotation_started = True # depends on [control=['if'], data=[]]
if return_annotation_started and child_node.type == 'operator' and (child_node.value == ':'):
return_annotation_started = False # depends on [control=['if'], data=[]]
if return_annotation_started:
continue # depends on [control=['if'], data=[]]
mutate_node(child_node, context=context)
# this is just an optimization to stop early
if context.number_of_performed_mutations and context.mutation_id != ALL:
return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['child_node']] |
def convert_to_dict(item):
'''Examine an item of any type and return a true dictionary.
If the item is already a dictionary, then the item is returned as-is. Easy.
Otherwise, it attempts to interpret it. So far, this routine can handle:
* a class, function, or anything with a .__dict__ entry
* a legacy mongoEngine document (a class for MongoDb handling)
* a list (index positions are used as keys)
* a generic object that is iterable
* a generic object with members
.. versionadded:: 0.0.4
:param item:
Any object such as a variable, instance, or function.
:returns:
A true dictionary. If unable to get convert 'item', then an empty dictionary '{}' is returned.
'''
# get type
actual_type = detect_type(item)
# given the type, do conversion
if actual_type=="dict":
return item
elif actual_type=="list":
temp = {}
ctr = 0
for entry in item:
temp[ctr]=entry
ctr += 1
return temp
elif actual_type=="mongoengine":
return item.__dict__['_data']
elif actual_type=="class":
return item.__dict__
elif actual_type=="iterable_dict":
# for a 'iterable_dict' create a real dictionary for a ALMOST-dict object.
d = {}
for key in item: # NO, you can't use iteritems(). The method might not exist.
d[key] = item[key]
return d
elif actual_type=="object":
tuples = getmembers(item)
d = {}
for (key, value) in tuples:
d[key] = value
return d
return {} | def function[convert_to_dict, parameter[item]]:
constant[Examine an item of any type and return a true dictionary.
If the item is already a dictionary, then the item is returned as-is. Easy.
Otherwise, it attempts to interpret it. So far, this routine can handle:
* a class, function, or anything with a .__dict__ entry
* a legacy mongoEngine document (a class for MongoDb handling)
* a list (index positions are used as keys)
* a generic object that is iterable
* a generic object with members
.. versionadded:: 0.0.4
:param item:
Any object such as a variable, instance, or function.
:returns:
A true dictionary. If unable to get convert 'item', then an empty dictionary '{}' is returned.
]
variable[actual_type] assign[=] call[name[detect_type], parameter[name[item]]]
if compare[name[actual_type] equal[==] constant[dict]] begin[:]
return[name[item]]
return[dictionary[[], []]] | keyword[def] identifier[convert_to_dict] ( identifier[item] ):
literal[string]
identifier[actual_type] = identifier[detect_type] ( identifier[item] )
keyword[if] identifier[actual_type] == literal[string] :
keyword[return] identifier[item]
keyword[elif] identifier[actual_type] == literal[string] :
identifier[temp] ={}
identifier[ctr] = literal[int]
keyword[for] identifier[entry] keyword[in] identifier[item] :
identifier[temp] [ identifier[ctr] ]= identifier[entry]
identifier[ctr] += literal[int]
keyword[return] identifier[temp]
keyword[elif] identifier[actual_type] == literal[string] :
keyword[return] identifier[item] . identifier[__dict__] [ literal[string] ]
keyword[elif] identifier[actual_type] == literal[string] :
keyword[return] identifier[item] . identifier[__dict__]
keyword[elif] identifier[actual_type] == literal[string] :
identifier[d] ={}
keyword[for] identifier[key] keyword[in] identifier[item] :
identifier[d] [ identifier[key] ]= identifier[item] [ identifier[key] ]
keyword[return] identifier[d]
keyword[elif] identifier[actual_type] == literal[string] :
identifier[tuples] = identifier[getmembers] ( identifier[item] )
identifier[d] ={}
keyword[for] ( identifier[key] , identifier[value] ) keyword[in] identifier[tuples] :
identifier[d] [ identifier[key] ]= identifier[value]
keyword[return] identifier[d]
keyword[return] {} | def convert_to_dict(item):
"""Examine an item of any type and return a true dictionary.
If the item is already a dictionary, then the item is returned as-is. Easy.
Otherwise, it attempts to interpret it. So far, this routine can handle:
* a class, function, or anything with a .__dict__ entry
* a legacy mongoEngine document (a class for MongoDb handling)
* a list (index positions are used as keys)
* a generic object that is iterable
* a generic object with members
.. versionadded:: 0.0.4
:param item:
Any object such as a variable, instance, or function.
:returns:
A true dictionary. If unable to get convert 'item', then an empty dictionary '{}' is returned.
"""
# get type
actual_type = detect_type(item)
# given the type, do conversion
if actual_type == 'dict':
return item # depends on [control=['if'], data=[]]
elif actual_type == 'list':
temp = {}
ctr = 0
for entry in item:
temp[ctr] = entry
ctr += 1 # depends on [control=['for'], data=['entry']]
return temp # depends on [control=['if'], data=[]]
elif actual_type == 'mongoengine':
return item.__dict__['_data'] # depends on [control=['if'], data=[]]
elif actual_type == 'class':
return item.__dict__ # depends on [control=['if'], data=[]]
elif actual_type == 'iterable_dict':
# for a 'iterable_dict' create a real dictionary for a ALMOST-dict object.
d = {}
for key in item: # NO, you can't use iteritems(). The method might not exist.
d[key] = item[key] # depends on [control=['for'], data=['key']]
return d # depends on [control=['if'], data=[]]
elif actual_type == 'object':
tuples = getmembers(item)
d = {}
for (key, value) in tuples:
d[key] = value # depends on [control=['for'], data=[]]
return d # depends on [control=['if'], data=[]]
return {} |
def paths_to_top(self, term):
""" Returns all possible paths to the root node
Each path includes the term given. The order of the path is
top -> bottom, i.e. it starts with the root and ends with the
given term (inclusively).
Parameters:
-----------
- term:
the id of the GO term, where the paths begin (i.e. the
accession 'GO:0003682')
Returns:
--------
- a list of lists of GO Terms
"""
# error handling consistent with original authors
if term not in self:
print("Term %s not found!" % term, file=sys.stderr)
return
def _paths_to_top_recursive(rec):
if rec.level == 0:
return [[rec]]
paths = []
for parent in rec.parents:
top_paths = _paths_to_top_recursive(parent)
for top_path in top_paths:
top_path.append(rec)
paths.append(top_path)
return paths
go_term = self[term]
return _paths_to_top_recursive(go_term) | def function[paths_to_top, parameter[self, term]]:
constant[ Returns all possible paths to the root node
Each path includes the term given. The order of the path is
top -> bottom, i.e. it starts with the root and ends with the
given term (inclusively).
Parameters:
-----------
- term:
the id of the GO term, where the paths begin (i.e. the
accession 'GO:0003682')
Returns:
--------
- a list of lists of GO Terms
]
if compare[name[term] <ast.NotIn object at 0x7da2590d7190> name[self]] begin[:]
call[name[print], parameter[binary_operation[constant[Term %s not found!] <ast.Mod object at 0x7da2590d6920> name[term]]]]
return[None]
def function[_paths_to_top_recursive, parameter[rec]]:
if compare[name[rec].level equal[==] constant[0]] begin[:]
return[list[[<ast.List object at 0x7da18dc98f70>]]]
variable[paths] assign[=] list[[]]
for taget[name[parent]] in starred[name[rec].parents] begin[:]
variable[top_paths] assign[=] call[name[_paths_to_top_recursive], parameter[name[parent]]]
for taget[name[top_path]] in starred[name[top_paths]] begin[:]
call[name[top_path].append, parameter[name[rec]]]
call[name[paths].append, parameter[name[top_path]]]
return[name[paths]]
variable[go_term] assign[=] call[name[self]][name[term]]
return[call[name[_paths_to_top_recursive], parameter[name[go_term]]]] | keyword[def] identifier[paths_to_top] ( identifier[self] , identifier[term] ):
literal[string]
keyword[if] identifier[term] keyword[not] keyword[in] identifier[self] :
identifier[print] ( literal[string] % identifier[term] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[return]
keyword[def] identifier[_paths_to_top_recursive] ( identifier[rec] ):
keyword[if] identifier[rec] . identifier[level] == literal[int] :
keyword[return] [[ identifier[rec] ]]
identifier[paths] =[]
keyword[for] identifier[parent] keyword[in] identifier[rec] . identifier[parents] :
identifier[top_paths] = identifier[_paths_to_top_recursive] ( identifier[parent] )
keyword[for] identifier[top_path] keyword[in] identifier[top_paths] :
identifier[top_path] . identifier[append] ( identifier[rec] )
identifier[paths] . identifier[append] ( identifier[top_path] )
keyword[return] identifier[paths]
identifier[go_term] = identifier[self] [ identifier[term] ]
keyword[return] identifier[_paths_to_top_recursive] ( identifier[go_term] ) | def paths_to_top(self, term):
""" Returns all possible paths to the root node
Each path includes the term given. The order of the path is
top -> bottom, i.e. it starts with the root and ends with the
given term (inclusively).
Parameters:
-----------
- term:
the id of the GO term, where the paths begin (i.e. the
accession 'GO:0003682')
Returns:
--------
- a list of lists of GO Terms
"""
# error handling consistent with original authors
if term not in self:
print('Term %s not found!' % term, file=sys.stderr)
return # depends on [control=['if'], data=['term']]
def _paths_to_top_recursive(rec):
if rec.level == 0:
return [[rec]] # depends on [control=['if'], data=[]]
paths = []
for parent in rec.parents:
top_paths = _paths_to_top_recursive(parent)
for top_path in top_paths:
top_path.append(rec)
paths.append(top_path) # depends on [control=['for'], data=['top_path']] # depends on [control=['for'], data=['parent']]
return paths
go_term = self[term]
return _paths_to_top_recursive(go_term) |
def get_cookbook_path(cookbook_name):
"""Returns path to the cookbook for the given cookbook name"""
for cookbook_path in cookbook_paths:
path = os.path.join(cookbook_path, cookbook_name)
if os.path.exists(path):
return path
raise IOError('Can\'t find cookbook with name "{0}"'.format(cookbook_name)) | def function[get_cookbook_path, parameter[cookbook_name]]:
constant[Returns path to the cookbook for the given cookbook name]
for taget[name[cookbook_path]] in starred[name[cookbook_paths]] begin[:]
variable[path] assign[=] call[name[os].path.join, parameter[name[cookbook_path], name[cookbook_name]]]
if call[name[os].path.exists, parameter[name[path]]] begin[:]
return[name[path]]
<ast.Raise object at 0x7da1b12b83d0> | keyword[def] identifier[get_cookbook_path] ( identifier[cookbook_name] ):
literal[string]
keyword[for] identifier[cookbook_path] keyword[in] identifier[cookbook_paths] :
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[cookbook_path] , identifier[cookbook_name] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ):
keyword[return] identifier[path]
keyword[raise] identifier[IOError] ( literal[string] . identifier[format] ( identifier[cookbook_name] )) | def get_cookbook_path(cookbook_name):
"""Returns path to the cookbook for the given cookbook name"""
for cookbook_path in cookbook_paths:
path = os.path.join(cookbook_path, cookbook_name)
if os.path.exists(path):
return path # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cookbook_path']]
raise IOError('Can\'t find cookbook with name "{0}"'.format(cookbook_name)) |
def _averageFromList(self, param):
""" Averages out values passed as a comma-separated
list, disregarding the zero-valued entries.
"""
_result = 0.0
_count = 0
for _param in param.split(','):
if _param != '' and float(_param) != 0.0:
_result = _result + float(_param)
_count += 1
if _count >= 1:
_result = _result / _count
return _result | def function[_averageFromList, parameter[self, param]]:
constant[ Averages out values passed as a comma-separated
list, disregarding the zero-valued entries.
]
variable[_result] assign[=] constant[0.0]
variable[_count] assign[=] constant[0]
for taget[name[_param]] in starred[call[name[param].split, parameter[constant[,]]]] begin[:]
if <ast.BoolOp object at 0x7da1b1bc9ea0> begin[:]
variable[_result] assign[=] binary_operation[name[_result] + call[name[float], parameter[name[_param]]]]
<ast.AugAssign object at 0x7da1b1b4ad40>
if compare[name[_count] greater_or_equal[>=] constant[1]] begin[:]
variable[_result] assign[=] binary_operation[name[_result] / name[_count]]
return[name[_result]] | keyword[def] identifier[_averageFromList] ( identifier[self] , identifier[param] ):
literal[string]
identifier[_result] = literal[int]
identifier[_count] = literal[int]
keyword[for] identifier[_param] keyword[in] identifier[param] . identifier[split] ( literal[string] ):
keyword[if] identifier[_param] != literal[string] keyword[and] identifier[float] ( identifier[_param] )!= literal[int] :
identifier[_result] = identifier[_result] + identifier[float] ( identifier[_param] )
identifier[_count] += literal[int]
keyword[if] identifier[_count] >= literal[int] :
identifier[_result] = identifier[_result] / identifier[_count]
keyword[return] identifier[_result] | def _averageFromList(self, param):
""" Averages out values passed as a comma-separated
list, disregarding the zero-valued entries.
"""
_result = 0.0
_count = 0
for _param in param.split(','):
if _param != '' and float(_param) != 0.0:
_result = _result + float(_param)
_count += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_param']]
if _count >= 1:
_result = _result / _count # depends on [control=['if'], data=['_count']]
return _result |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.