code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def absolute_values(df, *, column: str, new_column: str = None):
"""
Get the absolute numeric value of each element of a column
---
### Parameters
*mandatory :*
- `column` (*str*): name of the column
*optional :*
- `new_column` (*str*): name of the column containing the result.
By default, no new column will be created and `column` will be replaced.
---
### Example
**Input**
| ENTITY | VALUE_1 | VALUE_2 |
|:------:|:-------:|:-------:|
| A | -1.512 | -1.504 |
| A | 0.432 | 0.14 |
```cson
absolute_values:
column: 'VALUE_1'
new_column: 'Pika'
```
**Output**
| ENTITY | VALUE_1 | VALUE_2 | Pika |
|:------:|:-------:|:-------:|:-----:|
| A | -1.512 | -1.504 | 1.512 |
| A | 0.432 | 0.14 | 0.432 |
"""
new_column = new_column or column
df[new_column] = abs(df[column])
return df
|
def function[absolute_values, parameter[df]]:
constant[
Get the absolute numeric value of each element of a column
---
### Parameters
*mandatory :*
- `column` (*str*): name of the column
*optional :*
- `new_column` (*str*): name of the column containing the result.
By default, no new column will be created and `column` will be replaced.
---
### Example
**Input**
| ENTITY | VALUE_1 | VALUE_2 |
|:------:|:-------:|:-------:|
| A | -1.512 | -1.504 |
| A | 0.432 | 0.14 |
```cson
absolute_values:
column: 'VALUE_1'
new_column: 'Pika'
```
**Output**
| ENTITY | VALUE_1 | VALUE_2 | Pika |
|:------:|:-------:|:-------:|:-----:|
| A | -1.512 | -1.504 | 1.512 |
| A | 0.432 | 0.14 | 0.432 |
]
variable[new_column] assign[=] <ast.BoolOp object at 0x7da1b03b8d60>
call[name[df]][name[new_column]] assign[=] call[name[abs], parameter[call[name[df]][name[column]]]]
return[name[df]]
|
keyword[def] identifier[absolute_values] ( identifier[df] ,*, identifier[column] : identifier[str] , identifier[new_column] : identifier[str] = keyword[None] ):
literal[string]
identifier[new_column] = identifier[new_column] keyword[or] identifier[column]
identifier[df] [ identifier[new_column] ]= identifier[abs] ( identifier[df] [ identifier[column] ])
keyword[return] identifier[df]
|
def absolute_values(df, *, column: str, new_column: str=None):
"""
Get the absolute numeric value of each element of a column
---
### Parameters
*mandatory :*
- `column` (*str*): name of the column
*optional :*
- `new_column` (*str*): name of the column containing the result.
By default, no new column will be created and `column` will be replaced.
---
### Example
**Input**
| ENTITY | VALUE_1 | VALUE_2 |
|:------:|:-------:|:-------:|
| A | -1.512 | -1.504 |
| A | 0.432 | 0.14 |
```cson
absolute_values:
column: 'VALUE_1'
new_column: 'Pika'
```
**Output**
| ENTITY | VALUE_1 | VALUE_2 | Pika |
|:------:|:-------:|:-------:|:-----:|
| A | -1.512 | -1.504 | 1.512 |
| A | 0.432 | 0.14 | 0.432 |
"""
new_column = new_column or column
df[new_column] = abs(df[column])
return df
|
def openssh_tunnel(self, lport, rport, server, remoteip='127.0.0.1',
keyfile=None, password=None, timeout=0.4):
"""
We decided to replace pyzmq's openssh_tunnel method to work around
issue https://github.com/zeromq/pyzmq/issues/589 which was solved
in pyzmq https://github.com/zeromq/pyzmq/pull/615
"""
ssh = "ssh "
if keyfile:
ssh += "-i " + keyfile
if ':' in server:
server, port = server.split(':')
ssh += " -p %s" % port
cmd = "%s -O check %s" % (ssh, server)
(output, exitstatus) = pexpect.run(cmd, withexitstatus=True)
if not exitstatus:
pid = int(output[output.find("(pid=")+5:output.find(")")])
cmd = "%s -O forward -L 127.0.0.1:%i:%s:%i %s" % (
ssh, lport, remoteip, rport, server)
(output, exitstatus) = pexpect.run(cmd, withexitstatus=True)
if not exitstatus:
atexit.register(_stop_tunnel, cmd.replace("-O forward",
"-O cancel",
1))
return pid
cmd = "%s -f -S none -L 127.0.0.1:%i:%s:%i %s sleep %i" % (
ssh, lport, remoteip, rport, server, timeout)
# pop SSH_ASKPASS from env
env = os.environ.copy()
env.pop('SSH_ASKPASS', None)
ssh_newkey = 'Are you sure you want to continue connecting'
tunnel = pexpect.spawn(cmd, env=env)
failed = False
while True:
try:
i = tunnel.expect([ssh_newkey, '[Pp]assword:'], timeout=.1)
if i == 0:
host = server.split('@')[-1]
question = _("The authenticity of host <b>%s</b> can't be "
"established. Are you sure you want to continue "
"connecting?") % host
reply = QMessageBox.question(self, _('Warning'), question,
QMessageBox.Yes | QMessageBox.No,
QMessageBox.No)
if reply == QMessageBox.Yes:
tunnel.sendline('yes')
continue
else:
tunnel.sendline('no')
raise RuntimeError(
_("The authenticity of the host can't be established"))
if i == 1 and password is not None:
tunnel.sendline(password)
except pexpect.TIMEOUT:
continue
except pexpect.EOF:
if tunnel.exitstatus:
raise RuntimeError(_("Tunnel '%s' failed to start") % cmd)
else:
return tunnel.pid
else:
if failed or password is None:
raise RuntimeError(_("Could not connect to remote host"))
# TODO: Use this block when pyzmq bug #620 is fixed
# # Prompt a passphrase dialog to the user for a second attempt
# password, ok = QInputDialog.getText(self, _('Password'),
# _('Enter password for: ') + server,
# echo=QLineEdit.Password)
# if ok is False:
# raise RuntimeError('Could not connect to remote host.')
tunnel.sendline(password)
failed = True
|
def function[openssh_tunnel, parameter[self, lport, rport, server, remoteip, keyfile, password, timeout]]:
constant[
We decided to replace pyzmq's openssh_tunnel method to work around
issue https://github.com/zeromq/pyzmq/issues/589 which was solved
in pyzmq https://github.com/zeromq/pyzmq/pull/615
]
variable[ssh] assign[=] constant[ssh ]
if name[keyfile] begin[:]
<ast.AugAssign object at 0x7da1b1fa1990>
if compare[constant[:] in name[server]] begin[:]
<ast.Tuple object at 0x7da1b1fa2260> assign[=] call[name[server].split, parameter[constant[:]]]
<ast.AugAssign object at 0x7da1b1fa0370>
variable[cmd] assign[=] binary_operation[constant[%s -O check %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1fa33a0>, <ast.Name object at 0x7da1b1fa3820>]]]
<ast.Tuple object at 0x7da1b1fa1330> assign[=] call[name[pexpect].run, parameter[name[cmd]]]
if <ast.UnaryOp object at 0x7da1b1fa2440> begin[:]
variable[pid] assign[=] call[name[int], parameter[call[name[output]][<ast.Slice object at 0x7da1b1fa3df0>]]]
variable[cmd] assign[=] binary_operation[constant[%s -O forward -L 127.0.0.1:%i:%s:%i %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1fa3a30>, <ast.Name object at 0x7da1b1fa3ca0>, <ast.Name object at 0x7da1b1fa2f80>, <ast.Name object at 0x7da1b1fa3a00>, <ast.Name object at 0x7da1b1fa29b0>]]]
<ast.Tuple object at 0x7da1b1fa39a0> assign[=] call[name[pexpect].run, parameter[name[cmd]]]
if <ast.UnaryOp object at 0x7da1b1fa2560> begin[:]
call[name[atexit].register, parameter[name[_stop_tunnel], call[name[cmd].replace, parameter[constant[-O forward], constant[-O cancel], constant[1]]]]]
return[name[pid]]
variable[cmd] assign[=] binary_operation[constant[%s -f -S none -L 127.0.0.1:%i:%s:%i %s sleep %i] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1fa2230>, <ast.Name object at 0x7da1b1fa1b40>, <ast.Name object at 0x7da1b1fa12d0>, <ast.Name object at 0x7da1b1fa2a70>, <ast.Name object at 0x7da1b1fa2530>, <ast.Name object at 0x7da1b1fa01c0>]]]
variable[env] assign[=] call[name[os].environ.copy, parameter[]]
call[name[env].pop, parameter[constant[SSH_ASKPASS], constant[None]]]
variable[ssh_newkey] assign[=] constant[Are you sure you want to continue connecting]
variable[tunnel] assign[=] call[name[pexpect].spawn, parameter[name[cmd]]]
variable[failed] assign[=] constant[False]
while constant[True] begin[:]
<ast.Try object at 0x7da1b1fa3550>
|
keyword[def] identifier[openssh_tunnel] ( identifier[self] , identifier[lport] , identifier[rport] , identifier[server] , identifier[remoteip] = literal[string] ,
identifier[keyfile] = keyword[None] , identifier[password] = keyword[None] , identifier[timeout] = literal[int] ):
literal[string]
identifier[ssh] = literal[string]
keyword[if] identifier[keyfile] :
identifier[ssh] += literal[string] + identifier[keyfile]
keyword[if] literal[string] keyword[in] identifier[server] :
identifier[server] , identifier[port] = identifier[server] . identifier[split] ( literal[string] )
identifier[ssh] += literal[string] % identifier[port]
identifier[cmd] = literal[string] %( identifier[ssh] , identifier[server] )
( identifier[output] , identifier[exitstatus] )= identifier[pexpect] . identifier[run] ( identifier[cmd] , identifier[withexitstatus] = keyword[True] )
keyword[if] keyword[not] identifier[exitstatus] :
identifier[pid] = identifier[int] ( identifier[output] [ identifier[output] . identifier[find] ( literal[string] )+ literal[int] : identifier[output] . identifier[find] ( literal[string] )])
identifier[cmd] = literal[string] %(
identifier[ssh] , identifier[lport] , identifier[remoteip] , identifier[rport] , identifier[server] )
( identifier[output] , identifier[exitstatus] )= identifier[pexpect] . identifier[run] ( identifier[cmd] , identifier[withexitstatus] = keyword[True] )
keyword[if] keyword[not] identifier[exitstatus] :
identifier[atexit] . identifier[register] ( identifier[_stop_tunnel] , identifier[cmd] . identifier[replace] ( literal[string] ,
literal[string] ,
literal[int] ))
keyword[return] identifier[pid]
identifier[cmd] = literal[string] %(
identifier[ssh] , identifier[lport] , identifier[remoteip] , identifier[rport] , identifier[server] , identifier[timeout] )
identifier[env] = identifier[os] . identifier[environ] . identifier[copy] ()
identifier[env] . identifier[pop] ( literal[string] , keyword[None] )
identifier[ssh_newkey] = literal[string]
identifier[tunnel] = identifier[pexpect] . identifier[spawn] ( identifier[cmd] , identifier[env] = identifier[env] )
identifier[failed] = keyword[False]
keyword[while] keyword[True] :
keyword[try] :
identifier[i] = identifier[tunnel] . identifier[expect] ([ identifier[ssh_newkey] , literal[string] ], identifier[timeout] = literal[int] )
keyword[if] identifier[i] == literal[int] :
identifier[host] = identifier[server] . identifier[split] ( literal[string] )[- literal[int] ]
identifier[question] = identifier[_] ( literal[string]
literal[string]
literal[string] )% identifier[host]
identifier[reply] = identifier[QMessageBox] . identifier[question] ( identifier[self] , identifier[_] ( literal[string] ), identifier[question] ,
identifier[QMessageBox] . identifier[Yes] | identifier[QMessageBox] . identifier[No] ,
identifier[QMessageBox] . identifier[No] )
keyword[if] identifier[reply] == identifier[QMessageBox] . identifier[Yes] :
identifier[tunnel] . identifier[sendline] ( literal[string] )
keyword[continue]
keyword[else] :
identifier[tunnel] . identifier[sendline] ( literal[string] )
keyword[raise] identifier[RuntimeError] (
identifier[_] ( literal[string] ))
keyword[if] identifier[i] == literal[int] keyword[and] identifier[password] keyword[is] keyword[not] keyword[None] :
identifier[tunnel] . identifier[sendline] ( identifier[password] )
keyword[except] identifier[pexpect] . identifier[TIMEOUT] :
keyword[continue]
keyword[except] identifier[pexpect] . identifier[EOF] :
keyword[if] identifier[tunnel] . identifier[exitstatus] :
keyword[raise] identifier[RuntimeError] ( identifier[_] ( literal[string] )% identifier[cmd] )
keyword[else] :
keyword[return] identifier[tunnel] . identifier[pid]
keyword[else] :
keyword[if] identifier[failed] keyword[or] identifier[password] keyword[is] keyword[None] :
keyword[raise] identifier[RuntimeError] ( identifier[_] ( literal[string] ))
identifier[tunnel] . identifier[sendline] ( identifier[password] )
identifier[failed] = keyword[True]
|
def openssh_tunnel(self, lport, rport, server, remoteip='127.0.0.1', keyfile=None, password=None, timeout=0.4):
"""
We decided to replace pyzmq's openssh_tunnel method to work around
issue https://github.com/zeromq/pyzmq/issues/589 which was solved
in pyzmq https://github.com/zeromq/pyzmq/pull/615
"""
ssh = 'ssh '
if keyfile:
ssh += '-i ' + keyfile # depends on [control=['if'], data=[]]
if ':' in server:
(server, port) = server.split(':')
ssh += ' -p %s' % port # depends on [control=['if'], data=['server']]
cmd = '%s -O check %s' % (ssh, server)
(output, exitstatus) = pexpect.run(cmd, withexitstatus=True)
if not exitstatus:
pid = int(output[output.find('(pid=') + 5:output.find(')')])
cmd = '%s -O forward -L 127.0.0.1:%i:%s:%i %s' % (ssh, lport, remoteip, rport, server)
(output, exitstatus) = pexpect.run(cmd, withexitstatus=True)
if not exitstatus:
atexit.register(_stop_tunnel, cmd.replace('-O forward', '-O cancel', 1))
return pid # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
cmd = '%s -f -S none -L 127.0.0.1:%i:%s:%i %s sleep %i' % (ssh, lport, remoteip, rport, server, timeout)
# pop SSH_ASKPASS from env
env = os.environ.copy()
env.pop('SSH_ASKPASS', None)
ssh_newkey = 'Are you sure you want to continue connecting'
tunnel = pexpect.spawn(cmd, env=env)
failed = False
while True:
try:
i = tunnel.expect([ssh_newkey, '[Pp]assword:'], timeout=0.1)
if i == 0:
host = server.split('@')[-1]
question = _("The authenticity of host <b>%s</b> can't be established. Are you sure you want to continue connecting?") % host
reply = QMessageBox.question(self, _('Warning'), question, QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
tunnel.sendline('yes')
continue # depends on [control=['if'], data=[]]
else:
tunnel.sendline('no')
raise RuntimeError(_("The authenticity of the host can't be established")) # depends on [control=['if'], data=[]]
if i == 1 and password is not None:
tunnel.sendline(password) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except pexpect.TIMEOUT:
continue # depends on [control=['except'], data=[]]
except pexpect.EOF:
if tunnel.exitstatus:
raise RuntimeError(_("Tunnel '%s' failed to start") % cmd) # depends on [control=['if'], data=[]]
else:
return tunnel.pid # depends on [control=['except'], data=[]]
else:
if failed or password is None:
raise RuntimeError(_('Could not connect to remote host')) # depends on [control=['if'], data=[]]
# TODO: Use this block when pyzmq bug #620 is fixed
# # Prompt a passphrase dialog to the user for a second attempt
# password, ok = QInputDialog.getText(self, _('Password'),
# _('Enter password for: ') + server,
# echo=QLineEdit.Password)
# if ok is False:
# raise RuntimeError('Could not connect to remote host.')
tunnel.sendline(password)
failed = True # depends on [control=['while'], data=[]]
|
def list_workspaces(self,
page_limit=None,
include_count=None,
sort=None,
cursor=None,
include_audit=None,
**kwargs):
"""
List workspaces.
List the workspaces associated with a Watson Assistant service instance.
This operation is limited to 500 requests per 30 minutes. For more information,
see **Rate limiting**.
:param int page_limit: The number of records to return in each page of results.
:param bool include_count: Whether to include information about the number of
records returned.
:param str sort: The attribute by which returned workspaces will be sorted. To
reverse the sort order, prefix the value with a minus sign (`-`).
:param str cursor: A token identifying the page of results to retrieve.
:param bool include_audit: Whether to include the audit properties (`created` and
`updated` timestamps) in the response.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
"""
headers = {}
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
sdk_headers = get_sdk_headers('conversation', 'V1', 'list_workspaces')
headers.update(sdk_headers)
params = {
'version': self.version,
'page_limit': page_limit,
'include_count': include_count,
'sort': sort,
'cursor': cursor,
'include_audit': include_audit
}
url = '/v1/workspaces'
response = self.request(
method='GET',
url=url,
headers=headers,
params=params,
accept_json=True)
return response
|
def function[list_workspaces, parameter[self, page_limit, include_count, sort, cursor, include_audit]]:
constant[
List workspaces.
List the workspaces associated with a Watson Assistant service instance.
This operation is limited to 500 requests per 30 minutes. For more information,
see **Rate limiting**.
:param int page_limit: The number of records to return in each page of results.
:param bool include_count: Whether to include information about the number of
records returned.
:param str sort: The attribute by which returned workspaces will be sorted. To
reverse the sort order, prefix the value with a minus sign (`-`).
:param str cursor: A token identifying the page of results to retrieve.
:param bool include_audit: Whether to include the audit properties (`created` and
`updated` timestamps) in the response.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
]
variable[headers] assign[=] dictionary[[], []]
if compare[constant[headers] in name[kwargs]] begin[:]
call[name[headers].update, parameter[call[name[kwargs].get, parameter[constant[headers]]]]]
variable[sdk_headers] assign[=] call[name[get_sdk_headers], parameter[constant[conversation], constant[V1], constant[list_workspaces]]]
call[name[headers].update, parameter[name[sdk_headers]]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18bcca680>, <ast.Constant object at 0x7da18bcc81c0>, <ast.Constant object at 0x7da18bcca2c0>, <ast.Constant object at 0x7da18bcc82b0>, <ast.Constant object at 0x7da18bcc89a0>, <ast.Constant object at 0x7da18bccab00>], [<ast.Attribute object at 0x7da18bcc9b40>, <ast.Name object at 0x7da18bcc9420>, <ast.Name object at 0x7da18bcc8910>, <ast.Name object at 0x7da18bcc8190>, <ast.Name object at 0x7da18bcc9a50>, <ast.Name object at 0x7da18bccba90>]]
variable[url] assign[=] constant[/v1/workspaces]
variable[response] assign[=] call[name[self].request, parameter[]]
return[name[response]]
|
keyword[def] identifier[list_workspaces] ( identifier[self] ,
identifier[page_limit] = keyword[None] ,
identifier[include_count] = keyword[None] ,
identifier[sort] = keyword[None] ,
identifier[cursor] = keyword[None] ,
identifier[include_audit] = keyword[None] ,
** identifier[kwargs] ):
literal[string]
identifier[headers] ={}
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[headers] . identifier[update] ( identifier[kwargs] . identifier[get] ( literal[string] ))
identifier[sdk_headers] = identifier[get_sdk_headers] ( literal[string] , literal[string] , literal[string] )
identifier[headers] . identifier[update] ( identifier[sdk_headers] )
identifier[params] ={
literal[string] : identifier[self] . identifier[version] ,
literal[string] : identifier[page_limit] ,
literal[string] : identifier[include_count] ,
literal[string] : identifier[sort] ,
literal[string] : identifier[cursor] ,
literal[string] : identifier[include_audit]
}
identifier[url] = literal[string]
identifier[response] = identifier[self] . identifier[request] (
identifier[method] = literal[string] ,
identifier[url] = identifier[url] ,
identifier[headers] = identifier[headers] ,
identifier[params] = identifier[params] ,
identifier[accept_json] = keyword[True] )
keyword[return] identifier[response]
|
def list_workspaces(self, page_limit=None, include_count=None, sort=None, cursor=None, include_audit=None, **kwargs):
"""
List workspaces.
List the workspaces associated with a Watson Assistant service instance.
This operation is limited to 500 requests per 30 minutes. For more information,
see **Rate limiting**.
:param int page_limit: The number of records to return in each page of results.
:param bool include_count: Whether to include information about the number of
records returned.
:param str sort: The attribute by which returned workspaces will be sorted. To
reverse the sort order, prefix the value with a minus sign (`-`).
:param str cursor: A token identifying the page of results to retrieve.
:param bool include_audit: Whether to include the audit properties (`created` and
`updated` timestamps) in the response.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
"""
headers = {}
if 'headers' in kwargs:
headers.update(kwargs.get('headers')) # depends on [control=['if'], data=['kwargs']]
sdk_headers = get_sdk_headers('conversation', 'V1', 'list_workspaces')
headers.update(sdk_headers)
params = {'version': self.version, 'page_limit': page_limit, 'include_count': include_count, 'sort': sort, 'cursor': cursor, 'include_audit': include_audit}
url = '/v1/workspaces'
response = self.request(method='GET', url=url, headers=headers, params=params, accept_json=True)
return response
|
def comply(self, path):
"""Issues a chown and chmod to the file paths specified."""
utils.ensure_permissions(path, self.user.pw_name, self.group.gr_name,
self.mode)
|
def function[comply, parameter[self, path]]:
constant[Issues a chown and chmod to the file paths specified.]
call[name[utils].ensure_permissions, parameter[name[path], name[self].user.pw_name, name[self].group.gr_name, name[self].mode]]
|
keyword[def] identifier[comply] ( identifier[self] , identifier[path] ):
literal[string]
identifier[utils] . identifier[ensure_permissions] ( identifier[path] , identifier[self] . identifier[user] . identifier[pw_name] , identifier[self] . identifier[group] . identifier[gr_name] ,
identifier[self] . identifier[mode] )
|
def comply(self, path):
"""Issues a chown and chmod to the file paths specified."""
utils.ensure_permissions(path, self.user.pw_name, self.group.gr_name, self.mode)
|
def interrupt_kernel(self):
""" Attempts to interrupt the running kernel.
Also unsets _reading flag, to avoid runtime errors
if raw_input is called again.
"""
if self.custom_interrupt:
self._reading = False
self.custom_interrupt_requested.emit()
elif self.kernel_manager.has_kernel:
self._reading = False
self.kernel_manager.interrupt_kernel()
else:
self._append_plain_text('Kernel process is either remote or '
'unspecified. Cannot interrupt.\n')
|
def function[interrupt_kernel, parameter[self]]:
constant[ Attempts to interrupt the running kernel.
Also unsets _reading flag, to avoid runtime errors
if raw_input is called again.
]
if name[self].custom_interrupt begin[:]
name[self]._reading assign[=] constant[False]
call[name[self].custom_interrupt_requested.emit, parameter[]]
|
keyword[def] identifier[interrupt_kernel] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[custom_interrupt] :
identifier[self] . identifier[_reading] = keyword[False]
identifier[self] . identifier[custom_interrupt_requested] . identifier[emit] ()
keyword[elif] identifier[self] . identifier[kernel_manager] . identifier[has_kernel] :
identifier[self] . identifier[_reading] = keyword[False]
identifier[self] . identifier[kernel_manager] . identifier[interrupt_kernel] ()
keyword[else] :
identifier[self] . identifier[_append_plain_text] ( literal[string]
literal[string] )
|
def interrupt_kernel(self):
""" Attempts to interrupt the running kernel.
Also unsets _reading flag, to avoid runtime errors
if raw_input is called again.
"""
if self.custom_interrupt:
self._reading = False
self.custom_interrupt_requested.emit() # depends on [control=['if'], data=[]]
elif self.kernel_manager.has_kernel:
self._reading = False
self.kernel_manager.interrupt_kernel() # depends on [control=['if'], data=[]]
else:
self._append_plain_text('Kernel process is either remote or unspecified. Cannot interrupt.\n')
|
def construct(cls, project, *, run=None, name=None, data=None, **desc):
"""
Construct an animation, set the runner, and add in the two
"reserved fields" `name` and `data`.
"""
from . failed import Failed
exception = desc.pop('_exception', None)
if exception:
a = Failed(project.layout, desc, exception)
else:
try:
a = cls(project.layout, **desc)
a._set_runner(run or {})
except Exception as e:
if cls.FAIL_ON_EXCEPTION:
raise
a = Failed(project.layout, desc, e)
a.name = name
a.data = data
return a
|
def function[construct, parameter[cls, project]]:
constant[
Construct an animation, set the runner, and add in the two
"reserved fields" `name` and `data`.
]
from relative_module[failed] import module[Failed]
variable[exception] assign[=] call[name[desc].pop, parameter[constant[_exception], constant[None]]]
if name[exception] begin[:]
variable[a] assign[=] call[name[Failed], parameter[name[project].layout, name[desc], name[exception]]]
name[a].name assign[=] name[name]
name[a].data assign[=] name[data]
return[name[a]]
|
keyword[def] identifier[construct] ( identifier[cls] , identifier[project] ,*, identifier[run] = keyword[None] , identifier[name] = keyword[None] , identifier[data] = keyword[None] ,** identifier[desc] ):
literal[string]
keyword[from] . identifier[failed] keyword[import] identifier[Failed]
identifier[exception] = identifier[desc] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[exception] :
identifier[a] = identifier[Failed] ( identifier[project] . identifier[layout] , identifier[desc] , identifier[exception] )
keyword[else] :
keyword[try] :
identifier[a] = identifier[cls] ( identifier[project] . identifier[layout] ,** identifier[desc] )
identifier[a] . identifier[_set_runner] ( identifier[run] keyword[or] {})
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] identifier[cls] . identifier[FAIL_ON_EXCEPTION] :
keyword[raise]
identifier[a] = identifier[Failed] ( identifier[project] . identifier[layout] , identifier[desc] , identifier[e] )
identifier[a] . identifier[name] = identifier[name]
identifier[a] . identifier[data] = identifier[data]
keyword[return] identifier[a]
|
def construct(cls, project, *, run=None, name=None, data=None, **desc):
"""
Construct an animation, set the runner, and add in the two
"reserved fields" `name` and `data`.
"""
from .failed import Failed
exception = desc.pop('_exception', None)
if exception:
a = Failed(project.layout, desc, exception) # depends on [control=['if'], data=[]]
else:
try:
a = cls(project.layout, **desc)
a._set_runner(run or {}) # depends on [control=['try'], data=[]]
except Exception as e:
if cls.FAIL_ON_EXCEPTION:
raise # depends on [control=['if'], data=[]]
a = Failed(project.layout, desc, e) # depends on [control=['except'], data=['e']]
a.name = name
a.data = data
return a
|
def fitNull(self, verbose=False, cache=False, out_dir='./cache', fname=None, rewrite=False, seed=None, n_times=10, factr=1e3, init_method=None):
"""
Fit null model
"""
if seed is not None: sp.random.seed(seed)
read_from_file = False
if cache:
assert fname is not None, 'MultiTraitSetTest:: specify fname'
if not os.path.exists(out_dir):
os.makedirs(out_dir)
out_file = os.path.join(out_dir,fname)
read_from_file = os.path.exists(out_file) and not rewrite
RV = {}
if read_from_file:
f = h5py.File(out_file,'r')
for key in list(f.keys()):
RV[key] = f[key][:]
f.close()
self.setNull(RV)
else:
start = TIME.time()
if self.bgRE:
self._gpNull = GP2KronSum(Y=self.Y, F=None, A=None, Cg=self.Cg, Cn=self.Cn, R=None, S_R=self.S_R, U_R=self.U_R)
else:
self._gpNull = GP2KronSumLR(self.Y, self.Cn, G=sp.ones((self.N,1)), F=self.F, A=self.A)
# freezes Cg to 0
n_params = self._gpNull.covar.Cr.getNumberParams()
self._gpNull.covar.Cr.setParams(1e-9 * sp.ones(n_params))
self._gpNull.covar.act_Cr = False
for i in range(n_times):
params0 = self._initParams(init_method=init_method)
self._gpNull.setParams(params0)
conv, info = self._gpNull.optimize(verbose=verbose, factr=factr)
if conv: break
if not conv: warnings.warn("not converged")
LMLgrad = (self._gpNull.LML_grad()['covar']**2).mean()
LML = self._gpNull.LML()
if self._gpNull.mean.n_terms==1:
RV['B'] = self._gpNull.mean.B[0]
elif self._gpNull.mean.n_terms>1:
warning.warn('generalize to more than 1 fixed effect term')
if self.bgRE:
RV['params0_g'] = self.Cg.getParams()
else:
RV['params0_g'] = sp.zeros_like(self.Cn.getParams())
RV['params0_n'] = self.Cn.getParams()
if self.bgRE:
RV['Cg'] = self.Cg.K()
else:
RV['Cg'] = sp.zeros_like(self.Cn.K())
RV['Cn'] = self.Cn.K()
RV['conv'] = sp.array([conv])
RV['time'] = sp.array([TIME.time()-start])
RV['NLL0'] = sp.array([LML])
RV['LMLgrad'] = sp.array([LMLgrad])
RV['nit'] = sp.array([info['nit']])
RV['funcalls'] = sp.array([info['funcalls']])
self.null = RV
if cache:
f = h5py.File(out_file,'w')
smartDumpDictHdf5(RV,f)
f.close()
return RV
|
def function[fitNull, parameter[self, verbose, cache, out_dir, fname, rewrite, seed, n_times, factr, init_method]]:
constant[
Fit null model
]
if compare[name[seed] is_not constant[None]] begin[:]
call[name[sp].random.seed, parameter[name[seed]]]
variable[read_from_file] assign[=] constant[False]
if name[cache] begin[:]
assert[compare[name[fname] is_not constant[None]]]
if <ast.UnaryOp object at 0x7da204963c40> begin[:]
call[name[os].makedirs, parameter[name[out_dir]]]
variable[out_file] assign[=] call[name[os].path.join, parameter[name[out_dir], name[fname]]]
variable[read_from_file] assign[=] <ast.BoolOp object at 0x7da204961a80>
variable[RV] assign[=] dictionary[[], []]
if name[read_from_file] begin[:]
variable[f] assign[=] call[name[h5py].File, parameter[name[out_file], constant[r]]]
for taget[name[key]] in starred[call[name[list], parameter[call[name[f].keys, parameter[]]]]] begin[:]
call[name[RV]][name[key]] assign[=] call[call[name[f]][name[key]]][<ast.Slice object at 0x7da204963100>]
call[name[f].close, parameter[]]
call[name[self].setNull, parameter[name[RV]]]
return[name[RV]]
|
keyword[def] identifier[fitNull] ( identifier[self] , identifier[verbose] = keyword[False] , identifier[cache] = keyword[False] , identifier[out_dir] = literal[string] , identifier[fname] = keyword[None] , identifier[rewrite] = keyword[False] , identifier[seed] = keyword[None] , identifier[n_times] = literal[int] , identifier[factr] = literal[int] , identifier[init_method] = keyword[None] ):
literal[string]
keyword[if] identifier[seed] keyword[is] keyword[not] keyword[None] : identifier[sp] . identifier[random] . identifier[seed] ( identifier[seed] )
identifier[read_from_file] = keyword[False]
keyword[if] identifier[cache] :
keyword[assert] identifier[fname] keyword[is] keyword[not] keyword[None] , literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[out_dir] ):
identifier[os] . identifier[makedirs] ( identifier[out_dir] )
identifier[out_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[out_dir] , identifier[fname] )
identifier[read_from_file] = identifier[os] . identifier[path] . identifier[exists] ( identifier[out_file] ) keyword[and] keyword[not] identifier[rewrite]
identifier[RV] ={}
keyword[if] identifier[read_from_file] :
identifier[f] = identifier[h5py] . identifier[File] ( identifier[out_file] , literal[string] )
keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[f] . identifier[keys] ()):
identifier[RV] [ identifier[key] ]= identifier[f] [ identifier[key] ][:]
identifier[f] . identifier[close] ()
identifier[self] . identifier[setNull] ( identifier[RV] )
keyword[else] :
identifier[start] = identifier[TIME] . identifier[time] ()
keyword[if] identifier[self] . identifier[bgRE] :
identifier[self] . identifier[_gpNull] = identifier[GP2KronSum] ( identifier[Y] = identifier[self] . identifier[Y] , identifier[F] = keyword[None] , identifier[A] = keyword[None] , identifier[Cg] = identifier[self] . identifier[Cg] , identifier[Cn] = identifier[self] . identifier[Cn] , identifier[R] = keyword[None] , identifier[S_R] = identifier[self] . identifier[S_R] , identifier[U_R] = identifier[self] . identifier[U_R] )
keyword[else] :
identifier[self] . identifier[_gpNull] = identifier[GP2KronSumLR] ( identifier[self] . identifier[Y] , identifier[self] . identifier[Cn] , identifier[G] = identifier[sp] . identifier[ones] (( identifier[self] . identifier[N] , literal[int] )), identifier[F] = identifier[self] . identifier[F] , identifier[A] = identifier[self] . identifier[A] )
identifier[n_params] = identifier[self] . identifier[_gpNull] . identifier[covar] . identifier[Cr] . identifier[getNumberParams] ()
identifier[self] . identifier[_gpNull] . identifier[covar] . identifier[Cr] . identifier[setParams] ( literal[int] * identifier[sp] . identifier[ones] ( identifier[n_params] ))
identifier[self] . identifier[_gpNull] . identifier[covar] . identifier[act_Cr] = keyword[False]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_times] ):
identifier[params0] = identifier[self] . identifier[_initParams] ( identifier[init_method] = identifier[init_method] )
identifier[self] . identifier[_gpNull] . identifier[setParams] ( identifier[params0] )
identifier[conv] , identifier[info] = identifier[self] . identifier[_gpNull] . identifier[optimize] ( identifier[verbose] = identifier[verbose] , identifier[factr] = identifier[factr] )
keyword[if] identifier[conv] : keyword[break]
keyword[if] keyword[not] identifier[conv] : identifier[warnings] . identifier[warn] ( literal[string] )
identifier[LMLgrad] =( identifier[self] . identifier[_gpNull] . identifier[LML_grad] ()[ literal[string] ]** literal[int] ). identifier[mean] ()
identifier[LML] = identifier[self] . identifier[_gpNull] . identifier[LML] ()
keyword[if] identifier[self] . identifier[_gpNull] . identifier[mean] . identifier[n_terms] == literal[int] :
identifier[RV] [ literal[string] ]= identifier[self] . identifier[_gpNull] . identifier[mean] . identifier[B] [ literal[int] ]
keyword[elif] identifier[self] . identifier[_gpNull] . identifier[mean] . identifier[n_terms] > literal[int] :
identifier[warning] . identifier[warn] ( literal[string] )
keyword[if] identifier[self] . identifier[bgRE] :
identifier[RV] [ literal[string] ]= identifier[self] . identifier[Cg] . identifier[getParams] ()
keyword[else] :
identifier[RV] [ literal[string] ]= identifier[sp] . identifier[zeros_like] ( identifier[self] . identifier[Cn] . identifier[getParams] ())
identifier[RV] [ literal[string] ]= identifier[self] . identifier[Cn] . identifier[getParams] ()
keyword[if] identifier[self] . identifier[bgRE] :
identifier[RV] [ literal[string] ]= identifier[self] . identifier[Cg] . identifier[K] ()
keyword[else] :
identifier[RV] [ literal[string] ]= identifier[sp] . identifier[zeros_like] ( identifier[self] . identifier[Cn] . identifier[K] ())
identifier[RV] [ literal[string] ]= identifier[self] . identifier[Cn] . identifier[K] ()
identifier[RV] [ literal[string] ]= identifier[sp] . identifier[array] ([ identifier[conv] ])
identifier[RV] [ literal[string] ]= identifier[sp] . identifier[array] ([ identifier[TIME] . identifier[time] ()- identifier[start] ])
identifier[RV] [ literal[string] ]= identifier[sp] . identifier[array] ([ identifier[LML] ])
identifier[RV] [ literal[string] ]= identifier[sp] . identifier[array] ([ identifier[LMLgrad] ])
identifier[RV] [ literal[string] ]= identifier[sp] . identifier[array] ([ identifier[info] [ literal[string] ]])
identifier[RV] [ literal[string] ]= identifier[sp] . identifier[array] ([ identifier[info] [ literal[string] ]])
identifier[self] . identifier[null] = identifier[RV]
keyword[if] identifier[cache] :
identifier[f] = identifier[h5py] . identifier[File] ( identifier[out_file] , literal[string] )
identifier[smartDumpDictHdf5] ( identifier[RV] , identifier[f] )
identifier[f] . identifier[close] ()
keyword[return] identifier[RV]
|
def fitNull(self, verbose=False, cache=False, out_dir='./cache', fname=None, rewrite=False, seed=None, n_times=10, factr=1000.0, init_method=None):
"""
Fit null model
"""
if seed is not None:
sp.random.seed(seed) # depends on [control=['if'], data=['seed']]
read_from_file = False
if cache:
assert fname is not None, 'MultiTraitSetTest:: specify fname'
if not os.path.exists(out_dir):
os.makedirs(out_dir) # depends on [control=['if'], data=[]]
out_file = os.path.join(out_dir, fname)
read_from_file = os.path.exists(out_file) and (not rewrite) # depends on [control=['if'], data=[]]
RV = {}
if read_from_file:
f = h5py.File(out_file, 'r')
for key in list(f.keys()):
RV[key] = f[key][:] # depends on [control=['for'], data=['key']]
f.close()
self.setNull(RV) # depends on [control=['if'], data=[]]
else:
start = TIME.time()
if self.bgRE:
self._gpNull = GP2KronSum(Y=self.Y, F=None, A=None, Cg=self.Cg, Cn=self.Cn, R=None, S_R=self.S_R, U_R=self.U_R) # depends on [control=['if'], data=[]]
else:
self._gpNull = GP2KronSumLR(self.Y, self.Cn, G=sp.ones((self.N, 1)), F=self.F, A=self.A)
# freezes Cg to 0
n_params = self._gpNull.covar.Cr.getNumberParams()
self._gpNull.covar.Cr.setParams(1e-09 * sp.ones(n_params))
self._gpNull.covar.act_Cr = False
for i in range(n_times):
params0 = self._initParams(init_method=init_method)
self._gpNull.setParams(params0)
(conv, info) = self._gpNull.optimize(verbose=verbose, factr=factr)
if conv:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if not conv:
warnings.warn('not converged') # depends on [control=['if'], data=[]]
LMLgrad = (self._gpNull.LML_grad()['covar'] ** 2).mean()
LML = self._gpNull.LML()
if self._gpNull.mean.n_terms == 1:
RV['B'] = self._gpNull.mean.B[0] # depends on [control=['if'], data=[]]
elif self._gpNull.mean.n_terms > 1:
warning.warn('generalize to more than 1 fixed effect term') # depends on [control=['if'], data=[]]
if self.bgRE:
RV['params0_g'] = self.Cg.getParams() # depends on [control=['if'], data=[]]
else:
RV['params0_g'] = sp.zeros_like(self.Cn.getParams())
RV['params0_n'] = self.Cn.getParams()
if self.bgRE:
RV['Cg'] = self.Cg.K() # depends on [control=['if'], data=[]]
else:
RV['Cg'] = sp.zeros_like(self.Cn.K())
RV['Cn'] = self.Cn.K()
RV['conv'] = sp.array([conv])
RV['time'] = sp.array([TIME.time() - start])
RV['NLL0'] = sp.array([LML])
RV['LMLgrad'] = sp.array([LMLgrad])
RV['nit'] = sp.array([info['nit']])
RV['funcalls'] = sp.array([info['funcalls']])
self.null = RV
if cache:
f = h5py.File(out_file, 'w')
smartDumpDictHdf5(RV, f)
f.close() # depends on [control=['if'], data=[]]
return RV
|
def fnmatchcase(name, pat):
"""Test whether FILENAME matches PATTERN, including case.
This is a version of fnmatch() which doesn't case-normalize
its arguments.
"""
try:
re_pat = _cache[pat]
except KeyError:
res = translate(pat)
if len(_cache) >= _MAXCACHE:
# _cache.clear()
globals()['_cache'] = {}
_cache[pat] = re_pat = re.compile(res)
return re_pat.match(name) is not None
|
def function[fnmatchcase, parameter[name, pat]]:
constant[Test whether FILENAME matches PATTERN, including case.
This is a version of fnmatch() which doesn't case-normalize
its arguments.
]
<ast.Try object at 0x7da1b2345d80>
return[compare[call[name[re_pat].match, parameter[name[name]]] is_not constant[None]]]
|
keyword[def] identifier[fnmatchcase] ( identifier[name] , identifier[pat] ):
literal[string]
keyword[try] :
identifier[re_pat] = identifier[_cache] [ identifier[pat] ]
keyword[except] identifier[KeyError] :
identifier[res] = identifier[translate] ( identifier[pat] )
keyword[if] identifier[len] ( identifier[_cache] )>= identifier[_MAXCACHE] :
identifier[globals] ()[ literal[string] ]={}
identifier[_cache] [ identifier[pat] ]= identifier[re_pat] = identifier[re] . identifier[compile] ( identifier[res] )
keyword[return] identifier[re_pat] . identifier[match] ( identifier[name] ) keyword[is] keyword[not] keyword[None]
|
def fnmatchcase(name, pat):
"""Test whether FILENAME matches PATTERN, including case.
This is a version of fnmatch() which doesn't case-normalize
its arguments.
"""
try:
re_pat = _cache[pat] # depends on [control=['try'], data=[]]
except KeyError:
res = translate(pat)
if len(_cache) >= _MAXCACHE:
# _cache.clear()
globals()['_cache'] = {} # depends on [control=['if'], data=[]]
_cache[pat] = re_pat = re.compile(res) # depends on [control=['except'], data=[]]
return re_pat.match(name) is not None
|
def _get_raw_specs(self, config):
"""
This method extract only the "Validate.spec" from
modules that were collected by ConfigReader._get_modules().
And, this method append "Validate.spec" to raw_specs.
This method creates a dictionary like the following:
raw_specs = {
'redis': (
"[redis]",
"host = string(default='127.0.0.1')",
"port = integer(0, 65535, default=6379)",
"db = integer(default=0)",
"charset = string(default='utf-8')",
"password = string(default=None)"
),
...
}
raw_specs is used by ConfigReader._create_specs().
"""
# spec_name is hard-corded
raw_specs = {}
spec_name = 'Validator'
modules = self._get_modules()
for section, options in config.items():
if section == 'global':
continue
try:
name = options['module']
except KeyError:
raise ConfigMissingValue(section, 'module')
try:
spec = getattr(modules[name], spec_name)().spec
raw_specs[name] = spec
except KeyError:
raise NotSupportedError(name)
return raw_specs
|
def function[_get_raw_specs, parameter[self, config]]:
constant[
This method extract only the "Validate.spec" from
modules that were collected by ConfigReader._get_modules().
And, this method append "Validate.spec" to raw_specs.
This method creates a dictionary like the following:
raw_specs = {
'redis': (
"[redis]",
"host = string(default='127.0.0.1')",
"port = integer(0, 65535, default=6379)",
"db = integer(default=0)",
"charset = string(default='utf-8')",
"password = string(default=None)"
),
...
}
raw_specs is used by ConfigReader._create_specs().
]
variable[raw_specs] assign[=] dictionary[[], []]
variable[spec_name] assign[=] constant[Validator]
variable[modules] assign[=] call[name[self]._get_modules, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b0a61000>, <ast.Name object at 0x7da1b0a60d60>]]] in starred[call[name[config].items, parameter[]]] begin[:]
if compare[name[section] equal[==] constant[global]] begin[:]
continue
<ast.Try object at 0x7da1b0a61480>
<ast.Try object at 0x7da1b0a63850>
return[name[raw_specs]]
|
keyword[def] identifier[_get_raw_specs] ( identifier[self] , identifier[config] ):
literal[string]
identifier[raw_specs] ={}
identifier[spec_name] = literal[string]
identifier[modules] = identifier[self] . identifier[_get_modules] ()
keyword[for] identifier[section] , identifier[options] keyword[in] identifier[config] . identifier[items] ():
keyword[if] identifier[section] == literal[string] :
keyword[continue]
keyword[try] :
identifier[name] = identifier[options] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ConfigMissingValue] ( identifier[section] , literal[string] )
keyword[try] :
identifier[spec] = identifier[getattr] ( identifier[modules] [ identifier[name] ], identifier[spec_name] )(). identifier[spec]
identifier[raw_specs] [ identifier[name] ]= identifier[spec]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[NotSupportedError] ( identifier[name] )
keyword[return] identifier[raw_specs]
|
def _get_raw_specs(self, config):
"""
This method extract only the "Validate.spec" from
modules that were collected by ConfigReader._get_modules().
And, this method append "Validate.spec" to raw_specs.
This method creates a dictionary like the following:
raw_specs = {
'redis': (
"[redis]",
"host = string(default='127.0.0.1')",
"port = integer(0, 65535, default=6379)",
"db = integer(default=0)",
"charset = string(default='utf-8')",
"password = string(default=None)"
),
...
}
raw_specs is used by ConfigReader._create_specs().
"""
# spec_name is hard-corded
raw_specs = {}
spec_name = 'Validator'
modules = self._get_modules()
for (section, options) in config.items():
if section == 'global':
continue # depends on [control=['if'], data=[]]
try:
name = options['module'] # depends on [control=['try'], data=[]]
except KeyError:
raise ConfigMissingValue(section, 'module') # depends on [control=['except'], data=[]]
try:
spec = getattr(modules[name], spec_name)().spec
raw_specs[name] = spec # depends on [control=['try'], data=[]]
except KeyError:
raise NotSupportedError(name) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
return raw_specs
|
def _plot_variability(ts, variability, threshold=None, epochs=None):
"""Plot the timeseries and variability. Optionally plot epochs."""
import matplotlib.style
import matplotlib as mpl
mpl.style.use('classic')
import matplotlib.pyplot as plt
if variability.ndim is 1:
variability = variability[:, np.newaxis, np.newaxis]
elif variability.ndim is 2:
variability = variability[:, np.newaxis, :]
vmeasures = variability.shape[2]
channels = ts.shape[1]
dt = (1.0*ts.tspan[-1] - ts.tspan[0]) / (len(ts) - 1)
fig = plt.figure()
ylabelprops = dict(rotation=0,
horizontalalignment='right',
verticalalignment='center',
x=-0.01)
for i in range(channels):
rect = (0.1, 0.85*(channels - i - 1)/channels + 0.1,
0.8, 0.85/channels)
axprops = dict()
if channels > 10:
axprops['yticks'] = []
ax = fig.add_axes(rect, **axprops)
ax.plot(ts.tspan, ts[:, i])
if ts.labels[1] is None:
ax.set_ylabel(u'channel %d' % i, **ylabelprops)
else:
ax.set_ylabel(ts.labels[1][i], **ylabelprops)
plt.setp(ax.get_xticklabels(), visible=False)
if i is channels - 1:
plt.setp(ax.get_xticklabels(), visible=True)
ax.set_xlabel('time (s)')
ax2 = ax.twinx()
if vmeasures > 1:
mean_v = np.nanmean(variability[:, i, :], axis=1)
ax2.plot(ts.tspan, mean_v, color='g')
colors = _get_color_list()
for j in range(vmeasures):
ax2.plot(ts.tspan, variability[:, i, j], linestyle='dotted',
color=colors[(3 + j) % len(colors)])
if i is 0:
ax2.legend(['variability (mean)'] +
['variability %d' % j for j in range(vmeasures)],
loc='best')
else:
ax2.plot(ts.tspan, variability[:, i, 0])
ax2.legend(('variability',), loc='best')
if threshold is not None:
ax2.axhline(y=threshold, color='Gray', linestyle='dashed')
ax2.set_ylabel('variability')
ymin = np.nanmin(ts[:, i])
ymax = np.nanmax(ts[:, i])
tstart = ts.tspan[0]
if epochs:
# highlight epochs using rectangular patches
for e in epochs[i]:
t1 = tstart + (e[0] - 1) * dt
ax.add_patch(mpl.patches.Rectangle(
(t1, ymin), (e[1] - e[0])*dt, ymax - ymin, alpha=0.2,
color='green', ec='none'))
fig.axes[0].set_title(u'variability (threshold = %g)' % threshold)
fig.show()
|
def function[_plot_variability, parameter[ts, variability, threshold, epochs]]:
constant[Plot the timeseries and variability. Optionally plot epochs.]
import module[matplotlib.style]
import module[matplotlib] as alias[mpl]
call[name[mpl].style.use, parameter[constant[classic]]]
import module[matplotlib.pyplot] as alias[plt]
if compare[name[variability].ndim is constant[1]] begin[:]
variable[variability] assign[=] call[name[variability]][tuple[[<ast.Slice object at 0x7da18bccb970>, <ast.Attribute object at 0x7da18bcc9f30>, <ast.Attribute object at 0x7da18bcca0b0>]]]
variable[vmeasures] assign[=] call[name[variability].shape][constant[2]]
variable[channels] assign[=] call[name[ts].shape][constant[1]]
variable[dt] assign[=] binary_operation[binary_operation[binary_operation[constant[1.0] * call[name[ts].tspan][<ast.UnaryOp object at 0x7da18bcc86a0>]] - call[name[ts].tspan][constant[0]]] / binary_operation[call[name[len], parameter[name[ts]]] - constant[1]]]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ylabelprops] assign[=] call[name[dict], parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[name[channels]]]] begin[:]
variable[rect] assign[=] tuple[[<ast.Constant object at 0x7da18bcc8520>, <ast.BinOp object at 0x7da18bcc8910>, <ast.Constant object at 0x7da18bcca2f0>, <ast.BinOp object at 0x7da18bccb580>]]
variable[axprops] assign[=] call[name[dict], parameter[]]
if compare[name[channels] greater[>] constant[10]] begin[:]
call[name[axprops]][constant[yticks]] assign[=] list[[]]
variable[ax] assign[=] call[name[fig].add_axes, parameter[name[rect]]]
call[name[ax].plot, parameter[name[ts].tspan, call[name[ts]][tuple[[<ast.Slice object at 0x7da18bcc9360>, <ast.Name object at 0x7da18bcca020>]]]]]
if compare[call[name[ts].labels][constant[1]] is constant[None]] begin[:]
call[name[ax].set_ylabel, parameter[binary_operation[constant[channel %d] <ast.Mod object at 0x7da2590d6920> name[i]]]]
call[name[plt].setp, parameter[call[name[ax].get_xticklabels, parameter[]]]]
if compare[name[i] is binary_operation[name[channels] - constant[1]]] begin[:]
call[name[plt].setp, parameter[call[name[ax].get_xticklabels, parameter[]]]]
call[name[ax].set_xlabel, parameter[constant[time (s)]]]
variable[ax2] assign[=] call[name[ax].twinx, parameter[]]
if compare[name[vmeasures] greater[>] constant[1]] begin[:]
variable[mean_v] assign[=] call[name[np].nanmean, parameter[call[name[variability]][tuple[[<ast.Slice object at 0x7da18bcc9120>, <ast.Name object at 0x7da18bccb2b0>, <ast.Slice object at 0x7da18bcc87c0>]]]]]
call[name[ax2].plot, parameter[name[ts].tspan, name[mean_v]]]
variable[colors] assign[=] call[name[_get_color_list], parameter[]]
for taget[name[j]] in starred[call[name[range], parameter[name[vmeasures]]]] begin[:]
call[name[ax2].plot, parameter[name[ts].tspan, call[name[variability]][tuple[[<ast.Slice object at 0x7da18bcc8f70>, <ast.Name object at 0x7da18bcca860>, <ast.Name object at 0x7da18bccabf0>]]]]]
if compare[name[i] is constant[0]] begin[:]
call[name[ax2].legend, parameter[binary_operation[list[[<ast.Constant object at 0x7da18bcc8af0>]] + <ast.ListComp object at 0x7da18bccb940>]]]
if compare[name[threshold] is_not constant[None]] begin[:]
call[name[ax2].axhline, parameter[]]
call[name[ax2].set_ylabel, parameter[constant[variability]]]
variable[ymin] assign[=] call[name[np].nanmin, parameter[call[name[ts]][tuple[[<ast.Slice object at 0x7da204620f10>, <ast.Name object at 0x7da2046207f0>]]]]]
variable[ymax] assign[=] call[name[np].nanmax, parameter[call[name[ts]][tuple[[<ast.Slice object at 0x7da204623310>, <ast.Name object at 0x7da2046200a0>]]]]]
variable[tstart] assign[=] call[name[ts].tspan][constant[0]]
if name[epochs] begin[:]
for taget[name[e]] in starred[call[name[epochs]][name[i]]] begin[:]
variable[t1] assign[=] binary_operation[name[tstart] + binary_operation[binary_operation[call[name[e]][constant[0]] - constant[1]] * name[dt]]]
call[name[ax].add_patch, parameter[call[name[mpl].patches.Rectangle, parameter[tuple[[<ast.Name object at 0x7da204623ee0>, <ast.Name object at 0x7da204621600>]], binary_operation[binary_operation[call[name[e]][constant[1]] - call[name[e]][constant[0]]] * name[dt]], binary_operation[name[ymax] - name[ymin]]]]]]
call[call[name[fig].axes][constant[0]].set_title, parameter[binary_operation[constant[variability (threshold = %g)] <ast.Mod object at 0x7da2590d6920> name[threshold]]]]
call[name[fig].show, parameter[]]
|
keyword[def] identifier[_plot_variability] ( identifier[ts] , identifier[variability] , identifier[threshold] = keyword[None] , identifier[epochs] = keyword[None] ):
literal[string]
keyword[import] identifier[matplotlib] . identifier[style]
keyword[import] identifier[matplotlib] keyword[as] identifier[mpl]
identifier[mpl] . identifier[style] . identifier[use] ( literal[string] )
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
keyword[if] identifier[variability] . identifier[ndim] keyword[is] literal[int] :
identifier[variability] = identifier[variability] [:, identifier[np] . identifier[newaxis] , identifier[np] . identifier[newaxis] ]
keyword[elif] identifier[variability] . identifier[ndim] keyword[is] literal[int] :
identifier[variability] = identifier[variability] [:, identifier[np] . identifier[newaxis] ,:]
identifier[vmeasures] = identifier[variability] . identifier[shape] [ literal[int] ]
identifier[channels] = identifier[ts] . identifier[shape] [ literal[int] ]
identifier[dt] =( literal[int] * identifier[ts] . identifier[tspan] [- literal[int] ]- identifier[ts] . identifier[tspan] [ literal[int] ])/( identifier[len] ( identifier[ts] )- literal[int] )
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[ylabelprops] = identifier[dict] ( identifier[rotation] = literal[int] ,
identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] ,
identifier[x] =- literal[int] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[channels] ):
identifier[rect] =( literal[int] , literal[int] *( identifier[channels] - identifier[i] - literal[int] )/ identifier[channels] + literal[int] ,
literal[int] , literal[int] / identifier[channels] )
identifier[axprops] = identifier[dict] ()
keyword[if] identifier[channels] > literal[int] :
identifier[axprops] [ literal[string] ]=[]
identifier[ax] = identifier[fig] . identifier[add_axes] ( identifier[rect] ,** identifier[axprops] )
identifier[ax] . identifier[plot] ( identifier[ts] . identifier[tspan] , identifier[ts] [:, identifier[i] ])
keyword[if] identifier[ts] . identifier[labels] [ literal[int] ] keyword[is] keyword[None] :
identifier[ax] . identifier[set_ylabel] ( literal[string] % identifier[i] ,** identifier[ylabelprops] )
keyword[else] :
identifier[ax] . identifier[set_ylabel] ( identifier[ts] . identifier[labels] [ literal[int] ][ identifier[i] ],** identifier[ylabelprops] )
identifier[plt] . identifier[setp] ( identifier[ax] . identifier[get_xticklabels] (), identifier[visible] = keyword[False] )
keyword[if] identifier[i] keyword[is] identifier[channels] - literal[int] :
identifier[plt] . identifier[setp] ( identifier[ax] . identifier[get_xticklabels] (), identifier[visible] = keyword[True] )
identifier[ax] . identifier[set_xlabel] ( literal[string] )
identifier[ax2] = identifier[ax] . identifier[twinx] ()
keyword[if] identifier[vmeasures] > literal[int] :
identifier[mean_v] = identifier[np] . identifier[nanmean] ( identifier[variability] [:, identifier[i] ,:], identifier[axis] = literal[int] )
identifier[ax2] . identifier[plot] ( identifier[ts] . identifier[tspan] , identifier[mean_v] , identifier[color] = literal[string] )
identifier[colors] = identifier[_get_color_list] ()
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[vmeasures] ):
identifier[ax2] . identifier[plot] ( identifier[ts] . identifier[tspan] , identifier[variability] [:, identifier[i] , identifier[j] ], identifier[linestyle] = literal[string] ,
identifier[color] = identifier[colors] [( literal[int] + identifier[j] )% identifier[len] ( identifier[colors] )])
keyword[if] identifier[i] keyword[is] literal[int] :
identifier[ax2] . identifier[legend] ([ literal[string] ]+
[ literal[string] % identifier[j] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[vmeasures] )],
identifier[loc] = literal[string] )
keyword[else] :
identifier[ax2] . identifier[plot] ( identifier[ts] . identifier[tspan] , identifier[variability] [:, identifier[i] , literal[int] ])
identifier[ax2] . identifier[legend] (( literal[string] ,), identifier[loc] = literal[string] )
keyword[if] identifier[threshold] keyword[is] keyword[not] keyword[None] :
identifier[ax2] . identifier[axhline] ( identifier[y] = identifier[threshold] , identifier[color] = literal[string] , identifier[linestyle] = literal[string] )
identifier[ax2] . identifier[set_ylabel] ( literal[string] )
identifier[ymin] = identifier[np] . identifier[nanmin] ( identifier[ts] [:, identifier[i] ])
identifier[ymax] = identifier[np] . identifier[nanmax] ( identifier[ts] [:, identifier[i] ])
identifier[tstart] = identifier[ts] . identifier[tspan] [ literal[int] ]
keyword[if] identifier[epochs] :
keyword[for] identifier[e] keyword[in] identifier[epochs] [ identifier[i] ]:
identifier[t1] = identifier[tstart] +( identifier[e] [ literal[int] ]- literal[int] )* identifier[dt]
identifier[ax] . identifier[add_patch] ( identifier[mpl] . identifier[patches] . identifier[Rectangle] (
( identifier[t1] , identifier[ymin] ),( identifier[e] [ literal[int] ]- identifier[e] [ literal[int] ])* identifier[dt] , identifier[ymax] - identifier[ymin] , identifier[alpha] = literal[int] ,
identifier[color] = literal[string] , identifier[ec] = literal[string] ))
identifier[fig] . identifier[axes] [ literal[int] ]. identifier[set_title] ( literal[string] % identifier[threshold] )
identifier[fig] . identifier[show] ()
|
def _plot_variability(ts, variability, threshold=None, epochs=None):
"""Plot the timeseries and variability. Optionally plot epochs."""
import matplotlib.style
import matplotlib as mpl
mpl.style.use('classic')
import matplotlib.pyplot as plt
if variability.ndim is 1:
variability = variability[:, np.newaxis, np.newaxis] # depends on [control=['if'], data=[]]
elif variability.ndim is 2:
variability = variability[:, np.newaxis, :] # depends on [control=['if'], data=[]]
vmeasures = variability.shape[2]
channels = ts.shape[1]
dt = (1.0 * ts.tspan[-1] - ts.tspan[0]) / (len(ts) - 1)
fig = plt.figure()
ylabelprops = dict(rotation=0, horizontalalignment='right', verticalalignment='center', x=-0.01)
for i in range(channels):
rect = (0.1, 0.85 * (channels - i - 1) / channels + 0.1, 0.8, 0.85 / channels)
axprops = dict()
if channels > 10:
axprops['yticks'] = [] # depends on [control=['if'], data=[]]
ax = fig.add_axes(rect, **axprops)
ax.plot(ts.tspan, ts[:, i])
if ts.labels[1] is None:
ax.set_ylabel(u'channel %d' % i, **ylabelprops) # depends on [control=['if'], data=[]]
else:
ax.set_ylabel(ts.labels[1][i], **ylabelprops)
plt.setp(ax.get_xticklabels(), visible=False)
if i is channels - 1:
plt.setp(ax.get_xticklabels(), visible=True)
ax.set_xlabel('time (s)') # depends on [control=['if'], data=[]]
ax2 = ax.twinx()
if vmeasures > 1:
mean_v = np.nanmean(variability[:, i, :], axis=1)
ax2.plot(ts.tspan, mean_v, color='g')
colors = _get_color_list()
for j in range(vmeasures):
ax2.plot(ts.tspan, variability[:, i, j], linestyle='dotted', color=colors[(3 + j) % len(colors)]) # depends on [control=['for'], data=['j']]
if i is 0:
ax2.legend(['variability (mean)'] + ['variability %d' % j for j in range(vmeasures)], loc='best') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['vmeasures']]
else:
ax2.plot(ts.tspan, variability[:, i, 0])
ax2.legend(('variability',), loc='best')
if threshold is not None:
ax2.axhline(y=threshold, color='Gray', linestyle='dashed') # depends on [control=['if'], data=['threshold']]
ax2.set_ylabel('variability')
ymin = np.nanmin(ts[:, i])
ymax = np.nanmax(ts[:, i])
tstart = ts.tspan[0]
if epochs:
# highlight epochs using rectangular patches
for e in epochs[i]:
t1 = tstart + (e[0] - 1) * dt
ax.add_patch(mpl.patches.Rectangle((t1, ymin), (e[1] - e[0]) * dt, ymax - ymin, alpha=0.2, color='green', ec='none')) # depends on [control=['for'], data=['e']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
fig.axes[0].set_title(u'variability (threshold = %g)' % threshold)
fig.show()
|
def init_celery(project_name):
""" init celery app without the need of redundant code """
os.environ.setdefault('DJANGO_SETTINGS_MODULE', '%s.settings' % project_name)
app = Celery(project_name)
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(settings.INSTALLED_APPS, related_name='tasks')
return app
|
def function[init_celery, parameter[project_name]]:
constant[ init celery app without the need of redundant code ]
call[name[os].environ.setdefault, parameter[constant[DJANGO_SETTINGS_MODULE], binary_operation[constant[%s.settings] <ast.Mod object at 0x7da2590d6920> name[project_name]]]]
variable[app] assign[=] call[name[Celery], parameter[name[project_name]]]
call[name[app].config_from_object, parameter[constant[django.conf:settings]]]
call[name[app].autodiscover_tasks, parameter[name[settings].INSTALLED_APPS]]
return[name[app]]
|
keyword[def] identifier[init_celery] ( identifier[project_name] ):
literal[string]
identifier[os] . identifier[environ] . identifier[setdefault] ( literal[string] , literal[string] % identifier[project_name] )
identifier[app] = identifier[Celery] ( identifier[project_name] )
identifier[app] . identifier[config_from_object] ( literal[string] )
identifier[app] . identifier[autodiscover_tasks] ( identifier[settings] . identifier[INSTALLED_APPS] , identifier[related_name] = literal[string] )
keyword[return] identifier[app]
|
def init_celery(project_name):
""" init celery app without the need of redundant code """
os.environ.setdefault('DJANGO_SETTINGS_MODULE', '%s.settings' % project_name)
app = Celery(project_name)
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(settings.INSTALLED_APPS, related_name='tasks')
return app
|
def __upload(self, resource, bytes):
"""Performs a single chunk upload."""
# note: string conversion required here due to open encoding bug in requests-oauthlib.
headers = {
'x-ton-expires': http_time(self.options.get('x-ton-expires', self._DEFAULT_EXPIRE)),
'content-length': str(self._file_size),
'content-type': self.content_type
}
return Request(self._client, 'post', resource,
domain=self._DEFAULT_DOMAIN, headers=headers, body=bytes).perform()
|
def function[__upload, parameter[self, resource, bytes]]:
constant[Performs a single chunk upload.]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b0656b90>, <ast.Constant object at 0x7da1b0654100>, <ast.Constant object at 0x7da1b0657f10>], [<ast.Call object at 0x7da1b06573d0>, <ast.Call object at 0x7da1b0654d90>, <ast.Attribute object at 0x7da1b0655ba0>]]
return[call[call[name[Request], parameter[name[self]._client, constant[post], name[resource]]].perform, parameter[]]]
|
keyword[def] identifier[__upload] ( identifier[self] , identifier[resource] , identifier[bytes] ):
literal[string]
identifier[headers] ={
literal[string] : identifier[http_time] ( identifier[self] . identifier[options] . identifier[get] ( literal[string] , identifier[self] . identifier[_DEFAULT_EXPIRE] )),
literal[string] : identifier[str] ( identifier[self] . identifier[_file_size] ),
literal[string] : identifier[self] . identifier[content_type]
}
keyword[return] identifier[Request] ( identifier[self] . identifier[_client] , literal[string] , identifier[resource] ,
identifier[domain] = identifier[self] . identifier[_DEFAULT_DOMAIN] , identifier[headers] = identifier[headers] , identifier[body] = identifier[bytes] ). identifier[perform] ()
|
def __upload(self, resource, bytes):
"""Performs a single chunk upload."""
# note: string conversion required here due to open encoding bug in requests-oauthlib.
headers = {'x-ton-expires': http_time(self.options.get('x-ton-expires', self._DEFAULT_EXPIRE)), 'content-length': str(self._file_size), 'content-type': self.content_type}
return Request(self._client, 'post', resource, domain=self._DEFAULT_DOMAIN, headers=headers, body=bytes).perform()
|
def n_yearly_publications(self, refresh=True):
"""Number of journal publications in a given year."""
pub_years = [int(ab.coverDate.split('-')[0])
for ab in self.get_journal_abstracts(refresh=refresh)]
return Counter(pub_years)
|
def function[n_yearly_publications, parameter[self, refresh]]:
constant[Number of journal publications in a given year.]
variable[pub_years] assign[=] <ast.ListComp object at 0x7da18f00cfd0>
return[call[name[Counter], parameter[name[pub_years]]]]
|
keyword[def] identifier[n_yearly_publications] ( identifier[self] , identifier[refresh] = keyword[True] ):
literal[string]
identifier[pub_years] =[ identifier[int] ( identifier[ab] . identifier[coverDate] . identifier[split] ( literal[string] )[ literal[int] ])
keyword[for] identifier[ab] keyword[in] identifier[self] . identifier[get_journal_abstracts] ( identifier[refresh] = identifier[refresh] )]
keyword[return] identifier[Counter] ( identifier[pub_years] )
|
def n_yearly_publications(self, refresh=True):
"""Number of journal publications in a given year."""
pub_years = [int(ab.coverDate.split('-')[0]) for ab in self.get_journal_abstracts(refresh=refresh)]
return Counter(pub_years)
|
def conf_budget(self, budget):
"""
Set limit on the number of conflicts.
"""
if self.minicard:
pysolvers.minicard_cbudget(self.minicard, budget)
|
def function[conf_budget, parameter[self, budget]]:
constant[
Set limit on the number of conflicts.
]
if name[self].minicard begin[:]
call[name[pysolvers].minicard_cbudget, parameter[name[self].minicard, name[budget]]]
|
keyword[def] identifier[conf_budget] ( identifier[self] , identifier[budget] ):
literal[string]
keyword[if] identifier[self] . identifier[minicard] :
identifier[pysolvers] . identifier[minicard_cbudget] ( identifier[self] . identifier[minicard] , identifier[budget] )
|
def conf_budget(self, budget):
"""
Set limit on the number of conflicts.
"""
if self.minicard:
pysolvers.minicard_cbudget(self.minicard, budget) # depends on [control=['if'], data=[]]
|
def _prepare_args(log_likelihood_fn, state,
log_likelihood=None, description='log_likelihood'):
"""Processes input args to meet list-like assumptions."""
state_parts = list(state) if mcmc_util.is_list_like(state) else [state]
state_parts = [tf.convert_to_tensor(s, name='current_state')
for s in state_parts]
log_likelihood = _maybe_call_fn(
log_likelihood_fn,
state_parts,
log_likelihood,
description)
return [state_parts, log_likelihood]
|
def function[_prepare_args, parameter[log_likelihood_fn, state, log_likelihood, description]]:
constant[Processes input args to meet list-like assumptions.]
variable[state_parts] assign[=] <ast.IfExp object at 0x7da1b03563e0>
variable[state_parts] assign[=] <ast.ListComp object at 0x7da1b0356230>
variable[log_likelihood] assign[=] call[name[_maybe_call_fn], parameter[name[log_likelihood_fn], name[state_parts], name[log_likelihood], name[description]]]
return[list[[<ast.Name object at 0x7da1b0356140>, <ast.Name object at 0x7da1b0356710>]]]
|
keyword[def] identifier[_prepare_args] ( identifier[log_likelihood_fn] , identifier[state] ,
identifier[log_likelihood] = keyword[None] , identifier[description] = literal[string] ):
literal[string]
identifier[state_parts] = identifier[list] ( identifier[state] ) keyword[if] identifier[mcmc_util] . identifier[is_list_like] ( identifier[state] ) keyword[else] [ identifier[state] ]
identifier[state_parts] =[ identifier[tf] . identifier[convert_to_tensor] ( identifier[s] , identifier[name] = literal[string] )
keyword[for] identifier[s] keyword[in] identifier[state_parts] ]
identifier[log_likelihood] = identifier[_maybe_call_fn] (
identifier[log_likelihood_fn] ,
identifier[state_parts] ,
identifier[log_likelihood] ,
identifier[description] )
keyword[return] [ identifier[state_parts] , identifier[log_likelihood] ]
|
def _prepare_args(log_likelihood_fn, state, log_likelihood=None, description='log_likelihood'):
"""Processes input args to meet list-like assumptions."""
state_parts = list(state) if mcmc_util.is_list_like(state) else [state]
state_parts = [tf.convert_to_tensor(s, name='current_state') for s in state_parts]
log_likelihood = _maybe_call_fn(log_likelihood_fn, state_parts, log_likelihood, description)
return [state_parts, log_likelihood]
|
def resample_multipitch(times, frequencies, target_times):
"""Resamples multipitch time series to a new timescale. Values in
``target_times`` outside the range of ``times`` return no pitch estimate.
Parameters
----------
times : np.ndarray
Array of time stamps
frequencies : list of np.ndarray
List of np.ndarrays of frequency values
target_times : np.ndarray
Array of target time stamps
Returns
-------
frequencies_resampled : list of numpy arrays
Frequency list of lists resampled to new timebase
"""
if target_times.size == 0:
return []
if times.size == 0:
return [np.array([])]*len(target_times)
n_times = len(frequencies)
# scipy's interpolate doesn't handle ragged arrays. Instead, we interpolate
# the frequency index and then map back to the frequency values.
# This only works because we're using a nearest neighbor interpolator!
frequency_index = np.arange(0, n_times)
# times are already ordered so assume_sorted=True for efficiency
# since we're interpolating the index, fill_value is set to the first index
# that is out of range. We handle this in the next line.
new_frequency_index = scipy.interpolate.interp1d(
times, frequency_index, kind='nearest', bounds_error=False,
assume_sorted=True, fill_value=n_times)(target_times)
# create array of frequencies plus additional empty element at the end for
# target time stamps that are out of the interpolation range
freq_vals = frequencies + [np.array([])]
# map interpolated indices back to frequency values
frequencies_resampled = [
freq_vals[i] for i in new_frequency_index.astype(int)]
return frequencies_resampled
|
def function[resample_multipitch, parameter[times, frequencies, target_times]]:
constant[Resamples multipitch time series to a new timescale. Values in
``target_times`` outside the range of ``times`` return no pitch estimate.
Parameters
----------
times : np.ndarray
Array of time stamps
frequencies : list of np.ndarray
List of np.ndarrays of frequency values
target_times : np.ndarray
Array of target time stamps
Returns
-------
frequencies_resampled : list of numpy arrays
Frequency list of lists resampled to new timebase
]
if compare[name[target_times].size equal[==] constant[0]] begin[:]
return[list[[]]]
if compare[name[times].size equal[==] constant[0]] begin[:]
return[binary_operation[list[[<ast.Call object at 0x7da1b0fcd8a0>]] * call[name[len], parameter[name[target_times]]]]]
variable[n_times] assign[=] call[name[len], parameter[name[frequencies]]]
variable[frequency_index] assign[=] call[name[np].arange, parameter[constant[0], name[n_times]]]
variable[new_frequency_index] assign[=] call[call[name[scipy].interpolate.interp1d, parameter[name[times], name[frequency_index]]], parameter[name[target_times]]]
variable[freq_vals] assign[=] binary_operation[name[frequencies] + list[[<ast.Call object at 0x7da1b0fcc070>]]]
variable[frequencies_resampled] assign[=] <ast.ListComp object at 0x7da20c6c4f10>
return[name[frequencies_resampled]]
|
keyword[def] identifier[resample_multipitch] ( identifier[times] , identifier[frequencies] , identifier[target_times] ):
literal[string]
keyword[if] identifier[target_times] . identifier[size] == literal[int] :
keyword[return] []
keyword[if] identifier[times] . identifier[size] == literal[int] :
keyword[return] [ identifier[np] . identifier[array] ([])]* identifier[len] ( identifier[target_times] )
identifier[n_times] = identifier[len] ( identifier[frequencies] )
identifier[frequency_index] = identifier[np] . identifier[arange] ( literal[int] , identifier[n_times] )
identifier[new_frequency_index] = identifier[scipy] . identifier[interpolate] . identifier[interp1d] (
identifier[times] , identifier[frequency_index] , identifier[kind] = literal[string] , identifier[bounds_error] = keyword[False] ,
identifier[assume_sorted] = keyword[True] , identifier[fill_value] = identifier[n_times] )( identifier[target_times] )
identifier[freq_vals] = identifier[frequencies] +[ identifier[np] . identifier[array] ([])]
identifier[frequencies_resampled] =[
identifier[freq_vals] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[new_frequency_index] . identifier[astype] ( identifier[int] )]
keyword[return] identifier[frequencies_resampled]
|
def resample_multipitch(times, frequencies, target_times):
"""Resamples multipitch time series to a new timescale. Values in
``target_times`` outside the range of ``times`` return no pitch estimate.
Parameters
----------
times : np.ndarray
Array of time stamps
frequencies : list of np.ndarray
List of np.ndarrays of frequency values
target_times : np.ndarray
Array of target time stamps
Returns
-------
frequencies_resampled : list of numpy arrays
Frequency list of lists resampled to new timebase
"""
if target_times.size == 0:
return [] # depends on [control=['if'], data=[]]
if times.size == 0:
return [np.array([])] * len(target_times) # depends on [control=['if'], data=[]]
n_times = len(frequencies)
# scipy's interpolate doesn't handle ragged arrays. Instead, we interpolate
# the frequency index and then map back to the frequency values.
# This only works because we're using a nearest neighbor interpolator!
frequency_index = np.arange(0, n_times)
# times are already ordered so assume_sorted=True for efficiency
# since we're interpolating the index, fill_value is set to the first index
# that is out of range. We handle this in the next line.
new_frequency_index = scipy.interpolate.interp1d(times, frequency_index, kind='nearest', bounds_error=False, assume_sorted=True, fill_value=n_times)(target_times)
# create array of frequencies plus additional empty element at the end for
# target time stamps that are out of the interpolation range
freq_vals = frequencies + [np.array([])]
# map interpolated indices back to frequency values
frequencies_resampled = [freq_vals[i] for i in new_frequency_index.astype(int)]
return frequencies_resampled
|
def find_executable(name: str, flags=os.X_OK) -> List[str]:
r"""Finds executable `name`.
Similar to Unix ``which`` command.
Returns list of zero or more full paths to `name`.
"""
result = []
extensions = [x for x in os.environ.get("PATHEXT", "").split(os.pathsep) if x]
path = os.environ.get("PATH", None)
if path is None:
return []
for path in os.environ.get("PATH", "").split(os.pathsep):
path = os.path.join(path, name)
if os.access(path, flags):
result.append(path)
for extension in extensions:
path_extension = path + extension
if os.access(path_extension, flags):
result.append(path_extension)
return result
|
def function[find_executable, parameter[name, flags]]:
constant[Finds executable `name`.
Similar to Unix ``which`` command.
Returns list of zero or more full paths to `name`.
]
variable[result] assign[=] list[[]]
variable[extensions] assign[=] <ast.ListComp object at 0x7da20c6a92a0>
variable[path] assign[=] call[name[os].environ.get, parameter[constant[PATH], constant[None]]]
if compare[name[path] is constant[None]] begin[:]
return[list[[]]]
for taget[name[path]] in starred[call[call[name[os].environ.get, parameter[constant[PATH], constant[]]].split, parameter[name[os].pathsep]]] begin[:]
variable[path] assign[=] call[name[os].path.join, parameter[name[path], name[name]]]
if call[name[os].access, parameter[name[path], name[flags]]] begin[:]
call[name[result].append, parameter[name[path]]]
for taget[name[extension]] in starred[name[extensions]] begin[:]
variable[path_extension] assign[=] binary_operation[name[path] + name[extension]]
if call[name[os].access, parameter[name[path_extension], name[flags]]] begin[:]
call[name[result].append, parameter[name[path_extension]]]
return[name[result]]
|
keyword[def] identifier[find_executable] ( identifier[name] : identifier[str] , identifier[flags] = identifier[os] . identifier[X_OK] )-> identifier[List] [ identifier[str] ]:
literal[string]
identifier[result] =[]
identifier[extensions] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[string] ). identifier[split] ( identifier[os] . identifier[pathsep] ) keyword[if] identifier[x] ]
identifier[path] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[path] keyword[is] keyword[None] :
keyword[return] []
keyword[for] identifier[path] keyword[in] identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[string] ). identifier[split] ( identifier[os] . identifier[pathsep] ):
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[name] )
keyword[if] identifier[os] . identifier[access] ( identifier[path] , identifier[flags] ):
identifier[result] . identifier[append] ( identifier[path] )
keyword[for] identifier[extension] keyword[in] identifier[extensions] :
identifier[path_extension] = identifier[path] + identifier[extension]
keyword[if] identifier[os] . identifier[access] ( identifier[path_extension] , identifier[flags] ):
identifier[result] . identifier[append] ( identifier[path_extension] )
keyword[return] identifier[result]
|
def find_executable(name: str, flags=os.X_OK) -> List[str]:
"""Finds executable `name`.
Similar to Unix ``which`` command.
Returns list of zero or more full paths to `name`.
"""
result = []
extensions = [x for x in os.environ.get('PATHEXT', '').split(os.pathsep) if x]
path = os.environ.get('PATH', None)
if path is None:
return [] # depends on [control=['if'], data=[]]
for path in os.environ.get('PATH', '').split(os.pathsep):
path = os.path.join(path, name)
if os.access(path, flags):
result.append(path) # depends on [control=['if'], data=[]]
for extension in extensions:
path_extension = path + extension
if os.access(path_extension, flags):
result.append(path_extension) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['extension']] # depends on [control=['for'], data=['path']]
return result
|
def load_site_config(name):
"""Load and return site configuration as a dict."""
return _load_config_json(
os.path.join(
CONFIG_PATH,
CONFIG_SITES_PATH,
name + CONFIG_EXT
)
)
|
def function[load_site_config, parameter[name]]:
constant[Load and return site configuration as a dict.]
return[call[name[_load_config_json], parameter[call[name[os].path.join, parameter[name[CONFIG_PATH], name[CONFIG_SITES_PATH], binary_operation[name[name] + name[CONFIG_EXT]]]]]]]
|
keyword[def] identifier[load_site_config] ( identifier[name] ):
literal[string]
keyword[return] identifier[_load_config_json] (
identifier[os] . identifier[path] . identifier[join] (
identifier[CONFIG_PATH] ,
identifier[CONFIG_SITES_PATH] ,
identifier[name] + identifier[CONFIG_EXT]
)
)
|
def load_site_config(name):
"""Load and return site configuration as a dict."""
return _load_config_json(os.path.join(CONFIG_PATH, CONFIG_SITES_PATH, name + CONFIG_EXT))
|
def read_from_file(self, filename):
"""Load a PSF file"""
self.clear()
with open(filename) as f:
# A) check the first line
line = next(f)
if not line.startswith("PSF"):
raise FileFormatError("Error while reading: A PSF file must start with a line 'PSF'.")
# B) read in all the sections, without interpreting them
current_section = None
sections = {}
for line in f:
line = line.strip()
if line == "":
continue
elif "!N" in line:
words = line.split()
current_section = []
section_name = words[1][2:]
if section_name.endswith(":"):
section_name = section_name[:-1]
sections[section_name] = current_section
else:
current_section.append(line)
# C) interpret the supported sections
# C.1) The title
self.title = sections['TITLE'][0]
molecules = []
numbers = []
# C.2) The atoms and molecules
for line in sections['ATOM']:
words = line.split()
self.atom_types.append(words[5])
self.charges.append(float(words[6]))
self.names.append(words[3])
molecules.append(int(words[2]))
atom = periodic[words[4]]
if atom is None:
numbers.append(0)
else:
numbers.append(periodic[words[4]].number)
self.molecules = np.array(molecules)-1
self.numbers = np.array(numbers)
self.charges = np.array(self.charges)
# C.3) The bonds section
tmp = []
for line in sections['BOND']:
tmp.extend(int(word) for word in line.split())
self.bonds = np.reshape(np.array(tmp), (-1, 2))-1
# C.4) The bends section
tmp = []
for line in sections['THETA']:
tmp.extend(int(word) for word in line.split())
self.bends = np.reshape(np.array(tmp), (-1, 3))-1
# C.5) The dihedral section
tmp = []
for line in sections['PHI']:
tmp.extend(int(word) for word in line.split())
self.dihedrals = np.reshape(np.array(tmp), (-1, 4))-1
# C.6) The improper section
tmp = []
for line in sections['IMPHI']:
tmp.extend(int(word) for word in line.split())
self.impropers = np.reshape(np.array(tmp), (-1, 4))-1
|
def function[read_from_file, parameter[self, filename]]:
constant[Load a PSF file]
call[name[self].clear, parameter[]]
with call[name[open], parameter[name[filename]]] begin[:]
variable[line] assign[=] call[name[next], parameter[name[f]]]
if <ast.UnaryOp object at 0x7da18f811000> begin[:]
<ast.Raise object at 0x7da18f813be0>
variable[current_section] assign[=] constant[None]
variable[sections] assign[=] dictionary[[], []]
for taget[name[line]] in starred[name[f]] begin[:]
variable[line] assign[=] call[name[line].strip, parameter[]]
if compare[name[line] equal[==] constant[]] begin[:]
continue
name[self].title assign[=] call[call[name[sections]][constant[TITLE]]][constant[0]]
variable[molecules] assign[=] list[[]]
variable[numbers] assign[=] list[[]]
for taget[name[line]] in starred[call[name[sections]][constant[ATOM]]] begin[:]
variable[words] assign[=] call[name[line].split, parameter[]]
call[name[self].atom_types.append, parameter[call[name[words]][constant[5]]]]
call[name[self].charges.append, parameter[call[name[float], parameter[call[name[words]][constant[6]]]]]]
call[name[self].names.append, parameter[call[name[words]][constant[3]]]]
call[name[molecules].append, parameter[call[name[int], parameter[call[name[words]][constant[2]]]]]]
variable[atom] assign[=] call[name[periodic]][call[name[words]][constant[4]]]
if compare[name[atom] is constant[None]] begin[:]
call[name[numbers].append, parameter[constant[0]]]
name[self].molecules assign[=] binary_operation[call[name[np].array, parameter[name[molecules]]] - constant[1]]
name[self].numbers assign[=] call[name[np].array, parameter[name[numbers]]]
name[self].charges assign[=] call[name[np].array, parameter[name[self].charges]]
variable[tmp] assign[=] list[[]]
for taget[name[line]] in starred[call[name[sections]][constant[BOND]]] begin[:]
call[name[tmp].extend, parameter[<ast.GeneratorExp object at 0x7da20c76d8d0>]]
name[self].bonds assign[=] binary_operation[call[name[np].reshape, parameter[call[name[np].array, parameter[name[tmp]]], tuple[[<ast.UnaryOp object at 0x7da20c76fe20>, <ast.Constant object at 0x7da20c76c580>]]]] - constant[1]]
variable[tmp] assign[=] list[[]]
for taget[name[line]] in starred[call[name[sections]][constant[THETA]]] begin[:]
call[name[tmp].extend, parameter[<ast.GeneratorExp object at 0x7da20c76f9d0>]]
name[self].bends assign[=] binary_operation[call[name[np].reshape, parameter[call[name[np].array, parameter[name[tmp]]], tuple[[<ast.UnaryOp object at 0x7da20c76fd90>, <ast.Constant object at 0x7da20c76e6e0>]]]] - constant[1]]
variable[tmp] assign[=] list[[]]
for taget[name[line]] in starred[call[name[sections]][constant[PHI]]] begin[:]
call[name[tmp].extend, parameter[<ast.GeneratorExp object at 0x7da20c76c4f0>]]
name[self].dihedrals assign[=] binary_operation[call[name[np].reshape, parameter[call[name[np].array, parameter[name[tmp]]], tuple[[<ast.UnaryOp object at 0x7da2041d9f60>, <ast.Constant object at 0x7da2041d8250>]]]] - constant[1]]
variable[tmp] assign[=] list[[]]
for taget[name[line]] in starred[call[name[sections]][constant[IMPHI]]] begin[:]
call[name[tmp].extend, parameter[<ast.GeneratorExp object at 0x7da2041dab30>]]
name[self].impropers assign[=] binary_operation[call[name[np].reshape, parameter[call[name[np].array, parameter[name[tmp]]], tuple[[<ast.UnaryOp object at 0x7da2041d88b0>, <ast.Constant object at 0x7da2041db310>]]]] - constant[1]]
|
keyword[def] identifier[read_from_file] ( identifier[self] , identifier[filename] ):
literal[string]
identifier[self] . identifier[clear] ()
keyword[with] identifier[open] ( identifier[filename] ) keyword[as] identifier[f] :
identifier[line] = identifier[next] ( identifier[f] )
keyword[if] keyword[not] identifier[line] . identifier[startswith] ( literal[string] ):
keyword[raise] identifier[FileFormatError] ( literal[string] )
identifier[current_section] = keyword[None]
identifier[sections] ={}
keyword[for] identifier[line] keyword[in] identifier[f] :
identifier[line] = identifier[line] . identifier[strip] ()
keyword[if] identifier[line] == literal[string] :
keyword[continue]
keyword[elif] literal[string] keyword[in] identifier[line] :
identifier[words] = identifier[line] . identifier[split] ()
identifier[current_section] =[]
identifier[section_name] = identifier[words] [ literal[int] ][ literal[int] :]
keyword[if] identifier[section_name] . identifier[endswith] ( literal[string] ):
identifier[section_name] = identifier[section_name] [:- literal[int] ]
identifier[sections] [ identifier[section_name] ]= identifier[current_section]
keyword[else] :
identifier[current_section] . identifier[append] ( identifier[line] )
identifier[self] . identifier[title] = identifier[sections] [ literal[string] ][ literal[int] ]
identifier[molecules] =[]
identifier[numbers] =[]
keyword[for] identifier[line] keyword[in] identifier[sections] [ literal[string] ]:
identifier[words] = identifier[line] . identifier[split] ()
identifier[self] . identifier[atom_types] . identifier[append] ( identifier[words] [ literal[int] ])
identifier[self] . identifier[charges] . identifier[append] ( identifier[float] ( identifier[words] [ literal[int] ]))
identifier[self] . identifier[names] . identifier[append] ( identifier[words] [ literal[int] ])
identifier[molecules] . identifier[append] ( identifier[int] ( identifier[words] [ literal[int] ]))
identifier[atom] = identifier[periodic] [ identifier[words] [ literal[int] ]]
keyword[if] identifier[atom] keyword[is] keyword[None] :
identifier[numbers] . identifier[append] ( literal[int] )
keyword[else] :
identifier[numbers] . identifier[append] ( identifier[periodic] [ identifier[words] [ literal[int] ]]. identifier[number] )
identifier[self] . identifier[molecules] = identifier[np] . identifier[array] ( identifier[molecules] )- literal[int]
identifier[self] . identifier[numbers] = identifier[np] . identifier[array] ( identifier[numbers] )
identifier[self] . identifier[charges] = identifier[np] . identifier[array] ( identifier[self] . identifier[charges] )
identifier[tmp] =[]
keyword[for] identifier[line] keyword[in] identifier[sections] [ literal[string] ]:
identifier[tmp] . identifier[extend] ( identifier[int] ( identifier[word] ) keyword[for] identifier[word] keyword[in] identifier[line] . identifier[split] ())
identifier[self] . identifier[bonds] = identifier[np] . identifier[reshape] ( identifier[np] . identifier[array] ( identifier[tmp] ),(- literal[int] , literal[int] ))- literal[int]
identifier[tmp] =[]
keyword[for] identifier[line] keyword[in] identifier[sections] [ literal[string] ]:
identifier[tmp] . identifier[extend] ( identifier[int] ( identifier[word] ) keyword[for] identifier[word] keyword[in] identifier[line] . identifier[split] ())
identifier[self] . identifier[bends] = identifier[np] . identifier[reshape] ( identifier[np] . identifier[array] ( identifier[tmp] ),(- literal[int] , literal[int] ))- literal[int]
identifier[tmp] =[]
keyword[for] identifier[line] keyword[in] identifier[sections] [ literal[string] ]:
identifier[tmp] . identifier[extend] ( identifier[int] ( identifier[word] ) keyword[for] identifier[word] keyword[in] identifier[line] . identifier[split] ())
identifier[self] . identifier[dihedrals] = identifier[np] . identifier[reshape] ( identifier[np] . identifier[array] ( identifier[tmp] ),(- literal[int] , literal[int] ))- literal[int]
identifier[tmp] =[]
keyword[for] identifier[line] keyword[in] identifier[sections] [ literal[string] ]:
identifier[tmp] . identifier[extend] ( identifier[int] ( identifier[word] ) keyword[for] identifier[word] keyword[in] identifier[line] . identifier[split] ())
identifier[self] . identifier[impropers] = identifier[np] . identifier[reshape] ( identifier[np] . identifier[array] ( identifier[tmp] ),(- literal[int] , literal[int] ))- literal[int]
|
def read_from_file(self, filename):
"""Load a PSF file"""
self.clear()
with open(filename) as f:
# A) check the first line
line = next(f)
if not line.startswith('PSF'):
raise FileFormatError("Error while reading: A PSF file must start with a line 'PSF'.") # depends on [control=['if'], data=[]]
# B) read in all the sections, without interpreting them
current_section = None
sections = {}
for line in f:
line = line.strip()
if line == '':
continue # depends on [control=['if'], data=[]]
elif '!N' in line:
words = line.split()
current_section = []
section_name = words[1][2:]
if section_name.endswith(':'):
section_name = section_name[:-1] # depends on [control=['if'], data=[]]
sections[section_name] = current_section # depends on [control=['if'], data=['line']]
else:
current_section.append(line) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f']]
# C) interpret the supported sections
# C.1) The title
self.title = sections['TITLE'][0]
molecules = []
numbers = []
# C.2) The atoms and molecules
for line in sections['ATOM']:
words = line.split()
self.atom_types.append(words[5])
self.charges.append(float(words[6]))
self.names.append(words[3])
molecules.append(int(words[2]))
atom = periodic[words[4]]
if atom is None:
numbers.append(0) # depends on [control=['if'], data=[]]
else:
numbers.append(periodic[words[4]].number) # depends on [control=['for'], data=['line']]
self.molecules = np.array(molecules) - 1
self.numbers = np.array(numbers)
self.charges = np.array(self.charges)
# C.3) The bonds section
tmp = []
for line in sections['BOND']:
tmp.extend((int(word) for word in line.split())) # depends on [control=['for'], data=['line']]
self.bonds = np.reshape(np.array(tmp), (-1, 2)) - 1
# C.4) The bends section
tmp = []
for line in sections['THETA']:
tmp.extend((int(word) for word in line.split())) # depends on [control=['for'], data=['line']]
self.bends = np.reshape(np.array(tmp), (-1, 3)) - 1
# C.5) The dihedral section
tmp = []
for line in sections['PHI']:
tmp.extend((int(word) for word in line.split())) # depends on [control=['for'], data=['line']]
self.dihedrals = np.reshape(np.array(tmp), (-1, 4)) - 1
# C.6) The improper section
tmp = []
for line in sections['IMPHI']:
tmp.extend((int(word) for word in line.split())) # depends on [control=['for'], data=['line']]
self.impropers = np.reshape(np.array(tmp), (-1, 4)) - 1
|
def MakeDeployableBinary(self, template_path, output_path):
"""This will add the config to the client template."""
context = self.context + ["Client Context"]
utils.EnsureDirExists(os.path.dirname(output_path))
client_config_data = self.GetClientConfig(context)
shutil.copyfile(template_path, output_path)
zip_file = zipfile.ZipFile(output_path, mode="a")
zip_info = zipfile.ZipInfo(filename="config.yaml")
zip_file.writestr(zip_info, client_config_data)
zip_file.close()
return output_path
|
def function[MakeDeployableBinary, parameter[self, template_path, output_path]]:
constant[This will add the config to the client template.]
variable[context] assign[=] binary_operation[name[self].context + list[[<ast.Constant object at 0x7da20cabd690>]]]
call[name[utils].EnsureDirExists, parameter[call[name[os].path.dirname, parameter[name[output_path]]]]]
variable[client_config_data] assign[=] call[name[self].GetClientConfig, parameter[name[context]]]
call[name[shutil].copyfile, parameter[name[template_path], name[output_path]]]
variable[zip_file] assign[=] call[name[zipfile].ZipFile, parameter[name[output_path]]]
variable[zip_info] assign[=] call[name[zipfile].ZipInfo, parameter[]]
call[name[zip_file].writestr, parameter[name[zip_info], name[client_config_data]]]
call[name[zip_file].close, parameter[]]
return[name[output_path]]
|
keyword[def] identifier[MakeDeployableBinary] ( identifier[self] , identifier[template_path] , identifier[output_path] ):
literal[string]
identifier[context] = identifier[self] . identifier[context] +[ literal[string] ]
identifier[utils] . identifier[EnsureDirExists] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[output_path] ))
identifier[client_config_data] = identifier[self] . identifier[GetClientConfig] ( identifier[context] )
identifier[shutil] . identifier[copyfile] ( identifier[template_path] , identifier[output_path] )
identifier[zip_file] = identifier[zipfile] . identifier[ZipFile] ( identifier[output_path] , identifier[mode] = literal[string] )
identifier[zip_info] = identifier[zipfile] . identifier[ZipInfo] ( identifier[filename] = literal[string] )
identifier[zip_file] . identifier[writestr] ( identifier[zip_info] , identifier[client_config_data] )
identifier[zip_file] . identifier[close] ()
keyword[return] identifier[output_path]
|
def MakeDeployableBinary(self, template_path, output_path):
"""This will add the config to the client template."""
context = self.context + ['Client Context']
utils.EnsureDirExists(os.path.dirname(output_path))
client_config_data = self.GetClientConfig(context)
shutil.copyfile(template_path, output_path)
zip_file = zipfile.ZipFile(output_path, mode='a')
zip_info = zipfile.ZipInfo(filename='config.yaml')
zip_file.writestr(zip_info, client_config_data)
zip_file.close()
return output_path
|
def scatter(self, *args, **kwargs):
''' Creates a scatter plot of the given x and y items.
Args:
x (str or seq[float]) : values or field names of center x coordinates
y (str or seq[float]) : values or field names of center y coordinates
size (str or list[float]) : values or field names of sizes in screen units
marker (str, or list[str]): values or field names of marker types
color (color value, optional): shorthand to set both fill and line color
source (:class:`~bokeh.models.sources.ColumnDataSource`) : a user-supplied data source.
An attempt will be made to convert the object to :class:`~bokeh.models.sources.ColumnDataSource`
if needed. If none is supplied, one is created for the user automatically.
**kwargs: :ref:`userguide_styling_line_properties` and :ref:`userguide_styling_fill_properties`
Examples:
>>> p.scatter([1,2,3],[4,5,6], marker="square", fill_color="red")
>>> p.scatter("data1", "data2", marker="mtype", source=data_source, ...)
.. note::
When passing ``marker="circle"`` it is also possible to supply a
``radius`` value in data-space units. When configuring marker type
from a data source column, *all* markers incuding circles may only
be configured with ``size`` in screen units.
'''
marker_type = kwargs.pop("marker", "circle")
if isinstance(marker_type, string_types) and marker_type in _MARKER_SHORTCUTS:
marker_type = _MARKER_SHORTCUTS[marker_type]
# The original scatter implementation allowed circle scatters to set a
# radius. We will leave this here for compatibility but note that it
# only works when the marker type is "circle" (and not referencing a
# data source column). Consider deprecating in the future.
if marker_type == "circle" and "radius" in kwargs:
return self.circle(*args, **kwargs)
else:
return self._scatter(*args, marker=marker_type, **kwargs)
|
def function[scatter, parameter[self]]:
constant[ Creates a scatter plot of the given x and y items.
Args:
x (str or seq[float]) : values or field names of center x coordinates
y (str or seq[float]) : values or field names of center y coordinates
size (str or list[float]) : values or field names of sizes in screen units
marker (str, or list[str]): values or field names of marker types
color (color value, optional): shorthand to set both fill and line color
source (:class:`~bokeh.models.sources.ColumnDataSource`) : a user-supplied data source.
An attempt will be made to convert the object to :class:`~bokeh.models.sources.ColumnDataSource`
if needed. If none is supplied, one is created for the user automatically.
**kwargs: :ref:`userguide_styling_line_properties` and :ref:`userguide_styling_fill_properties`
Examples:
>>> p.scatter([1,2,3],[4,5,6], marker="square", fill_color="red")
>>> p.scatter("data1", "data2", marker="mtype", source=data_source, ...)
.. note::
When passing ``marker="circle"`` it is also possible to supply a
``radius`` value in data-space units. When configuring marker type
from a data source column, *all* markers incuding circles may only
be configured with ``size`` in screen units.
]
variable[marker_type] assign[=] call[name[kwargs].pop, parameter[constant[marker], constant[circle]]]
if <ast.BoolOp object at 0x7da2045678e0> begin[:]
variable[marker_type] assign[=] call[name[_MARKER_SHORTCUTS]][name[marker_type]]
if <ast.BoolOp object at 0x7da20c76d1e0> begin[:]
return[call[name[self].circle, parameter[<ast.Starred object at 0x7da204621150>]]]
|
keyword[def] identifier[scatter] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[marker_type] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] )
keyword[if] identifier[isinstance] ( identifier[marker_type] , identifier[string_types] ) keyword[and] identifier[marker_type] keyword[in] identifier[_MARKER_SHORTCUTS] :
identifier[marker_type] = identifier[_MARKER_SHORTCUTS] [ identifier[marker_type] ]
keyword[if] identifier[marker_type] == literal[string] keyword[and] literal[string] keyword[in] identifier[kwargs] :
keyword[return] identifier[self] . identifier[circle] (* identifier[args] ,** identifier[kwargs] )
keyword[else] :
keyword[return] identifier[self] . identifier[_scatter] (* identifier[args] , identifier[marker] = identifier[marker_type] ,** identifier[kwargs] )
|
def scatter(self, *args, **kwargs):
""" Creates a scatter plot of the given x and y items.
Args:
x (str or seq[float]) : values or field names of center x coordinates
y (str or seq[float]) : values or field names of center y coordinates
size (str or list[float]) : values or field names of sizes in screen units
marker (str, or list[str]): values or field names of marker types
color (color value, optional): shorthand to set both fill and line color
source (:class:`~bokeh.models.sources.ColumnDataSource`) : a user-supplied data source.
An attempt will be made to convert the object to :class:`~bokeh.models.sources.ColumnDataSource`
if needed. If none is supplied, one is created for the user automatically.
**kwargs: :ref:`userguide_styling_line_properties` and :ref:`userguide_styling_fill_properties`
Examples:
>>> p.scatter([1,2,3],[4,5,6], marker="square", fill_color="red")
>>> p.scatter("data1", "data2", marker="mtype", source=data_source, ...)
.. note::
When passing ``marker="circle"`` it is also possible to supply a
``radius`` value in data-space units. When configuring marker type
from a data source column, *all* markers incuding circles may only
be configured with ``size`` in screen units.
"""
marker_type = kwargs.pop('marker', 'circle')
if isinstance(marker_type, string_types) and marker_type in _MARKER_SHORTCUTS:
marker_type = _MARKER_SHORTCUTS[marker_type] # depends on [control=['if'], data=[]]
# The original scatter implementation allowed circle scatters to set a
# radius. We will leave this here for compatibility but note that it
# only works when the marker type is "circle" (and not referencing a
# data source column). Consider deprecating in the future.
if marker_type == 'circle' and 'radius' in kwargs:
return self.circle(*args, **kwargs) # depends on [control=['if'], data=[]]
else:
return self._scatter(*args, marker=marker_type, **kwargs)
|
def get_environment() -> Environment:
"""
Returns the jinja2 templating environment updated with the most recent
cauldron environment configurations
:return:
"""
env = JINJA_ENVIRONMENT
loader = env.loader
resource_path = environ.configs.make_path(
'resources', 'templates',
override_key='template_path'
)
if not loader:
env.filters['id'] = get_id
env.filters['latex'] = get_latex
if not loader or resource_path not in loader.searchpath:
env.loader = FileSystemLoader(resource_path)
return env
|
def function[get_environment, parameter[]]:
constant[
Returns the jinja2 templating environment updated with the most recent
cauldron environment configurations
:return:
]
variable[env] assign[=] name[JINJA_ENVIRONMENT]
variable[loader] assign[=] name[env].loader
variable[resource_path] assign[=] call[name[environ].configs.make_path, parameter[constant[resources], constant[templates]]]
if <ast.UnaryOp object at 0x7da20c992410> begin[:]
call[name[env].filters][constant[id]] assign[=] name[get_id]
call[name[env].filters][constant[latex]] assign[=] name[get_latex]
if <ast.BoolOp object at 0x7da20c993df0> begin[:]
name[env].loader assign[=] call[name[FileSystemLoader], parameter[name[resource_path]]]
return[name[env]]
|
keyword[def] identifier[get_environment] ()-> identifier[Environment] :
literal[string]
identifier[env] = identifier[JINJA_ENVIRONMENT]
identifier[loader] = identifier[env] . identifier[loader]
identifier[resource_path] = identifier[environ] . identifier[configs] . identifier[make_path] (
literal[string] , literal[string] ,
identifier[override_key] = literal[string]
)
keyword[if] keyword[not] identifier[loader] :
identifier[env] . identifier[filters] [ literal[string] ]= identifier[get_id]
identifier[env] . identifier[filters] [ literal[string] ]= identifier[get_latex]
keyword[if] keyword[not] identifier[loader] keyword[or] identifier[resource_path] keyword[not] keyword[in] identifier[loader] . identifier[searchpath] :
identifier[env] . identifier[loader] = identifier[FileSystemLoader] ( identifier[resource_path] )
keyword[return] identifier[env]
|
def get_environment() -> Environment:
"""
Returns the jinja2 templating environment updated with the most recent
cauldron environment configurations
:return:
"""
env = JINJA_ENVIRONMENT
loader = env.loader
resource_path = environ.configs.make_path('resources', 'templates', override_key='template_path')
if not loader:
env.filters['id'] = get_id
env.filters['latex'] = get_latex # depends on [control=['if'], data=[]]
if not loader or resource_path not in loader.searchpath:
env.loader = FileSystemLoader(resource_path) # depends on [control=['if'], data=[]]
return env
|
def reduce_log_sum(attrs, inputs, proto_obj):
"""Reduce the array along a given axis by log sum value"""
keep_dims = True if 'keepdims' not in attrs else attrs.get('keepdims')
sum_op = symbol.sum(inputs[0], axis=attrs.get('axes'),
keepdims=keep_dims)
log_sym = symbol.log(sum_op)
return log_sym, attrs, inputs
|
def function[reduce_log_sum, parameter[attrs, inputs, proto_obj]]:
constant[Reduce the array along a given axis by log sum value]
variable[keep_dims] assign[=] <ast.IfExp object at 0x7da1b204cac0>
variable[sum_op] assign[=] call[name[symbol].sum, parameter[call[name[inputs]][constant[0]]]]
variable[log_sym] assign[=] call[name[symbol].log, parameter[name[sum_op]]]
return[tuple[[<ast.Name object at 0x7da1b204cb50>, <ast.Name object at 0x7da1b204ef50>, <ast.Name object at 0x7da1b204d870>]]]
|
keyword[def] identifier[reduce_log_sum] ( identifier[attrs] , identifier[inputs] , identifier[proto_obj] ):
literal[string]
identifier[keep_dims] = keyword[True] keyword[if] literal[string] keyword[not] keyword[in] identifier[attrs] keyword[else] identifier[attrs] . identifier[get] ( literal[string] )
identifier[sum_op] = identifier[symbol] . identifier[sum] ( identifier[inputs] [ literal[int] ], identifier[axis] = identifier[attrs] . identifier[get] ( literal[string] ),
identifier[keepdims] = identifier[keep_dims] )
identifier[log_sym] = identifier[symbol] . identifier[log] ( identifier[sum_op] )
keyword[return] identifier[log_sym] , identifier[attrs] , identifier[inputs]
|
def reduce_log_sum(attrs, inputs, proto_obj):
"""Reduce the array along a given axis by log sum value"""
keep_dims = True if 'keepdims' not in attrs else attrs.get('keepdims')
sum_op = symbol.sum(inputs[0], axis=attrs.get('axes'), keepdims=keep_dims)
log_sym = symbol.log(sum_op)
return (log_sym, attrs, inputs)
|
def load_section(self, section):
"""
Loads the contents of a #Section. The `section.identifier` is the name
of the object that we need to load.
# Arguments
section (Section): The section to load. Fill the `section.title` and
`section.content` values. Optionally, `section.loader_context` can
be filled with custom arbitrary data to reference at a later point.
"""
assert section.identifier is not None
obj, scope = import_object_with_scope(section.identifier)
if '.' in section.identifier:
default_title = section.identifier.rsplit('.', 1)[1]
else:
default_title = section.identifier
section.title = getattr(obj, '__name__', default_title)
section.content = trim(get_docstring(obj))
section.loader_context = {'obj': obj, 'scope': scope}
# Add the function signature in a code-block.
if callable(obj):
sig = get_function_signature(obj, scope if inspect.isclass(scope) else None)
sig, _ = FormatCode(sig, style_config='pep8')
section.content = '```python\n{}\n```\n'.format(sig.strip()) + section.content
|
def function[load_section, parameter[self, section]]:
constant[
Loads the contents of a #Section. The `section.identifier` is the name
of the object that we need to load.
# Arguments
section (Section): The section to load. Fill the `section.title` and
`section.content` values. Optionally, `section.loader_context` can
be filled with custom arbitrary data to reference at a later point.
]
assert[compare[name[section].identifier is_not constant[None]]]
<ast.Tuple object at 0x7da1b083d120> assign[=] call[name[import_object_with_scope], parameter[name[section].identifier]]
if compare[constant[.] in name[section].identifier] begin[:]
variable[default_title] assign[=] call[call[name[section].identifier.rsplit, parameter[constant[.], constant[1]]]][constant[1]]
name[section].title assign[=] call[name[getattr], parameter[name[obj], constant[__name__], name[default_title]]]
name[section].content assign[=] call[name[trim], parameter[call[name[get_docstring], parameter[name[obj]]]]]
name[section].loader_context assign[=] dictionary[[<ast.Constant object at 0x7da1b0847d30>, <ast.Constant object at 0x7da1b0844d30>], [<ast.Name object at 0x7da1b0845450>, <ast.Name object at 0x7da1b08460e0>]]
if call[name[callable], parameter[name[obj]]] begin[:]
variable[sig] assign[=] call[name[get_function_signature], parameter[name[obj], <ast.IfExp object at 0x7da1b0846080>]]
<ast.Tuple object at 0x7da1b08449a0> assign[=] call[name[FormatCode], parameter[name[sig]]]
name[section].content assign[=] binary_operation[call[constant[```python
{}
```
].format, parameter[call[name[sig].strip, parameter[]]]] + name[section].content]
|
keyword[def] identifier[load_section] ( identifier[self] , identifier[section] ):
literal[string]
keyword[assert] identifier[section] . identifier[identifier] keyword[is] keyword[not] keyword[None]
identifier[obj] , identifier[scope] = identifier[import_object_with_scope] ( identifier[section] . identifier[identifier] )
keyword[if] literal[string] keyword[in] identifier[section] . identifier[identifier] :
identifier[default_title] = identifier[section] . identifier[identifier] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]
keyword[else] :
identifier[default_title] = identifier[section] . identifier[identifier]
identifier[section] . identifier[title] = identifier[getattr] ( identifier[obj] , literal[string] , identifier[default_title] )
identifier[section] . identifier[content] = identifier[trim] ( identifier[get_docstring] ( identifier[obj] ))
identifier[section] . identifier[loader_context] ={ literal[string] : identifier[obj] , literal[string] : identifier[scope] }
keyword[if] identifier[callable] ( identifier[obj] ):
identifier[sig] = identifier[get_function_signature] ( identifier[obj] , identifier[scope] keyword[if] identifier[inspect] . identifier[isclass] ( identifier[scope] ) keyword[else] keyword[None] )
identifier[sig] , identifier[_] = identifier[FormatCode] ( identifier[sig] , identifier[style_config] = literal[string] )
identifier[section] . identifier[content] = literal[string] . identifier[format] ( identifier[sig] . identifier[strip] ())+ identifier[section] . identifier[content]
|
def load_section(self, section):
"""
Loads the contents of a #Section. The `section.identifier` is the name
of the object that we need to load.
# Arguments
section (Section): The section to load. Fill the `section.title` and
`section.content` values. Optionally, `section.loader_context` can
be filled with custom arbitrary data to reference at a later point.
"""
assert section.identifier is not None
(obj, scope) = import_object_with_scope(section.identifier)
if '.' in section.identifier:
default_title = section.identifier.rsplit('.', 1)[1] # depends on [control=['if'], data=[]]
else:
default_title = section.identifier
section.title = getattr(obj, '__name__', default_title)
section.content = trim(get_docstring(obj))
section.loader_context = {'obj': obj, 'scope': scope}
# Add the function signature in a code-block.
if callable(obj):
sig = get_function_signature(obj, scope if inspect.isclass(scope) else None)
(sig, _) = FormatCode(sig, style_config='pep8')
section.content = '```python\n{}\n```\n'.format(sig.strip()) + section.content # depends on [control=['if'], data=[]]
|
def _decode(hashid, salt, alphabet, separators, guards):
"""Helper method that restores the values encoded in a hashid without
argument checks."""
parts = tuple(_split(hashid, guards))
hashid = parts[1] if 2 <= len(parts) <= 3 else parts[0]
if not hashid:
return
lottery_char = hashid[0]
hashid = hashid[1:]
hash_parts = _split(hashid, separators)
for part in hash_parts:
alphabet_salt = (lottery_char + salt + alphabet)[:len(alphabet)]
alphabet = _reorder(alphabet, alphabet_salt)
yield _unhash(part, alphabet)
|
def function[_decode, parameter[hashid, salt, alphabet, separators, guards]]:
constant[Helper method that restores the values encoded in a hashid without
argument checks.]
variable[parts] assign[=] call[name[tuple], parameter[call[name[_split], parameter[name[hashid], name[guards]]]]]
variable[hashid] assign[=] <ast.IfExp object at 0x7da18dc9a2f0>
if <ast.UnaryOp object at 0x7da18dc9ac20> begin[:]
return[None]
variable[lottery_char] assign[=] call[name[hashid]][constant[0]]
variable[hashid] assign[=] call[name[hashid]][<ast.Slice object at 0x7da20e954eb0>]
variable[hash_parts] assign[=] call[name[_split], parameter[name[hashid], name[separators]]]
for taget[name[part]] in starred[name[hash_parts]] begin[:]
variable[alphabet_salt] assign[=] call[binary_operation[binary_operation[name[lottery_char] + name[salt]] + name[alphabet]]][<ast.Slice object at 0x7da2044c3400>]
variable[alphabet] assign[=] call[name[_reorder], parameter[name[alphabet], name[alphabet_salt]]]
<ast.Yield object at 0x7da2044c1c30>
|
keyword[def] identifier[_decode] ( identifier[hashid] , identifier[salt] , identifier[alphabet] , identifier[separators] , identifier[guards] ):
literal[string]
identifier[parts] = identifier[tuple] ( identifier[_split] ( identifier[hashid] , identifier[guards] ))
identifier[hashid] = identifier[parts] [ literal[int] ] keyword[if] literal[int] <= identifier[len] ( identifier[parts] )<= literal[int] keyword[else] identifier[parts] [ literal[int] ]
keyword[if] keyword[not] identifier[hashid] :
keyword[return]
identifier[lottery_char] = identifier[hashid] [ literal[int] ]
identifier[hashid] = identifier[hashid] [ literal[int] :]
identifier[hash_parts] = identifier[_split] ( identifier[hashid] , identifier[separators] )
keyword[for] identifier[part] keyword[in] identifier[hash_parts] :
identifier[alphabet_salt] =( identifier[lottery_char] + identifier[salt] + identifier[alphabet] )[: identifier[len] ( identifier[alphabet] )]
identifier[alphabet] = identifier[_reorder] ( identifier[alphabet] , identifier[alphabet_salt] )
keyword[yield] identifier[_unhash] ( identifier[part] , identifier[alphabet] )
|
def _decode(hashid, salt, alphabet, separators, guards):
"""Helper method that restores the values encoded in a hashid without
argument checks."""
parts = tuple(_split(hashid, guards))
hashid = parts[1] if 2 <= len(parts) <= 3 else parts[0]
if not hashid:
return # depends on [control=['if'], data=[]]
lottery_char = hashid[0]
hashid = hashid[1:]
hash_parts = _split(hashid, separators)
for part in hash_parts:
alphabet_salt = (lottery_char + salt + alphabet)[:len(alphabet)]
alphabet = _reorder(alphabet, alphabet_salt)
yield _unhash(part, alphabet) # depends on [control=['for'], data=['part']]
|
def _check_for_degenerate_interesting_groups(items):
""" Make sure interesting_groups specify existing metadata and that
the interesting_group is not all of the same for all of the samples
"""
igkey = ("algorithm", "bcbiornaseq", "interesting_groups")
interesting_groups = tz.get_in(igkey, items[0], [])
if isinstance(interesting_groups, str):
interesting_groups = [interesting_groups]
for group in interesting_groups:
values = [tz.get_in(("metadata", group), x, None) for x in items]
if all(x is None for x in values):
raise ValueError("group %s is labelled as an interesting group, "
"but does not appear in the metadata." % group)
if len(list(tz.unique(values))) == 1:
raise ValueError("group %s is marked as an interesting group, "
"but all samples have the same value." % group)
|
def function[_check_for_degenerate_interesting_groups, parameter[items]]:
constant[ Make sure interesting_groups specify existing metadata and that
the interesting_group is not all of the same for all of the samples
]
variable[igkey] assign[=] tuple[[<ast.Constant object at 0x7da1b1846500>, <ast.Constant object at 0x7da1b1846140>, <ast.Constant object at 0x7da1b1845240>]]
variable[interesting_groups] assign[=] call[name[tz].get_in, parameter[name[igkey], call[name[items]][constant[0]], list[[]]]]
if call[name[isinstance], parameter[name[interesting_groups], name[str]]] begin[:]
variable[interesting_groups] assign[=] list[[<ast.Name object at 0x7da1b1846410>]]
for taget[name[group]] in starred[name[interesting_groups]] begin[:]
variable[values] assign[=] <ast.ListComp object at 0x7da1b1847b80>
if call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b1846920>]] begin[:]
<ast.Raise object at 0x7da1b1844dc0>
if compare[call[name[len], parameter[call[name[list], parameter[call[name[tz].unique, parameter[name[values]]]]]]] equal[==] constant[1]] begin[:]
<ast.Raise object at 0x7da1b1844b50>
|
keyword[def] identifier[_check_for_degenerate_interesting_groups] ( identifier[items] ):
literal[string]
identifier[igkey] =( literal[string] , literal[string] , literal[string] )
identifier[interesting_groups] = identifier[tz] . identifier[get_in] ( identifier[igkey] , identifier[items] [ literal[int] ],[])
keyword[if] identifier[isinstance] ( identifier[interesting_groups] , identifier[str] ):
identifier[interesting_groups] =[ identifier[interesting_groups] ]
keyword[for] identifier[group] keyword[in] identifier[interesting_groups] :
identifier[values] =[ identifier[tz] . identifier[get_in] (( literal[string] , identifier[group] ), identifier[x] , keyword[None] ) keyword[for] identifier[x] keyword[in] identifier[items] ]
keyword[if] identifier[all] ( identifier[x] keyword[is] keyword[None] keyword[for] identifier[x] keyword[in] identifier[values] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] % identifier[group] )
keyword[if] identifier[len] ( identifier[list] ( identifier[tz] . identifier[unique] ( identifier[values] )))== literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] % identifier[group] )
|
def _check_for_degenerate_interesting_groups(items):
""" Make sure interesting_groups specify existing metadata and that
the interesting_group is not all of the same for all of the samples
"""
igkey = ('algorithm', 'bcbiornaseq', 'interesting_groups')
interesting_groups = tz.get_in(igkey, items[0], [])
if isinstance(interesting_groups, str):
interesting_groups = [interesting_groups] # depends on [control=['if'], data=[]]
for group in interesting_groups:
values = [tz.get_in(('metadata', group), x, None) for x in items]
if all((x is None for x in values)):
raise ValueError('group %s is labelled as an interesting group, but does not appear in the metadata.' % group) # depends on [control=['if'], data=[]]
if len(list(tz.unique(values))) == 1:
raise ValueError('group %s is marked as an interesting group, but all samples have the same value.' % group) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['group']]
|
def simulate(args):
"""
%prog simulate run_dir 1 300
Simulate BAMs with varying inserts with dwgsim. The above command will
simulate between 1 to 300 CAGs in the HD region, in a directory called
`run_dir`.
"""
p = OptionParser(simulate.__doc__)
p.add_option("--method", choices=("wgsim", "eagle"), default="eagle",
help="Read simulator")
p.add_option("--ref", default="hg38", choices=("hg38", "hg19"),
help="Reference genome version")
p.add_option("--tred", default="HD", help="TRED locus")
add_simulate_options(p)
opts, args = p.parse_args(args)
if len(args) != 3:
sys.exit(not p.print_help())
rundir, startunits, endunits = args
ref = opts.ref
ref_fasta = "/mnt/ref/{}.upper.fa".format(ref)
startunits, endunits = int(startunits), int(endunits)
basecwd = os.getcwd()
mkdir(rundir)
os.chdir(rundir)
cwd = os.getcwd()
# TRED region (e.g. Huntington)
pad_left, pad_right = 1000, 10000
repo = TREDsRepo(ref=ref)
tred = repo[opts.tred]
chr, start, end = tred.chr, tred.repeat_start, tred.repeat_end
logging.debug("Simulating {}".format(tred))
fasta = Fasta(ref_fasta)
seq_left = fasta[chr][start - pad_left:start - 1]
seq_right = fasta[chr][end: end + pad_right]
motif = tred.repeat
simulate_method = wgsim if opts.method == "wgsim" else eagle
# Write fake sequence
for units in range(startunits, endunits + 1):
pf = str(units)
mkdir(pf)
os.chdir(pf)
seq = str(seq_left) + motif * units + str(seq_right)
fastafile = pf + ".fasta"
make_fasta(seq, fastafile, id=chr.upper())
# Simulate reads on it
simulate_method([fastafile, "--depth={}".format(opts.depth),
"--readlen={}".format(opts.readlen),
"--distance={}".format(opts.distance),
"--outfile={}".format(pf)])
read1 = pf + ".bwa.read1.fastq"
read2 = pf + ".bwa.read2.fastq"
samfile, _ = align([ref_fasta, read1, read2])
indexed_samfile = index([samfile])
sh("mv {} ../{}.bam".format(indexed_samfile, pf))
sh("mv {}.bai ../{}.bam.bai".format(indexed_samfile, pf))
os.chdir(cwd)
shutil.rmtree(pf)
os.chdir(basecwd)
|
def function[simulate, parameter[args]]:
constant[
%prog simulate run_dir 1 300
Simulate BAMs with varying inserts with dwgsim. The above command will
simulate between 1 to 300 CAGs in the HD region, in a directory called
`run_dir`.
]
variable[p] assign[=] call[name[OptionParser], parameter[name[simulate].__doc__]]
call[name[p].add_option, parameter[constant[--method]]]
call[name[p].add_option, parameter[constant[--ref]]]
call[name[p].add_option, parameter[constant[--tred]]]
call[name[add_simulate_options], parameter[name[p]]]
<ast.Tuple object at 0x7da18f7235b0> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[3]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da18f720f10>]]
<ast.Tuple object at 0x7da18f721300> assign[=] name[args]
variable[ref] assign[=] name[opts].ref
variable[ref_fasta] assign[=] call[constant[/mnt/ref/{}.upper.fa].format, parameter[name[ref]]]
<ast.Tuple object at 0x7da18f720850> assign[=] tuple[[<ast.Call object at 0x7da18f721270>, <ast.Call object at 0x7da18f721ab0>]]
variable[basecwd] assign[=] call[name[os].getcwd, parameter[]]
call[name[mkdir], parameter[name[rundir]]]
call[name[os].chdir, parameter[name[rundir]]]
variable[cwd] assign[=] call[name[os].getcwd, parameter[]]
<ast.Tuple object at 0x7da18f721ae0> assign[=] tuple[[<ast.Constant object at 0x7da18f7239a0>, <ast.Constant object at 0x7da18f7200a0>]]
variable[repo] assign[=] call[name[TREDsRepo], parameter[]]
variable[tred] assign[=] call[name[repo]][name[opts].tred]
<ast.Tuple object at 0x7da18f721690> assign[=] tuple[[<ast.Attribute object at 0x7da18f722260>, <ast.Attribute object at 0x7da18f721780>, <ast.Attribute object at 0x7da18f7236d0>]]
call[name[logging].debug, parameter[call[constant[Simulating {}].format, parameter[name[tred]]]]]
variable[fasta] assign[=] call[name[Fasta], parameter[name[ref_fasta]]]
variable[seq_left] assign[=] call[call[name[fasta]][name[chr]]][<ast.Slice object at 0x7da18f720700>]
variable[seq_right] assign[=] call[call[name[fasta]][name[chr]]][<ast.Slice object at 0x7da18f721a50>]
variable[motif] assign[=] name[tred].repeat
variable[simulate_method] assign[=] <ast.IfExp object at 0x7da18f721090>
for taget[name[units]] in starred[call[name[range], parameter[name[startunits], binary_operation[name[endunits] + constant[1]]]]] begin[:]
variable[pf] assign[=] call[name[str], parameter[name[units]]]
call[name[mkdir], parameter[name[pf]]]
call[name[os].chdir, parameter[name[pf]]]
variable[seq] assign[=] binary_operation[binary_operation[call[name[str], parameter[name[seq_left]]] + binary_operation[name[motif] * name[units]]] + call[name[str], parameter[name[seq_right]]]]
variable[fastafile] assign[=] binary_operation[name[pf] + constant[.fasta]]
call[name[make_fasta], parameter[name[seq], name[fastafile]]]
call[name[simulate_method], parameter[list[[<ast.Name object at 0x7da1b09619f0>, <ast.Call object at 0x7da1b0961990>, <ast.Call object at 0x7da1b09617e0>, <ast.Call object at 0x7da1b0961060>, <ast.Call object at 0x7da1b09611b0>]]]]
variable[read1] assign[=] binary_operation[name[pf] + constant[.bwa.read1.fastq]]
variable[read2] assign[=] binary_operation[name[pf] + constant[.bwa.read2.fastq]]
<ast.Tuple object at 0x7da1b0960520> assign[=] call[name[align], parameter[list[[<ast.Name object at 0x7da1b09608e0>, <ast.Name object at 0x7da1b0960910>, <ast.Name object at 0x7da1b09609a0>]]]]
variable[indexed_samfile] assign[=] call[name[index], parameter[list[[<ast.Name object at 0x7da1b0960a00>]]]]
call[name[sh], parameter[call[constant[mv {} ../{}.bam].format, parameter[name[indexed_samfile], name[pf]]]]]
call[name[sh], parameter[call[constant[mv {}.bai ../{}.bam.bai].format, parameter[name[indexed_samfile], name[pf]]]]]
call[name[os].chdir, parameter[name[cwd]]]
call[name[shutil].rmtree, parameter[name[pf]]]
call[name[os].chdir, parameter[name[basecwd]]]
|
keyword[def] identifier[simulate] ( identifier[args] ):
literal[string]
identifier[p] = identifier[OptionParser] ( identifier[simulate] . identifier[__doc__] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[choices] =( literal[string] , literal[string] ), identifier[default] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[string] , identifier[choices] =( literal[string] , literal[string] ),
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[string] , identifier[help] = literal[string] )
identifier[add_simulate_options] ( identifier[p] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[rundir] , identifier[startunits] , identifier[endunits] = identifier[args]
identifier[ref] = identifier[opts] . identifier[ref]
identifier[ref_fasta] = literal[string] . identifier[format] ( identifier[ref] )
identifier[startunits] , identifier[endunits] = identifier[int] ( identifier[startunits] ), identifier[int] ( identifier[endunits] )
identifier[basecwd] = identifier[os] . identifier[getcwd] ()
identifier[mkdir] ( identifier[rundir] )
identifier[os] . identifier[chdir] ( identifier[rundir] )
identifier[cwd] = identifier[os] . identifier[getcwd] ()
identifier[pad_left] , identifier[pad_right] = literal[int] , literal[int]
identifier[repo] = identifier[TREDsRepo] ( identifier[ref] = identifier[ref] )
identifier[tred] = identifier[repo] [ identifier[opts] . identifier[tred] ]
identifier[chr] , identifier[start] , identifier[end] = identifier[tred] . identifier[chr] , identifier[tred] . identifier[repeat_start] , identifier[tred] . identifier[repeat_end]
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[tred] ))
identifier[fasta] = identifier[Fasta] ( identifier[ref_fasta] )
identifier[seq_left] = identifier[fasta] [ identifier[chr] ][ identifier[start] - identifier[pad_left] : identifier[start] - literal[int] ]
identifier[seq_right] = identifier[fasta] [ identifier[chr] ][ identifier[end] : identifier[end] + identifier[pad_right] ]
identifier[motif] = identifier[tred] . identifier[repeat]
identifier[simulate_method] = identifier[wgsim] keyword[if] identifier[opts] . identifier[method] == literal[string] keyword[else] identifier[eagle]
keyword[for] identifier[units] keyword[in] identifier[range] ( identifier[startunits] , identifier[endunits] + literal[int] ):
identifier[pf] = identifier[str] ( identifier[units] )
identifier[mkdir] ( identifier[pf] )
identifier[os] . identifier[chdir] ( identifier[pf] )
identifier[seq] = identifier[str] ( identifier[seq_left] )+ identifier[motif] * identifier[units] + identifier[str] ( identifier[seq_right] )
identifier[fastafile] = identifier[pf] + literal[string]
identifier[make_fasta] ( identifier[seq] , identifier[fastafile] , identifier[id] = identifier[chr] . identifier[upper] ())
identifier[simulate_method] ([ identifier[fastafile] , literal[string] . identifier[format] ( identifier[opts] . identifier[depth] ),
literal[string] . identifier[format] ( identifier[opts] . identifier[readlen] ),
literal[string] . identifier[format] ( identifier[opts] . identifier[distance] ),
literal[string] . identifier[format] ( identifier[pf] )])
identifier[read1] = identifier[pf] + literal[string]
identifier[read2] = identifier[pf] + literal[string]
identifier[samfile] , identifier[_] = identifier[align] ([ identifier[ref_fasta] , identifier[read1] , identifier[read2] ])
identifier[indexed_samfile] = identifier[index] ([ identifier[samfile] ])
identifier[sh] ( literal[string] . identifier[format] ( identifier[indexed_samfile] , identifier[pf] ))
identifier[sh] ( literal[string] . identifier[format] ( identifier[indexed_samfile] , identifier[pf] ))
identifier[os] . identifier[chdir] ( identifier[cwd] )
identifier[shutil] . identifier[rmtree] ( identifier[pf] )
identifier[os] . identifier[chdir] ( identifier[basecwd] )
|
def simulate(args):
"""
%prog simulate run_dir 1 300
Simulate BAMs with varying inserts with dwgsim. The above command will
simulate between 1 to 300 CAGs in the HD region, in a directory called
`run_dir`.
"""
p = OptionParser(simulate.__doc__)
p.add_option('--method', choices=('wgsim', 'eagle'), default='eagle', help='Read simulator')
p.add_option('--ref', default='hg38', choices=('hg38', 'hg19'), help='Reference genome version')
p.add_option('--tred', default='HD', help='TRED locus')
add_simulate_options(p)
(opts, args) = p.parse_args(args)
if len(args) != 3:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(rundir, startunits, endunits) = args
ref = opts.ref
ref_fasta = '/mnt/ref/{}.upper.fa'.format(ref)
(startunits, endunits) = (int(startunits), int(endunits))
basecwd = os.getcwd()
mkdir(rundir)
os.chdir(rundir)
cwd = os.getcwd()
# TRED region (e.g. Huntington)
(pad_left, pad_right) = (1000, 10000)
repo = TREDsRepo(ref=ref)
tred = repo[opts.tred]
(chr, start, end) = (tred.chr, tred.repeat_start, tred.repeat_end)
logging.debug('Simulating {}'.format(tred))
fasta = Fasta(ref_fasta)
seq_left = fasta[chr][start - pad_left:start - 1]
seq_right = fasta[chr][end:end + pad_right]
motif = tred.repeat
simulate_method = wgsim if opts.method == 'wgsim' else eagle
# Write fake sequence
for units in range(startunits, endunits + 1):
pf = str(units)
mkdir(pf)
os.chdir(pf)
seq = str(seq_left) + motif * units + str(seq_right)
fastafile = pf + '.fasta'
make_fasta(seq, fastafile, id=chr.upper())
# Simulate reads on it
simulate_method([fastafile, '--depth={}'.format(opts.depth), '--readlen={}'.format(opts.readlen), '--distance={}'.format(opts.distance), '--outfile={}'.format(pf)])
read1 = pf + '.bwa.read1.fastq'
read2 = pf + '.bwa.read2.fastq'
(samfile, _) = align([ref_fasta, read1, read2])
indexed_samfile = index([samfile])
sh('mv {} ../{}.bam'.format(indexed_samfile, pf))
sh('mv {}.bai ../{}.bam.bai'.format(indexed_samfile, pf))
os.chdir(cwd)
shutil.rmtree(pf) # depends on [control=['for'], data=['units']]
os.chdir(basecwd)
|
def check_content(self, content_pattern_names, content, encoding):
"""Check which of the named patterns matches the given content.
Returns a pair (matching, nonmatching), in which each element is a tuple of pattern names.
:param iterable content_pattern_names: names of content patterns to check.
:param bytes content: the content to check.
:param str encoding: the expected encoding of content.
"""
if not content_pattern_names or not encoding:
return (), ()
matching = []
nonmatching = []
for content_pattern_name in content_pattern_names:
if self._content_matchers[content_pattern_name].matches(content.decode(encoding)):
matching.append(content_pattern_name)
else:
nonmatching.append(content_pattern_name)
return tuple(matching), tuple(nonmatching)
|
def function[check_content, parameter[self, content_pattern_names, content, encoding]]:
constant[Check which of the named patterns matches the given content.
Returns a pair (matching, nonmatching), in which each element is a tuple of pattern names.
:param iterable content_pattern_names: names of content patterns to check.
:param bytes content: the content to check.
:param str encoding: the expected encoding of content.
]
if <ast.BoolOp object at 0x7da1b1e8ece0> begin[:]
return[tuple[[<ast.Tuple object at 0x7da1b1e8e440>, <ast.Tuple object at 0x7da1b1e8e470>]]]
variable[matching] assign[=] list[[]]
variable[nonmatching] assign[=] list[[]]
for taget[name[content_pattern_name]] in starred[name[content_pattern_names]] begin[:]
if call[call[name[self]._content_matchers][name[content_pattern_name]].matches, parameter[call[name[content].decode, parameter[name[encoding]]]]] begin[:]
call[name[matching].append, parameter[name[content_pattern_name]]]
return[tuple[[<ast.Call object at 0x7da1b22a7940>, <ast.Call object at 0x7da1b22a6230>]]]
|
keyword[def] identifier[check_content] ( identifier[self] , identifier[content_pattern_names] , identifier[content] , identifier[encoding] ):
literal[string]
keyword[if] keyword[not] identifier[content_pattern_names] keyword[or] keyword[not] identifier[encoding] :
keyword[return] (),()
identifier[matching] =[]
identifier[nonmatching] =[]
keyword[for] identifier[content_pattern_name] keyword[in] identifier[content_pattern_names] :
keyword[if] identifier[self] . identifier[_content_matchers] [ identifier[content_pattern_name] ]. identifier[matches] ( identifier[content] . identifier[decode] ( identifier[encoding] )):
identifier[matching] . identifier[append] ( identifier[content_pattern_name] )
keyword[else] :
identifier[nonmatching] . identifier[append] ( identifier[content_pattern_name] )
keyword[return] identifier[tuple] ( identifier[matching] ), identifier[tuple] ( identifier[nonmatching] )
|
def check_content(self, content_pattern_names, content, encoding):
"""Check which of the named patterns matches the given content.
Returns a pair (matching, nonmatching), in which each element is a tuple of pattern names.
:param iterable content_pattern_names: names of content patterns to check.
:param bytes content: the content to check.
:param str encoding: the expected encoding of content.
"""
if not content_pattern_names or not encoding:
return ((), ()) # depends on [control=['if'], data=[]]
matching = []
nonmatching = []
for content_pattern_name in content_pattern_names:
if self._content_matchers[content_pattern_name].matches(content.decode(encoding)):
matching.append(content_pattern_name) # depends on [control=['if'], data=[]]
else:
nonmatching.append(content_pattern_name) # depends on [control=['for'], data=['content_pattern_name']]
return (tuple(matching), tuple(nonmatching))
|
def has_in_members(self, member):
"""
:calls: `GET /teams/:id/members/:user <http://developer.github.com/v3/orgs/teams>`_
:param member: :class:`github.NamedUser.NamedUser`
:rtype: bool
"""
assert isinstance(member, github.NamedUser.NamedUser), member
status, headers, data = self._requester.requestJson(
"GET",
self.url + "/members/" + member._identity
)
return status == 204
|
def function[has_in_members, parameter[self, member]]:
constant[
:calls: `GET /teams/:id/members/:user <http://developer.github.com/v3/orgs/teams>`_
:param member: :class:`github.NamedUser.NamedUser`
:rtype: bool
]
assert[call[name[isinstance], parameter[name[member], name[github].NamedUser.NamedUser]]]
<ast.Tuple object at 0x7da1b21ee200> assign[=] call[name[self]._requester.requestJson, parameter[constant[GET], binary_operation[binary_operation[name[self].url + constant[/members/]] + name[member]._identity]]]
return[compare[name[status] equal[==] constant[204]]]
|
keyword[def] identifier[has_in_members] ( identifier[self] , identifier[member] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[member] , identifier[github] . identifier[NamedUser] . identifier[NamedUser] ), identifier[member]
identifier[status] , identifier[headers] , identifier[data] = identifier[self] . identifier[_requester] . identifier[requestJson] (
literal[string] ,
identifier[self] . identifier[url] + literal[string] + identifier[member] . identifier[_identity]
)
keyword[return] identifier[status] == literal[int]
|
def has_in_members(self, member):
"""
:calls: `GET /teams/:id/members/:user <http://developer.github.com/v3/orgs/teams>`_
:param member: :class:`github.NamedUser.NamedUser`
:rtype: bool
"""
assert isinstance(member, github.NamedUser.NamedUser), member
(status, headers, data) = self._requester.requestJson('GET', self.url + '/members/' + member._identity)
return status == 204
|
def _maybe_create_new_template(self):
"""Returns True if a new template is created as opposed to reusing the existing template.
When there are multiple available formats, the formatter uses the
first format where a formatting template could be created.
"""
ii = 0
while ii < len(self._possible_formats):
number_format = self._possible_formats[ii]
pattern = number_format.pattern
if self._current_formatting_pattern == pattern:
return False
if self._create_formatting_template(number_format):
self._current_formatting_pattern = pattern
if number_format.national_prefix_formatting_rule is None:
self._should_add_space_after_national_prefix = False
else:
self._should_add_space_after_national_prefix = bool(_NATIONAL_PREFIX_SEPARATORS_PATTERN.search(number_format.national_prefix_formatting_rule))
# With a new formatting template, the matched position using
# the old template needs to be reset.
self._last_match_position = 0
return True
else:
# Remove the current number format from _possible_formats
del self._possible_formats[ii]
ii -= 1
ii += 1
self._able_to_format = False
return False
|
def function[_maybe_create_new_template, parameter[self]]:
constant[Returns True if a new template is created as opposed to reusing the existing template.
When there are multiple available formats, the formatter uses the
first format where a formatting template could be created.
]
variable[ii] assign[=] constant[0]
while compare[name[ii] less[<] call[name[len], parameter[name[self]._possible_formats]]] begin[:]
variable[number_format] assign[=] call[name[self]._possible_formats][name[ii]]
variable[pattern] assign[=] name[number_format].pattern
if compare[name[self]._current_formatting_pattern equal[==] name[pattern]] begin[:]
return[constant[False]]
if call[name[self]._create_formatting_template, parameter[name[number_format]]] begin[:]
name[self]._current_formatting_pattern assign[=] name[pattern]
if compare[name[number_format].national_prefix_formatting_rule is constant[None]] begin[:]
name[self]._should_add_space_after_national_prefix assign[=] constant[False]
name[self]._last_match_position assign[=] constant[0]
return[constant[True]]
<ast.AugAssign object at 0x7da1b1950820>
name[self]._able_to_format assign[=] constant[False]
return[constant[False]]
|
keyword[def] identifier[_maybe_create_new_template] ( identifier[self] ):
literal[string]
identifier[ii] = literal[int]
keyword[while] identifier[ii] < identifier[len] ( identifier[self] . identifier[_possible_formats] ):
identifier[number_format] = identifier[self] . identifier[_possible_formats] [ identifier[ii] ]
identifier[pattern] = identifier[number_format] . identifier[pattern]
keyword[if] identifier[self] . identifier[_current_formatting_pattern] == identifier[pattern] :
keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[_create_formatting_template] ( identifier[number_format] ):
identifier[self] . identifier[_current_formatting_pattern] = identifier[pattern]
keyword[if] identifier[number_format] . identifier[national_prefix_formatting_rule] keyword[is] keyword[None] :
identifier[self] . identifier[_should_add_space_after_national_prefix] = keyword[False]
keyword[else] :
identifier[self] . identifier[_should_add_space_after_national_prefix] = identifier[bool] ( identifier[_NATIONAL_PREFIX_SEPARATORS_PATTERN] . identifier[search] ( identifier[number_format] . identifier[national_prefix_formatting_rule] ))
identifier[self] . identifier[_last_match_position] = literal[int]
keyword[return] keyword[True]
keyword[else] :
keyword[del] identifier[self] . identifier[_possible_formats] [ identifier[ii] ]
identifier[ii] -= literal[int]
identifier[ii] += literal[int]
identifier[self] . identifier[_able_to_format] = keyword[False]
keyword[return] keyword[False]
|
def _maybe_create_new_template(self):
"""Returns True if a new template is created as opposed to reusing the existing template.
When there are multiple available formats, the formatter uses the
first format where a formatting template could be created.
"""
ii = 0
while ii < len(self._possible_formats):
number_format = self._possible_formats[ii]
pattern = number_format.pattern
if self._current_formatting_pattern == pattern:
return False # depends on [control=['if'], data=[]]
if self._create_formatting_template(number_format):
self._current_formatting_pattern = pattern
if number_format.national_prefix_formatting_rule is None:
self._should_add_space_after_national_prefix = False # depends on [control=['if'], data=[]]
else:
self._should_add_space_after_national_prefix = bool(_NATIONAL_PREFIX_SEPARATORS_PATTERN.search(number_format.national_prefix_formatting_rule))
# With a new formatting template, the matched position using
# the old template needs to be reset.
self._last_match_position = 0
return True # depends on [control=['if'], data=[]]
else:
# Remove the current number format from _possible_formats
del self._possible_formats[ii]
ii -= 1
ii += 1 # depends on [control=['while'], data=['ii']]
self._able_to_format = False
return False
|
def replace(table, field, a, b, **kwargs):
"""
Convenience function to replace all occurrences of `a` with `b` under the
given field. See also :func:`convert`.
The ``where`` keyword argument can be given with a callable or expression
which is evaluated on each row and which should return True if the
conversion should be applied on that row, else False.
"""
return convert(table, field, {a: b}, **kwargs)
|
def function[replace, parameter[table, field, a, b]]:
constant[
Convenience function to replace all occurrences of `a` with `b` under the
given field. See also :func:`convert`.
The ``where`` keyword argument can be given with a callable or expression
which is evaluated on each row and which should return True if the
conversion should be applied on that row, else False.
]
return[call[name[convert], parameter[name[table], name[field], dictionary[[<ast.Name object at 0x7da2045657b0>], [<ast.Name object at 0x7da204566890>]]]]]
|
keyword[def] identifier[replace] ( identifier[table] , identifier[field] , identifier[a] , identifier[b] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[convert] ( identifier[table] , identifier[field] ,{ identifier[a] : identifier[b] },** identifier[kwargs] )
|
def replace(table, field, a, b, **kwargs):
"""
Convenience function to replace all occurrences of `a` with `b` under the
given field. See also :func:`convert`.
The ``where`` keyword argument can be given with a callable or expression
which is evaluated on each row and which should return True if the
conversion should be applied on that row, else False.
"""
return convert(table, field, {a: b}, **kwargs)
|
def section_tortuosity(section):
'''Tortuosity of a section
The tortuosity is defined as the ratio of the path length of a section
and the euclidian distnce between its end points.
The path length is the sum of distances between consecutive points.
If the section contains less than 2 points, the value 1 is returned.
'''
pts = section.points
return 1 if len(pts) < 2 else mm.section_length(pts) / mm.point_dist(pts[-1], pts[0])
|
def function[section_tortuosity, parameter[section]]:
constant[Tortuosity of a section
The tortuosity is defined as the ratio of the path length of a section
and the euclidian distnce between its end points.
The path length is the sum of distances between consecutive points.
If the section contains less than 2 points, the value 1 is returned.
]
variable[pts] assign[=] name[section].points
return[<ast.IfExp object at 0x7da20c76c130>]
|
keyword[def] identifier[section_tortuosity] ( identifier[section] ):
literal[string]
identifier[pts] = identifier[section] . identifier[points]
keyword[return] literal[int] keyword[if] identifier[len] ( identifier[pts] )< literal[int] keyword[else] identifier[mm] . identifier[section_length] ( identifier[pts] )/ identifier[mm] . identifier[point_dist] ( identifier[pts] [- literal[int] ], identifier[pts] [ literal[int] ])
|
def section_tortuosity(section):
"""Tortuosity of a section
The tortuosity is defined as the ratio of the path length of a section
and the euclidian distnce between its end points.
The path length is the sum of distances between consecutive points.
If the section contains less than 2 points, the value 1 is returned.
"""
pts = section.points
return 1 if len(pts) < 2 else mm.section_length(pts) / mm.point_dist(pts[-1], pts[0])
|
def laplacian_pyramid_image(shape, n_levels=4, sd=None):
"""Simple laplacian pyramid paramaterization of an image.
For more flexibility, use a sum of lowres_tensor()s.
Args:
shape: shape of resulting image, [batch, width, height, channels].
n_levels: number of levels of laplacian pyarmid.
sd: standard deviation of param initialization.
Returns:
tensor with shape from first argument.
"""
batch_dims = shape[:-3]
w, h, ch = shape[-3:]
pyramid = 0
for n in range(n_levels):
k = 2 ** n
pyramid += lowres_tensor(shape, batch_dims + (w // k, h // k, ch), sd=sd)
return pyramid
|
def function[laplacian_pyramid_image, parameter[shape, n_levels, sd]]:
constant[Simple laplacian pyramid paramaterization of an image.
For more flexibility, use a sum of lowres_tensor()s.
Args:
shape: shape of resulting image, [batch, width, height, channels].
n_levels: number of levels of laplacian pyarmid.
sd: standard deviation of param initialization.
Returns:
tensor with shape from first argument.
]
variable[batch_dims] assign[=] call[name[shape]][<ast.Slice object at 0x7da1b20c9db0>]
<ast.Tuple object at 0x7da1b20c8190> assign[=] call[name[shape]][<ast.Slice object at 0x7da1b20cbbe0>]
variable[pyramid] assign[=] constant[0]
for taget[name[n]] in starred[call[name[range], parameter[name[n_levels]]]] begin[:]
variable[k] assign[=] binary_operation[constant[2] ** name[n]]
<ast.AugAssign object at 0x7da1b20c8e80>
return[name[pyramid]]
|
keyword[def] identifier[laplacian_pyramid_image] ( identifier[shape] , identifier[n_levels] = literal[int] , identifier[sd] = keyword[None] ):
literal[string]
identifier[batch_dims] = identifier[shape] [:- literal[int] ]
identifier[w] , identifier[h] , identifier[ch] = identifier[shape] [- literal[int] :]
identifier[pyramid] = literal[int]
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[n_levels] ):
identifier[k] = literal[int] ** identifier[n]
identifier[pyramid] += identifier[lowres_tensor] ( identifier[shape] , identifier[batch_dims] +( identifier[w] // identifier[k] , identifier[h] // identifier[k] , identifier[ch] ), identifier[sd] = identifier[sd] )
keyword[return] identifier[pyramid]
|
def laplacian_pyramid_image(shape, n_levels=4, sd=None):
"""Simple laplacian pyramid paramaterization of an image.
For more flexibility, use a sum of lowres_tensor()s.
Args:
shape: shape of resulting image, [batch, width, height, channels].
n_levels: number of levels of laplacian pyarmid.
sd: standard deviation of param initialization.
Returns:
tensor with shape from first argument.
"""
batch_dims = shape[:-3]
(w, h, ch) = shape[-3:]
pyramid = 0
for n in range(n_levels):
k = 2 ** n
pyramid += lowres_tensor(shape, batch_dims + (w // k, h // k, ch), sd=sd) # depends on [control=['for'], data=['n']]
return pyramid
|
async def remove_participant(self, p: Participant):
""" remove a participant from the tournament
|methcoro|
Args:
p: the participant to remove
Raises:
APIException
"""
await self.connection('DELETE', 'tournaments/{}/participants/{}'.format(self._id, p._id))
if p in self.participants:
self.participants.remove(p)
|
<ast.AsyncFunctionDef object at 0x7da18fe909d0>
|
keyword[async] keyword[def] identifier[remove_participant] ( identifier[self] , identifier[p] : identifier[Participant] ):
literal[string]
keyword[await] identifier[self] . identifier[connection] ( literal[string] , literal[string] . identifier[format] ( identifier[self] . identifier[_id] , identifier[p] . identifier[_id] ))
keyword[if] identifier[p] keyword[in] identifier[self] . identifier[participants] :
identifier[self] . identifier[participants] . identifier[remove] ( identifier[p] )
|
async def remove_participant(self, p: Participant):
""" remove a participant from the tournament
|methcoro|
Args:
p: the participant to remove
Raises:
APIException
"""
await self.connection('DELETE', 'tournaments/{}/participants/{}'.format(self._id, p._id))
if p in self.participants:
self.participants.remove(p) # depends on [control=['if'], data=['p']]
|
def is_alive(self):
"""Returns a boolean flag with the state of the connection."""
null = chr(0)
if self.remote_conn is None:
log.error("Connection is not initialised, is_alive returns False")
return False
if self.protocol == "telnet":
try:
# Try sending IAC + NOP (IAC is telnet way of sending command)
# IAC = Interpret as Command; it comes before the NOP.
log.debug("Sending IAC + NOP")
# Need to send multiple times to test connection
self.remote_conn.sock.sendall(telnetlib.IAC + telnetlib.NOP)
self.remote_conn.sock.sendall(telnetlib.IAC + telnetlib.NOP)
self.remote_conn.sock.sendall(telnetlib.IAC + telnetlib.NOP)
return True
except AttributeError:
return False
else:
# SSH
try:
# Try sending ASCII null byte to maintain the connection alive
log.debug("Sending the NULL byte")
self.write_channel(null)
return self.remote_conn.transport.is_active()
except (socket.error, EOFError):
log.error("Unable to send", exc_info=True)
# If unable to send, we can tell for sure that the connection is unusable
return False
return False
|
def function[is_alive, parameter[self]]:
constant[Returns a boolean flag with the state of the connection.]
variable[null] assign[=] call[name[chr], parameter[constant[0]]]
if compare[name[self].remote_conn is constant[None]] begin[:]
call[name[log].error, parameter[constant[Connection is not initialised, is_alive returns False]]]
return[constant[False]]
if compare[name[self].protocol equal[==] constant[telnet]] begin[:]
<ast.Try object at 0x7da18f09edd0>
return[constant[False]]
|
keyword[def] identifier[is_alive] ( identifier[self] ):
literal[string]
identifier[null] = identifier[chr] ( literal[int] )
keyword[if] identifier[self] . identifier[remote_conn] keyword[is] keyword[None] :
identifier[log] . identifier[error] ( literal[string] )
keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[protocol] == literal[string] :
keyword[try] :
identifier[log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[remote_conn] . identifier[sock] . identifier[sendall] ( identifier[telnetlib] . identifier[IAC] + identifier[telnetlib] . identifier[NOP] )
identifier[self] . identifier[remote_conn] . identifier[sock] . identifier[sendall] ( identifier[telnetlib] . identifier[IAC] + identifier[telnetlib] . identifier[NOP] )
identifier[self] . identifier[remote_conn] . identifier[sock] . identifier[sendall] ( identifier[telnetlib] . identifier[IAC] + identifier[telnetlib] . identifier[NOP] )
keyword[return] keyword[True]
keyword[except] identifier[AttributeError] :
keyword[return] keyword[False]
keyword[else] :
keyword[try] :
identifier[log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[write_channel] ( identifier[null] )
keyword[return] identifier[self] . identifier[remote_conn] . identifier[transport] . identifier[is_active] ()
keyword[except] ( identifier[socket] . identifier[error] , identifier[EOFError] ):
identifier[log] . identifier[error] ( literal[string] , identifier[exc_info] = keyword[True] )
keyword[return] keyword[False]
keyword[return] keyword[False]
|
def is_alive(self):
"""Returns a boolean flag with the state of the connection."""
null = chr(0)
if self.remote_conn is None:
log.error('Connection is not initialised, is_alive returns False')
return False # depends on [control=['if'], data=[]]
if self.protocol == 'telnet':
try:
# Try sending IAC + NOP (IAC is telnet way of sending command)
# IAC = Interpret as Command; it comes before the NOP.
log.debug('Sending IAC + NOP')
# Need to send multiple times to test connection
self.remote_conn.sock.sendall(telnetlib.IAC + telnetlib.NOP)
self.remote_conn.sock.sendall(telnetlib.IAC + telnetlib.NOP)
self.remote_conn.sock.sendall(telnetlib.IAC + telnetlib.NOP)
return True # depends on [control=['try'], data=[]]
except AttributeError:
return False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
# SSH
try:
# Try sending ASCII null byte to maintain the connection alive
log.debug('Sending the NULL byte')
self.write_channel(null)
return self.remote_conn.transport.is_active() # depends on [control=['try'], data=[]]
except (socket.error, EOFError):
log.error('Unable to send', exc_info=True)
# If unable to send, we can tell for sure that the connection is unusable
return False # depends on [control=['except'], data=[]]
return False
|
def is_letter(uni_char):
"""Determine whether the given Unicode character is a Unicode letter"""
category = Category.get(uni_char)
return (category == Category.UPPERCASE_LETTER or
category == Category.LOWERCASE_LETTER or
category == Category.TITLECASE_LETTER or
category == Category.MODIFIER_LETTER or
category == Category.OTHER_LETTER)
|
def function[is_letter, parameter[uni_char]]:
constant[Determine whether the given Unicode character is a Unicode letter]
variable[category] assign[=] call[name[Category].get, parameter[name[uni_char]]]
return[<ast.BoolOp object at 0x7da1b188cdf0>]
|
keyword[def] identifier[is_letter] ( identifier[uni_char] ):
literal[string]
identifier[category] = identifier[Category] . identifier[get] ( identifier[uni_char] )
keyword[return] ( identifier[category] == identifier[Category] . identifier[UPPERCASE_LETTER] keyword[or]
identifier[category] == identifier[Category] . identifier[LOWERCASE_LETTER] keyword[or]
identifier[category] == identifier[Category] . identifier[TITLECASE_LETTER] keyword[or]
identifier[category] == identifier[Category] . identifier[MODIFIER_LETTER] keyword[or]
identifier[category] == identifier[Category] . identifier[OTHER_LETTER] )
|
def is_letter(uni_char):
"""Determine whether the given Unicode character is a Unicode letter"""
category = Category.get(uni_char)
return category == Category.UPPERCASE_LETTER or category == Category.LOWERCASE_LETTER or category == Category.TITLECASE_LETTER or (category == Category.MODIFIER_LETTER) or (category == Category.OTHER_LETTER)
|
def uavionix_adsb_out_cfg_encode(self, ICAO, callsign, emitterType, aircraftSize, gpsOffsetLat, gpsOffsetLon, stallSpeed, rfSelect):
'''
Static data to configure the ADS-B transponder (send within 10 sec of
a POR and every 10 sec thereafter)
ICAO : Vehicle address (24 bit) (uint32_t)
callsign : Vehicle identifier (8 characters, null terminated, valid characters are A-Z, 0-9, " " only) (char)
emitterType : Transmitting vehicle type. See ADSB_EMITTER_TYPE enum (uint8_t)
aircraftSize : Aircraft length and width encoding (table 2-35 of DO-282B) (uint8_t)
gpsOffsetLat : GPS antenna lateral offset (table 2-36 of DO-282B) (uint8_t)
gpsOffsetLon : GPS antenna longitudinal offset from nose [if non-zero, take position (in meters) divide by 2 and add one] (table 2-37 DO-282B) (uint8_t)
stallSpeed : Aircraft stall speed in cm/s (uint16_t)
rfSelect : ADS-B transponder reciever and transmit enable flags (uint8_t)
'''
return MAVLink_uavionix_adsb_out_cfg_message(ICAO, callsign, emitterType, aircraftSize, gpsOffsetLat, gpsOffsetLon, stallSpeed, rfSelect)
|
def function[uavionix_adsb_out_cfg_encode, parameter[self, ICAO, callsign, emitterType, aircraftSize, gpsOffsetLat, gpsOffsetLon, stallSpeed, rfSelect]]:
constant[
Static data to configure the ADS-B transponder (send within 10 sec of
a POR and every 10 sec thereafter)
ICAO : Vehicle address (24 bit) (uint32_t)
callsign : Vehicle identifier (8 characters, null terminated, valid characters are A-Z, 0-9, " " only) (char)
emitterType : Transmitting vehicle type. See ADSB_EMITTER_TYPE enum (uint8_t)
aircraftSize : Aircraft length and width encoding (table 2-35 of DO-282B) (uint8_t)
gpsOffsetLat : GPS antenna lateral offset (table 2-36 of DO-282B) (uint8_t)
gpsOffsetLon : GPS antenna longitudinal offset from nose [if non-zero, take position (in meters) divide by 2 and add one] (table 2-37 DO-282B) (uint8_t)
stallSpeed : Aircraft stall speed in cm/s (uint16_t)
rfSelect : ADS-B transponder reciever and transmit enable flags (uint8_t)
]
return[call[name[MAVLink_uavionix_adsb_out_cfg_message], parameter[name[ICAO], name[callsign], name[emitterType], name[aircraftSize], name[gpsOffsetLat], name[gpsOffsetLon], name[stallSpeed], name[rfSelect]]]]
|
keyword[def] identifier[uavionix_adsb_out_cfg_encode] ( identifier[self] , identifier[ICAO] , identifier[callsign] , identifier[emitterType] , identifier[aircraftSize] , identifier[gpsOffsetLat] , identifier[gpsOffsetLon] , identifier[stallSpeed] , identifier[rfSelect] ):
literal[string]
keyword[return] identifier[MAVLink_uavionix_adsb_out_cfg_message] ( identifier[ICAO] , identifier[callsign] , identifier[emitterType] , identifier[aircraftSize] , identifier[gpsOffsetLat] , identifier[gpsOffsetLon] , identifier[stallSpeed] , identifier[rfSelect] )
|
def uavionix_adsb_out_cfg_encode(self, ICAO, callsign, emitterType, aircraftSize, gpsOffsetLat, gpsOffsetLon, stallSpeed, rfSelect):
"""
Static data to configure the ADS-B transponder (send within 10 sec of
a POR and every 10 sec thereafter)
ICAO : Vehicle address (24 bit) (uint32_t)
callsign : Vehicle identifier (8 characters, null terminated, valid characters are A-Z, 0-9, " " only) (char)
emitterType : Transmitting vehicle type. See ADSB_EMITTER_TYPE enum (uint8_t)
aircraftSize : Aircraft length and width encoding (table 2-35 of DO-282B) (uint8_t)
gpsOffsetLat : GPS antenna lateral offset (table 2-36 of DO-282B) (uint8_t)
gpsOffsetLon : GPS antenna longitudinal offset from nose [if non-zero, take position (in meters) divide by 2 and add one] (table 2-37 DO-282B) (uint8_t)
stallSpeed : Aircraft stall speed in cm/s (uint16_t)
rfSelect : ADS-B transponder reciever and transmit enable flags (uint8_t)
"""
return MAVLink_uavionix_adsb_out_cfg_message(ICAO, callsign, emitterType, aircraftSize, gpsOffsetLat, gpsOffsetLon, stallSpeed, rfSelect)
|
def intersection(self, key, *others):
"""Return a new set with elements common to the set and all others."""
if not isinstance(key, str):
raise ValueError("String expected.")
self.db.sinterstore(key, [self.key] + [o.key for o in others])
return Set(key)
|
def function[intersection, parameter[self, key]]:
constant[Return a new set with elements common to the set and all others.]
if <ast.UnaryOp object at 0x7da2044c0670> begin[:]
<ast.Raise object at 0x7da2044c29b0>
call[name[self].db.sinterstore, parameter[name[key], binary_operation[list[[<ast.Attribute object at 0x7da2044c2740>]] + <ast.ListComp object at 0x7da2044c0c40>]]]
return[call[name[Set], parameter[name[key]]]]
|
keyword[def] identifier[intersection] ( identifier[self] , identifier[key] ,* identifier[others] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[key] , identifier[str] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[db] . identifier[sinterstore] ( identifier[key] ,[ identifier[self] . identifier[key] ]+[ identifier[o] . identifier[key] keyword[for] identifier[o] keyword[in] identifier[others] ])
keyword[return] identifier[Set] ( identifier[key] )
|
def intersection(self, key, *others):
"""Return a new set with elements common to the set and all others."""
if not isinstance(key, str):
raise ValueError('String expected.') # depends on [control=['if'], data=[]]
self.db.sinterstore(key, [self.key] + [o.key for o in others])
return Set(key)
|
def parametrized_function(decorator):
'''Decorator used to create decorators with arguments.
Should be used with function returning another function
that will be called with the original function has the first
parameter.
No difference are made between method and function,
so the wrapper function will have to know if the first
argument is an instance (self).
Note that when using reflect or annotate module functions,
depth should be incremented by one.
Example::
@decorator.parametrized_function
def mydecorator(function_original, decorator, arguments):
def wrapper(call, arguments):
# processing
return function_original(call, arguments)
return wrapper
@mydecorator(decorator, arguments)
def myfunction():
pass
'''
def meta_decorator(*args, **kwargs):
return _NormalMetaDecorator(decorator, args, kwargs)
return meta_decorator
|
def function[parametrized_function, parameter[decorator]]:
constant[Decorator used to create decorators with arguments.
Should be used with function returning another function
that will be called with the original function has the first
parameter.
No difference are made between method and function,
so the wrapper function will have to know if the first
argument is an instance (self).
Note that when using reflect or annotate module functions,
depth should be incremented by one.
Example::
@decorator.parametrized_function
def mydecorator(function_original, decorator, arguments):
def wrapper(call, arguments):
# processing
return function_original(call, arguments)
return wrapper
@mydecorator(decorator, arguments)
def myfunction():
pass
]
def function[meta_decorator, parameter[]]:
return[call[name[_NormalMetaDecorator], parameter[name[decorator], name[args], name[kwargs]]]]
return[name[meta_decorator]]
|
keyword[def] identifier[parametrized_function] ( identifier[decorator] ):
literal[string]
keyword[def] identifier[meta_decorator] (* identifier[args] ,** identifier[kwargs] ):
keyword[return] identifier[_NormalMetaDecorator] ( identifier[decorator] , identifier[args] , identifier[kwargs] )
keyword[return] identifier[meta_decorator]
|
def parametrized_function(decorator):
"""Decorator used to create decorators with arguments.
Should be used with function returning another function
that will be called with the original function has the first
parameter.
No difference are made between method and function,
so the wrapper function will have to know if the first
argument is an instance (self).
Note that when using reflect or annotate module functions,
depth should be incremented by one.
Example::
@decorator.parametrized_function
def mydecorator(function_original, decorator, arguments):
def wrapper(call, arguments):
# processing
return function_original(call, arguments)
return wrapper
@mydecorator(decorator, arguments)
def myfunction():
pass
"""
def meta_decorator(*args, **kwargs):
return _NormalMetaDecorator(decorator, args, kwargs)
return meta_decorator
|
def start_server(self):
"""Start the RPC Server.
:return:
"""
self._stopped.clear()
if not self._connection or self._connection.is_closed:
self._create_connection()
while not self._stopped.is_set():
try:
# Check our connection for errors.
self._connection.check_for_errors()
self._update_consumers()
except amqpstorm.AMQPError as why:
# If an error occurs, re-connect and let update_consumers
# re-open the channels.
LOGGER.warning(why)
self._stop_consumers()
self._create_connection()
time.sleep(1)
|
def function[start_server, parameter[self]]:
constant[Start the RPC Server.
:return:
]
call[name[self]._stopped.clear, parameter[]]
if <ast.BoolOp object at 0x7da2054a6b60> begin[:]
call[name[self]._create_connection, parameter[]]
while <ast.UnaryOp object at 0x7da18f58e530> begin[:]
<ast.Try object at 0x7da18f58d840>
call[name[time].sleep, parameter[constant[1]]]
|
keyword[def] identifier[start_server] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_stopped] . identifier[clear] ()
keyword[if] keyword[not] identifier[self] . identifier[_connection] keyword[or] identifier[self] . identifier[_connection] . identifier[is_closed] :
identifier[self] . identifier[_create_connection] ()
keyword[while] keyword[not] identifier[self] . identifier[_stopped] . identifier[is_set] ():
keyword[try] :
identifier[self] . identifier[_connection] . identifier[check_for_errors] ()
identifier[self] . identifier[_update_consumers] ()
keyword[except] identifier[amqpstorm] . identifier[AMQPError] keyword[as] identifier[why] :
identifier[LOGGER] . identifier[warning] ( identifier[why] )
identifier[self] . identifier[_stop_consumers] ()
identifier[self] . identifier[_create_connection] ()
identifier[time] . identifier[sleep] ( literal[int] )
|
def start_server(self):
"""Start the RPC Server.
:return:
"""
self._stopped.clear()
if not self._connection or self._connection.is_closed:
self._create_connection() # depends on [control=['if'], data=[]]
while not self._stopped.is_set():
try:
# Check our connection for errors.
self._connection.check_for_errors()
self._update_consumers() # depends on [control=['try'], data=[]]
except amqpstorm.AMQPError as why:
# If an error occurs, re-connect and let update_consumers
# re-open the channels.
LOGGER.warning(why)
self._stop_consumers()
self._create_connection() # depends on [control=['except'], data=['why']]
time.sleep(1) # depends on [control=['while'], data=[]]
|
def conform(self, frame, axis='items'):
"""
Conform input DataFrame to align with chosen axis pair.
Parameters
----------
frame : DataFrame
axis : {'items', 'major', 'minor'}
Axis the input corresponds to. E.g., if axis='major', then
the frame's columns would be items, and the index would be
values of the minor axis
Returns
-------
DataFrame
"""
axes = self._get_plane_axes(axis)
return frame.reindex(**self._extract_axes_for_slice(self, axes))
|
def function[conform, parameter[self, frame, axis]]:
constant[
Conform input DataFrame to align with chosen axis pair.
Parameters
----------
frame : DataFrame
axis : {'items', 'major', 'minor'}
Axis the input corresponds to. E.g., if axis='major', then
the frame's columns would be items, and the index would be
values of the minor axis
Returns
-------
DataFrame
]
variable[axes] assign[=] call[name[self]._get_plane_axes, parameter[name[axis]]]
return[call[name[frame].reindex, parameter[]]]
|
keyword[def] identifier[conform] ( identifier[self] , identifier[frame] , identifier[axis] = literal[string] ):
literal[string]
identifier[axes] = identifier[self] . identifier[_get_plane_axes] ( identifier[axis] )
keyword[return] identifier[frame] . identifier[reindex] (** identifier[self] . identifier[_extract_axes_for_slice] ( identifier[self] , identifier[axes] ))
|
def conform(self, frame, axis='items'):
"""
Conform input DataFrame to align with chosen axis pair.
Parameters
----------
frame : DataFrame
axis : {'items', 'major', 'minor'}
Axis the input corresponds to. E.g., if axis='major', then
the frame's columns would be items, and the index would be
values of the minor axis
Returns
-------
DataFrame
"""
axes = self._get_plane_axes(axis)
return frame.reindex(**self._extract_axes_for_slice(self, axes))
|
def sort_protein_sequences(protein_sequences):
"""
Sort protein sequences in decreasing order of priority
"""
return list(
sorted(
protein_sequences,
key=ProteinSequence.ascending_sort_key,
reverse=True))
|
def function[sort_protein_sequences, parameter[protein_sequences]]:
constant[
Sort protein sequences in decreasing order of priority
]
return[call[name[list], parameter[call[name[sorted], parameter[name[protein_sequences]]]]]]
|
keyword[def] identifier[sort_protein_sequences] ( identifier[protein_sequences] ):
literal[string]
keyword[return] identifier[list] (
identifier[sorted] (
identifier[protein_sequences] ,
identifier[key] = identifier[ProteinSequence] . identifier[ascending_sort_key] ,
identifier[reverse] = keyword[True] ))
|
def sort_protein_sequences(protein_sequences):
"""
Sort protein sequences in decreasing order of priority
"""
return list(sorted(protein_sequences, key=ProteinSequence.ascending_sort_key, reverse=True))
|
def sendError(self, errorcode):
"""This method uses the socket passed, and uses the errorcode to
compose and send an error packet."""
log.debug("In sendError, being asked to send error %d", errorcode)
errpkt = TftpPacketERR()
errpkt.errorcode = errorcode
if self.context.tidport == None:
log.debug("Error packet received outside session. Discarding")
else:
self.context.sock.sendto(errpkt.encode().buffer,
(self.context.host,
self.context.tidport))
self.context.last_pkt = errpkt
|
def function[sendError, parameter[self, errorcode]]:
constant[This method uses the socket passed, and uses the errorcode to
compose and send an error packet.]
call[name[log].debug, parameter[constant[In sendError, being asked to send error %d], name[errorcode]]]
variable[errpkt] assign[=] call[name[TftpPacketERR], parameter[]]
name[errpkt].errorcode assign[=] name[errorcode]
if compare[name[self].context.tidport equal[==] constant[None]] begin[:]
call[name[log].debug, parameter[constant[Error packet received outside session. Discarding]]]
name[self].context.last_pkt assign[=] name[errpkt]
|
keyword[def] identifier[sendError] ( identifier[self] , identifier[errorcode] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] , identifier[errorcode] )
identifier[errpkt] = identifier[TftpPacketERR] ()
identifier[errpkt] . identifier[errorcode] = identifier[errorcode]
keyword[if] identifier[self] . identifier[context] . identifier[tidport] == keyword[None] :
identifier[log] . identifier[debug] ( literal[string] )
keyword[else] :
identifier[self] . identifier[context] . identifier[sock] . identifier[sendto] ( identifier[errpkt] . identifier[encode] (). identifier[buffer] ,
( identifier[self] . identifier[context] . identifier[host] ,
identifier[self] . identifier[context] . identifier[tidport] ))
identifier[self] . identifier[context] . identifier[last_pkt] = identifier[errpkt]
|
def sendError(self, errorcode):
"""This method uses the socket passed, and uses the errorcode to
compose and send an error packet."""
log.debug('In sendError, being asked to send error %d', errorcode)
errpkt = TftpPacketERR()
errpkt.errorcode = errorcode
if self.context.tidport == None:
log.debug('Error packet received outside session. Discarding') # depends on [control=['if'], data=[]]
else:
self.context.sock.sendto(errpkt.encode().buffer, (self.context.host, self.context.tidport))
self.context.last_pkt = errpkt
|
def releaseNetToMs():
"""RELEASE Section 9.3.18.1"""
a = TpPd(pd=0x3)
b = MessageType(mesType=0x2d) # 00101101
c = CauseHdr(ieiC=0x08, eightBitC=0x0)
d = CauseHdr(ieiC=0x08, eightBitC=0x0)
e = FacilityHdr(ieiF=0x1C, eightBitF=0x0)
f = UserUserHdr(ieiUU=0x7E, eightBitUU=0x0)
packet = a / b / c / d / e / f
return packet
|
def function[releaseNetToMs, parameter[]]:
constant[RELEASE Section 9.3.18.1]
variable[a] assign[=] call[name[TpPd], parameter[]]
variable[b] assign[=] call[name[MessageType], parameter[]]
variable[c] assign[=] call[name[CauseHdr], parameter[]]
variable[d] assign[=] call[name[CauseHdr], parameter[]]
variable[e] assign[=] call[name[FacilityHdr], parameter[]]
variable[f] assign[=] call[name[UserUserHdr], parameter[]]
variable[packet] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[a] / name[b]] / name[c]] / name[d]] / name[e]] / name[f]]
return[name[packet]]
|
keyword[def] identifier[releaseNetToMs] ():
literal[string]
identifier[a] = identifier[TpPd] ( identifier[pd] = literal[int] )
identifier[b] = identifier[MessageType] ( identifier[mesType] = literal[int] )
identifier[c] = identifier[CauseHdr] ( identifier[ieiC] = literal[int] , identifier[eightBitC] = literal[int] )
identifier[d] = identifier[CauseHdr] ( identifier[ieiC] = literal[int] , identifier[eightBitC] = literal[int] )
identifier[e] = identifier[FacilityHdr] ( identifier[ieiF] = literal[int] , identifier[eightBitF] = literal[int] )
identifier[f] = identifier[UserUserHdr] ( identifier[ieiUU] = literal[int] , identifier[eightBitUU] = literal[int] )
identifier[packet] = identifier[a] / identifier[b] / identifier[c] / identifier[d] / identifier[e] / identifier[f]
keyword[return] identifier[packet]
|
def releaseNetToMs():
"""RELEASE Section 9.3.18.1"""
a = TpPd(pd=3)
b = MessageType(mesType=45) # 00101101
c = CauseHdr(ieiC=8, eightBitC=0)
d = CauseHdr(ieiC=8, eightBitC=0)
e = FacilityHdr(ieiF=28, eightBitF=0)
f = UserUserHdr(ieiUU=126, eightBitUU=0)
packet = a / b / c / d / e / f
return packet
|
def _correlate(self):
"""
Run correlation algorithm.
"""
a = self.algorithm(**self.algorithm_params)
self.correlation_result = a.run()
|
def function[_correlate, parameter[self]]:
constant[
Run correlation algorithm.
]
variable[a] assign[=] call[name[self].algorithm, parameter[]]
name[self].correlation_result assign[=] call[name[a].run, parameter[]]
|
keyword[def] identifier[_correlate] ( identifier[self] ):
literal[string]
identifier[a] = identifier[self] . identifier[algorithm] (** identifier[self] . identifier[algorithm_params] )
identifier[self] . identifier[correlation_result] = identifier[a] . identifier[run] ()
|
def _correlate(self):
"""
Run correlation algorithm.
"""
a = self.algorithm(**self.algorithm_params)
self.correlation_result = a.run()
|
def rotate_left(self):
"""
Rotate the node to the left.
"""
right = self.right
new = self._replace(right=self.right.left, red=True)
top = right._replace(left=new, red=self.red)
return top
|
def function[rotate_left, parameter[self]]:
constant[
Rotate the node to the left.
]
variable[right] assign[=] name[self].right
variable[new] assign[=] call[name[self]._replace, parameter[]]
variable[top] assign[=] call[name[right]._replace, parameter[]]
return[name[top]]
|
keyword[def] identifier[rotate_left] ( identifier[self] ):
literal[string]
identifier[right] = identifier[self] . identifier[right]
identifier[new] = identifier[self] . identifier[_replace] ( identifier[right] = identifier[self] . identifier[right] . identifier[left] , identifier[red] = keyword[True] )
identifier[top] = identifier[right] . identifier[_replace] ( identifier[left] = identifier[new] , identifier[red] = identifier[self] . identifier[red] )
keyword[return] identifier[top]
|
def rotate_left(self):
"""
Rotate the node to the left.
"""
right = self.right
new = self._replace(right=self.right.left, red=True)
top = right._replace(left=new, red=self.red)
return top
|
def _generate_async(self, generator):
"""
Return the previous generator object after having run the first element
evaluation as a background task.
Args:
generator (iterable): A generator function.
Returns:
iterable: The generator function with first element evaluated
in background.
"""
first_value_future = self._workers.submit(next, generator)
def get_first_element(future=first_value_future):
"""
Get first element value from future.
Args:
future (concurrent.futures._base.Future): First value future.
Returns:
Evaluated value
"""
try:
yield future.result()
except StopIteration:
return
return chain(get_first_element(), generator)
|
def function[_generate_async, parameter[self, generator]]:
constant[
Return the previous generator object after having run the first element
evaluation as a background task.
Args:
generator (iterable): A generator function.
Returns:
iterable: The generator function with first element evaluated
in background.
]
variable[first_value_future] assign[=] call[name[self]._workers.submit, parameter[name[next], name[generator]]]
def function[get_first_element, parameter[future]]:
constant[
Get first element value from future.
Args:
future (concurrent.futures._base.Future): First value future.
Returns:
Evaluated value
]
<ast.Try object at 0x7da1b1a769b0>
return[call[name[chain], parameter[call[name[get_first_element], parameter[]], name[generator]]]]
|
keyword[def] identifier[_generate_async] ( identifier[self] , identifier[generator] ):
literal[string]
identifier[first_value_future] = identifier[self] . identifier[_workers] . identifier[submit] ( identifier[next] , identifier[generator] )
keyword[def] identifier[get_first_element] ( identifier[future] = identifier[first_value_future] ):
literal[string]
keyword[try] :
keyword[yield] identifier[future] . identifier[result] ()
keyword[except] identifier[StopIteration] :
keyword[return]
keyword[return] identifier[chain] ( identifier[get_first_element] (), identifier[generator] )
|
def _generate_async(self, generator):
"""
Return the previous generator object after having run the first element
evaluation as a background task.
Args:
generator (iterable): A generator function.
Returns:
iterable: The generator function with first element evaluated
in background.
"""
first_value_future = self._workers.submit(next, generator)
def get_first_element(future=first_value_future):
"""
Get first element value from future.
Args:
future (concurrent.futures._base.Future): First value future.
Returns:
Evaluated value
"""
try:
yield future.result() # depends on [control=['try'], data=[]]
except StopIteration:
return # depends on [control=['except'], data=[]]
return chain(get_first_element(), generator)
|
def set_comparison(self):
"""Defines the comparison values for the ratio.
This function is added for easier modularity.
"""
self.comp1 = self.zvals[0]
self.comp2 = self.zvals[1]
return
|
def function[set_comparison, parameter[self]]:
constant[Defines the comparison values for the ratio.
This function is added for easier modularity.
]
name[self].comp1 assign[=] call[name[self].zvals][constant[0]]
name[self].comp2 assign[=] call[name[self].zvals][constant[1]]
return[None]
|
keyword[def] identifier[set_comparison] ( identifier[self] ):
literal[string]
identifier[self] . identifier[comp1] = identifier[self] . identifier[zvals] [ literal[int] ]
identifier[self] . identifier[comp2] = identifier[self] . identifier[zvals] [ literal[int] ]
keyword[return]
|
def set_comparison(self):
"""Defines the comparison values for the ratio.
This function is added for easier modularity.
"""
self.comp1 = self.zvals[0]
self.comp2 = self.zvals[1]
return
|
async def send_traceback(destination: discord.abc.Messageable, verbosity: int, *exc_info):
"""
Sends a traceback of an exception to a destination.
Used when REPL fails for any reason.
:param destination: Where to send this information to
:param verbosity: How far back this traceback should go. 0 shows just the last stack.
:param exc_info: Information about this exception, from sys.exc_info or similar.
:return: The last message sent
"""
# to make pylint stop moaning
etype, value, trace = exc_info
traceback_content = "".join(traceback.format_exception(etype, value, trace, verbosity)).replace("``", "`\u200b`")
paginator = commands.Paginator(prefix='```py')
for line in traceback_content.split('\n'):
paginator.add_line(line)
message = None
for page in paginator.pages:
message = await destination.send(page)
return message
|
<ast.AsyncFunctionDef object at 0x7da1b1e68310>
|
keyword[async] keyword[def] identifier[send_traceback] ( identifier[destination] : identifier[discord] . identifier[abc] . identifier[Messageable] , identifier[verbosity] : identifier[int] ,* identifier[exc_info] ):
literal[string]
identifier[etype] , identifier[value] , identifier[trace] = identifier[exc_info]
identifier[traceback_content] = literal[string] . identifier[join] ( identifier[traceback] . identifier[format_exception] ( identifier[etype] , identifier[value] , identifier[trace] , identifier[verbosity] )). identifier[replace] ( literal[string] , literal[string] )
identifier[paginator] = identifier[commands] . identifier[Paginator] ( identifier[prefix] = literal[string] )
keyword[for] identifier[line] keyword[in] identifier[traceback_content] . identifier[split] ( literal[string] ):
identifier[paginator] . identifier[add_line] ( identifier[line] )
identifier[message] = keyword[None]
keyword[for] identifier[page] keyword[in] identifier[paginator] . identifier[pages] :
identifier[message] = keyword[await] identifier[destination] . identifier[send] ( identifier[page] )
keyword[return] identifier[message]
|
async def send_traceback(destination: discord.abc.Messageable, verbosity: int, *exc_info):
"""
Sends a traceback of an exception to a destination.
Used when REPL fails for any reason.
:param destination: Where to send this information to
:param verbosity: How far back this traceback should go. 0 shows just the last stack.
:param exc_info: Information about this exception, from sys.exc_info or similar.
:return: The last message sent
"""
# to make pylint stop moaning
(etype, value, trace) = exc_info
traceback_content = ''.join(traceback.format_exception(etype, value, trace, verbosity)).replace('``', '`\u200b`')
paginator = commands.Paginator(prefix='```py')
for line in traceback_content.split('\n'):
paginator.add_line(line) # depends on [control=['for'], data=['line']]
message = None
for page in paginator.pages:
message = await destination.send(page) # depends on [control=['for'], data=['page']]
return message
|
def _verify_barycentric(lambda1, lambda2, lambda3):
"""Verifies that weights are barycentric and on the reference triangle.
I.e., checks that they sum to one and are all non-negative.
Args:
lambda1 (float): Parameter along the reference triangle.
lambda2 (float): Parameter along the reference triangle.
lambda3 (float): Parameter along the reference triangle.
Raises:
ValueError: If the weights are not valid barycentric
coordinates, i.e. they don't sum to ``1``.
ValueError: If some weights are negative.
"""
weights_total = lambda1 + lambda2 + lambda3
if not np.allclose(weights_total, 1.0, atol=0.0):
raise ValueError(
"Weights do not sum to 1", lambda1, lambda2, lambda3
)
if lambda1 < 0.0 or lambda2 < 0.0 or lambda3 < 0.0:
raise ValueError(
"Weights must be positive", lambda1, lambda2, lambda3
)
|
def function[_verify_barycentric, parameter[lambda1, lambda2, lambda3]]:
constant[Verifies that weights are barycentric and on the reference triangle.
I.e., checks that they sum to one and are all non-negative.
Args:
lambda1 (float): Parameter along the reference triangle.
lambda2 (float): Parameter along the reference triangle.
lambda3 (float): Parameter along the reference triangle.
Raises:
ValueError: If the weights are not valid barycentric
coordinates, i.e. they don't sum to ``1``.
ValueError: If some weights are negative.
]
variable[weights_total] assign[=] binary_operation[binary_operation[name[lambda1] + name[lambda2]] + name[lambda3]]
if <ast.UnaryOp object at 0x7da204623b50> begin[:]
<ast.Raise object at 0x7da204620280>
if <ast.BoolOp object at 0x7da204623eb0> begin[:]
<ast.Raise object at 0x7da204622140>
|
keyword[def] identifier[_verify_barycentric] ( identifier[lambda1] , identifier[lambda2] , identifier[lambda3] ):
literal[string]
identifier[weights_total] = identifier[lambda1] + identifier[lambda2] + identifier[lambda3]
keyword[if] keyword[not] identifier[np] . identifier[allclose] ( identifier[weights_total] , literal[int] , identifier[atol] = literal[int] ):
keyword[raise] identifier[ValueError] (
literal[string] , identifier[lambda1] , identifier[lambda2] , identifier[lambda3]
)
keyword[if] identifier[lambda1] < literal[int] keyword[or] identifier[lambda2] < literal[int] keyword[or] identifier[lambda3] < literal[int] :
keyword[raise] identifier[ValueError] (
literal[string] , identifier[lambda1] , identifier[lambda2] , identifier[lambda3]
)
|
def _verify_barycentric(lambda1, lambda2, lambda3):
"""Verifies that weights are barycentric and on the reference triangle.
I.e., checks that they sum to one and are all non-negative.
Args:
lambda1 (float): Parameter along the reference triangle.
lambda2 (float): Parameter along the reference triangle.
lambda3 (float): Parameter along the reference triangle.
Raises:
ValueError: If the weights are not valid barycentric
coordinates, i.e. they don't sum to ``1``.
ValueError: If some weights are negative.
"""
weights_total = lambda1 + lambda2 + lambda3
if not np.allclose(weights_total, 1.0, atol=0.0):
raise ValueError('Weights do not sum to 1', lambda1, lambda2, lambda3) # depends on [control=['if'], data=[]]
if lambda1 < 0.0 or lambda2 < 0.0 or lambda3 < 0.0:
raise ValueError('Weights must be positive', lambda1, lambda2, lambda3) # depends on [control=['if'], data=[]]
|
def connection_lost(self, exc: Optional[Exception]) -> None:
"""
7.1.4. The WebSocket Connection is Closed.
"""
logger.debug("%s - event = connection_lost(%s)", self.side, exc)
self.state = State.CLOSED
logger.debug("%s - state = CLOSED", self.side)
if not hasattr(self, "close_code"):
self.close_code = 1006
if not hasattr(self, "close_reason"):
self.close_reason = ""
logger.debug(
"%s x code = %d, reason = %s",
self.side,
self.close_code,
self.close_reason or "[no reason]",
)
self.abort_keepalive_pings()
# If self.connection_lost_waiter isn't pending, that's a bug, because:
# - it's set only here in connection_lost() which is called only once;
# - it must never be canceled.
self.connection_lost_waiter.set_result(None)
super().connection_lost(exc)
|
def function[connection_lost, parameter[self, exc]]:
constant[
7.1.4. The WebSocket Connection is Closed.
]
call[name[logger].debug, parameter[constant[%s - event = connection_lost(%s)], name[self].side, name[exc]]]
name[self].state assign[=] name[State].CLOSED
call[name[logger].debug, parameter[constant[%s - state = CLOSED], name[self].side]]
if <ast.UnaryOp object at 0x7da207f02470> begin[:]
name[self].close_code assign[=] constant[1006]
if <ast.UnaryOp object at 0x7da207f032b0> begin[:]
name[self].close_reason assign[=] constant[]
call[name[logger].debug, parameter[constant[%s x code = %d, reason = %s], name[self].side, name[self].close_code, <ast.BoolOp object at 0x7da207f022f0>]]
call[name[self].abort_keepalive_pings, parameter[]]
call[name[self].connection_lost_waiter.set_result, parameter[constant[None]]]
call[call[name[super], parameter[]].connection_lost, parameter[name[exc]]]
|
keyword[def] identifier[connection_lost] ( identifier[self] , identifier[exc] : identifier[Optional] [ identifier[Exception] ])-> keyword[None] :
literal[string]
identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[side] , identifier[exc] )
identifier[self] . identifier[state] = identifier[State] . identifier[CLOSED]
identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[side] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[close_code] = literal[int]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[close_reason] = literal[string]
identifier[logger] . identifier[debug] (
literal[string] ,
identifier[self] . identifier[side] ,
identifier[self] . identifier[close_code] ,
identifier[self] . identifier[close_reason] keyword[or] literal[string] ,
)
identifier[self] . identifier[abort_keepalive_pings] ()
identifier[self] . identifier[connection_lost_waiter] . identifier[set_result] ( keyword[None] )
identifier[super] (). identifier[connection_lost] ( identifier[exc] )
|
def connection_lost(self, exc: Optional[Exception]) -> None:
"""
7.1.4. The WebSocket Connection is Closed.
"""
logger.debug('%s - event = connection_lost(%s)', self.side, exc)
self.state = State.CLOSED
logger.debug('%s - state = CLOSED', self.side)
if not hasattr(self, 'close_code'):
self.close_code = 1006 # depends on [control=['if'], data=[]]
if not hasattr(self, 'close_reason'):
self.close_reason = '' # depends on [control=['if'], data=[]]
logger.debug('%s x code = %d, reason = %s', self.side, self.close_code, self.close_reason or '[no reason]')
self.abort_keepalive_pings()
# If self.connection_lost_waiter isn't pending, that's a bug, because:
# - it's set only here in connection_lost() which is called only once;
# - it must never be canceled.
self.connection_lost_waiter.set_result(None)
super().connection_lost(exc)
|
def add_link(self, targets, weight):
"""
Add link(s) pointing to ``targets``.
If a link already exists pointing to a target, just add ``weight``
to that link's weight
Args:
targets (Node or list[Node]): node or nodes to link to
weight (int or float): weight for the new link(s)
Returns: None
Example:
>>> node_1 = Node('One')
>>> node_2 = Node('Two')
>>> node_1.add_link(node_2, 1)
>>> new_link = node_1.link_list[0]
>>> print(new_link)
node.Link instance pointing to node with value "Two" with weight 1
"""
# Generalize targets to a list to simplify code
if not isinstance(targets, list):
target_list = [targets]
else:
target_list = targets
for target in target_list:
# Check to see if self already has a link to target
for existing_link in self.link_list:
if existing_link.target == target:
existing_link.weight += weight
break
else:
self.link_list.append(Link(target, weight))
|
def function[add_link, parameter[self, targets, weight]]:
constant[
Add link(s) pointing to ``targets``.
If a link already exists pointing to a target, just add ``weight``
to that link's weight
Args:
targets (Node or list[Node]): node or nodes to link to
weight (int or float): weight for the new link(s)
Returns: None
Example:
>>> node_1 = Node('One')
>>> node_2 = Node('Two')
>>> node_1.add_link(node_2, 1)
>>> new_link = node_1.link_list[0]
>>> print(new_link)
node.Link instance pointing to node with value "Two" with weight 1
]
if <ast.UnaryOp object at 0x7da207f011e0> begin[:]
variable[target_list] assign[=] list[[<ast.Name object at 0x7da207f02740>]]
for taget[name[target]] in starred[name[target_list]] begin[:]
for taget[name[existing_link]] in starred[name[self].link_list] begin[:]
if compare[name[existing_link].target equal[==] name[target]] begin[:]
<ast.AugAssign object at 0x7da207f03190>
break
|
keyword[def] identifier[add_link] ( identifier[self] , identifier[targets] , identifier[weight] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[targets] , identifier[list] ):
identifier[target_list] =[ identifier[targets] ]
keyword[else] :
identifier[target_list] = identifier[targets]
keyword[for] identifier[target] keyword[in] identifier[target_list] :
keyword[for] identifier[existing_link] keyword[in] identifier[self] . identifier[link_list] :
keyword[if] identifier[existing_link] . identifier[target] == identifier[target] :
identifier[existing_link] . identifier[weight] += identifier[weight]
keyword[break]
keyword[else] :
identifier[self] . identifier[link_list] . identifier[append] ( identifier[Link] ( identifier[target] , identifier[weight] ))
|
def add_link(self, targets, weight):
"""
Add link(s) pointing to ``targets``.
If a link already exists pointing to a target, just add ``weight``
to that link's weight
Args:
targets (Node or list[Node]): node or nodes to link to
weight (int or float): weight for the new link(s)
Returns: None
Example:
>>> node_1 = Node('One')
>>> node_2 = Node('Two')
>>> node_1.add_link(node_2, 1)
>>> new_link = node_1.link_list[0]
>>> print(new_link)
node.Link instance pointing to node with value "Two" with weight 1
"""
# Generalize targets to a list to simplify code
if not isinstance(targets, list):
target_list = [targets] # depends on [control=['if'], data=[]]
else:
target_list = targets
for target in target_list:
# Check to see if self already has a link to target
for existing_link in self.link_list:
if existing_link.target == target:
existing_link.weight += weight
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['existing_link']]
else:
self.link_list.append(Link(target, weight)) # depends on [control=['for'], data=['target']]
|
def import_wd_atmcof(self, plfile, atmfile, wdidx, Nabun=19, Nlogg=11, Npb=25, Nints=4):
"""
Parses WD's atmcof and reads in all Legendre polynomials for the
given passband.
@plfile: path and filename of atmcofplanck.dat
@atmfile: path and filename of atmcof.dat
@wdidx: WD index of the passed passband. This can be automated
but it's not a high priority.
@Nabun: number of metallicity nodes in atmcof.dat. For the 2003 version
the number of nodes is 19.
@Nlogg: number of logg nodes in atmcof.dat. For the 2003 version
the number of nodes is 11.
@Npb: number of passbands in atmcof.dat. For the 2003 version
the number of passbands is 25.
@Nints: number of temperature intervals (input lines) per entry.
For the 2003 version the number of lines is 4.
"""
# Initialize the external atmcof module if necessary:
# PERHAPS WD_DATA SHOULD BE GLOBAL??
self.wd_data = libphoebe.wd_readdata(plfile, atmfile)
# That is all that was necessary for *_extern_planckint() and
# *_extern_atmx() functions. However, we also want to support
# circumventing WD subroutines and use WD tables directly. For
# that, we need to do a bit more work.
# Store the passband index for use in planckint() and atmx():
self.extern_wd_idx = wdidx
# Break up the table along axes and extract a single passband data:
atmtab = np.reshape(self.wd_data['atm_table'], (Nabun, Npb, Nlogg, Nints, -1))
atmtab = atmtab[:, wdidx, :, :, :]
# Finally, reverse the metallicity axis because it is sorted in
# reverse order in atmcof:
self.extern_wd_atmx = atmtab[::-1, :, :, :]
self.content += ['extern_planckint', 'extern_atmx']
self.atmlist += ['extern_planckint', 'extern_atmx']
|
def function[import_wd_atmcof, parameter[self, plfile, atmfile, wdidx, Nabun, Nlogg, Npb, Nints]]:
constant[
Parses WD's atmcof and reads in all Legendre polynomials for the
given passband.
@plfile: path and filename of atmcofplanck.dat
@atmfile: path and filename of atmcof.dat
@wdidx: WD index of the passed passband. This can be automated
but it's not a high priority.
@Nabun: number of metallicity nodes in atmcof.dat. For the 2003 version
the number of nodes is 19.
@Nlogg: number of logg nodes in atmcof.dat. For the 2003 version
the number of nodes is 11.
@Npb: number of passbands in atmcof.dat. For the 2003 version
the number of passbands is 25.
@Nints: number of temperature intervals (input lines) per entry.
For the 2003 version the number of lines is 4.
]
name[self].wd_data assign[=] call[name[libphoebe].wd_readdata, parameter[name[plfile], name[atmfile]]]
name[self].extern_wd_idx assign[=] name[wdidx]
variable[atmtab] assign[=] call[name[np].reshape, parameter[call[name[self].wd_data][constant[atm_table]], tuple[[<ast.Name object at 0x7da18bcc9150>, <ast.Name object at 0x7da18bcc9960>, <ast.Name object at 0x7da18bccbeb0>, <ast.Name object at 0x7da18bcc8490>, <ast.UnaryOp object at 0x7da18bcc9840>]]]]
variable[atmtab] assign[=] call[name[atmtab]][tuple[[<ast.Slice object at 0x7da18bcc82b0>, <ast.Name object at 0x7da18bcc8820>, <ast.Slice object at 0x7da18bcc95d0>, <ast.Slice object at 0x7da18bccb7c0>, <ast.Slice object at 0x7da18bcc8940>]]]
name[self].extern_wd_atmx assign[=] call[name[atmtab]][tuple[[<ast.Slice object at 0x7da18bcc9f30>, <ast.Slice object at 0x7da18bccb400>, <ast.Slice object at 0x7da18bcc9db0>, <ast.Slice object at 0x7da18bccb4c0>]]]
<ast.AugAssign object at 0x7da18bccbee0>
<ast.AugAssign object at 0x7da18bcca980>
|
keyword[def] identifier[import_wd_atmcof] ( identifier[self] , identifier[plfile] , identifier[atmfile] , identifier[wdidx] , identifier[Nabun] = literal[int] , identifier[Nlogg] = literal[int] , identifier[Npb] = literal[int] , identifier[Nints] = literal[int] ):
literal[string]
identifier[self] . identifier[wd_data] = identifier[libphoebe] . identifier[wd_readdata] ( identifier[plfile] , identifier[atmfile] )
identifier[self] . identifier[extern_wd_idx] = identifier[wdidx]
identifier[atmtab] = identifier[np] . identifier[reshape] ( identifier[self] . identifier[wd_data] [ literal[string] ],( identifier[Nabun] , identifier[Npb] , identifier[Nlogg] , identifier[Nints] ,- literal[int] ))
identifier[atmtab] = identifier[atmtab] [:, identifier[wdidx] ,:,:,:]
identifier[self] . identifier[extern_wd_atmx] = identifier[atmtab] [::- literal[int] ,:,:,:]
identifier[self] . identifier[content] +=[ literal[string] , literal[string] ]
identifier[self] . identifier[atmlist] +=[ literal[string] , literal[string] ]
|
def import_wd_atmcof(self, plfile, atmfile, wdidx, Nabun=19, Nlogg=11, Npb=25, Nints=4):
"""
Parses WD's atmcof and reads in all Legendre polynomials for the
given passband.
@plfile: path and filename of atmcofplanck.dat
@atmfile: path and filename of atmcof.dat
@wdidx: WD index of the passed passband. This can be automated
but it's not a high priority.
@Nabun: number of metallicity nodes in atmcof.dat. For the 2003 version
the number of nodes is 19.
@Nlogg: number of logg nodes in atmcof.dat. For the 2003 version
the number of nodes is 11.
@Npb: number of passbands in atmcof.dat. For the 2003 version
the number of passbands is 25.
@Nints: number of temperature intervals (input lines) per entry.
For the 2003 version the number of lines is 4.
"""
# Initialize the external atmcof module if necessary:
# PERHAPS WD_DATA SHOULD BE GLOBAL??
self.wd_data = libphoebe.wd_readdata(plfile, atmfile)
# That is all that was necessary for *_extern_planckint() and
# *_extern_atmx() functions. However, we also want to support
# circumventing WD subroutines and use WD tables directly. For
# that, we need to do a bit more work.
# Store the passband index for use in planckint() and atmx():
self.extern_wd_idx = wdidx
# Break up the table along axes and extract a single passband data:
atmtab = np.reshape(self.wd_data['atm_table'], (Nabun, Npb, Nlogg, Nints, -1))
atmtab = atmtab[:, wdidx, :, :, :]
# Finally, reverse the metallicity axis because it is sorted in
# reverse order in atmcof:
self.extern_wd_atmx = atmtab[::-1, :, :, :]
self.content += ['extern_planckint', 'extern_atmx']
self.atmlist += ['extern_planckint', 'extern_atmx']
|
def refactor_module_to_package(self):
"""Convert the current module into a package."""
refactor = ModuleToPackage(self.project, self.resource)
return self._get_changes(refactor)
|
def function[refactor_module_to_package, parameter[self]]:
constant[Convert the current module into a package.]
variable[refactor] assign[=] call[name[ModuleToPackage], parameter[name[self].project, name[self].resource]]
return[call[name[self]._get_changes, parameter[name[refactor]]]]
|
keyword[def] identifier[refactor_module_to_package] ( identifier[self] ):
literal[string]
identifier[refactor] = identifier[ModuleToPackage] ( identifier[self] . identifier[project] , identifier[self] . identifier[resource] )
keyword[return] identifier[self] . identifier[_get_changes] ( identifier[refactor] )
|
def refactor_module_to_package(self):
"""Convert the current module into a package."""
refactor = ModuleToPackage(self.project, self.resource)
return self._get_changes(refactor)
|
def get_original(document, xslt):
"""Get the original chain given document path and xslt local path
:param str document: local absolute path to document
:param str xslt: local absolute path to xst file
:return: new chain generated.
:rtype: str
"""
dom = etree.parse(document) # TODO: cuando este probando -
# fuente:
# http://stackoverflow.com/questions/16698935/how-to-transform-an-xml-file-using-xslt-in-python
xslt = etree.parse(xslt)
transform = etree.XSLT(xslt)
newdom = transform(dom)
return newdom
|
def function[get_original, parameter[document, xslt]]:
constant[Get the original chain given document path and xslt local path
:param str document: local absolute path to document
:param str xslt: local absolute path to xst file
:return: new chain generated.
:rtype: str
]
variable[dom] assign[=] call[name[etree].parse, parameter[name[document]]]
variable[xslt] assign[=] call[name[etree].parse, parameter[name[xslt]]]
variable[transform] assign[=] call[name[etree].XSLT, parameter[name[xslt]]]
variable[newdom] assign[=] call[name[transform], parameter[name[dom]]]
return[name[newdom]]
|
keyword[def] identifier[get_original] ( identifier[document] , identifier[xslt] ):
literal[string]
identifier[dom] = identifier[etree] . identifier[parse] ( identifier[document] )
identifier[xslt] = identifier[etree] . identifier[parse] ( identifier[xslt] )
identifier[transform] = identifier[etree] . identifier[XSLT] ( identifier[xslt] )
identifier[newdom] = identifier[transform] ( identifier[dom] )
keyword[return] identifier[newdom]
|
def get_original(document, xslt):
"""Get the original chain given document path and xslt local path
:param str document: local absolute path to document
:param str xslt: local absolute path to xst file
:return: new chain generated.
:rtype: str
"""
dom = etree.parse(document) # TODO: cuando este probando -
# fuente:
# http://stackoverflow.com/questions/16698935/how-to-transform-an-xml-file-using-xslt-in-python
xslt = etree.parse(xslt)
transform = etree.XSLT(xslt)
newdom = transform(dom)
return newdom
|
def cleaned_up_slab(self):
"""
Returns a slab with the adsorbates removed
"""
ads_strs = list(self.ads_entries_dict.keys())
cleaned = self.structure.copy()
cleaned.remove_species(ads_strs)
return cleaned
|
def function[cleaned_up_slab, parameter[self]]:
constant[
Returns a slab with the adsorbates removed
]
variable[ads_strs] assign[=] call[name[list], parameter[call[name[self].ads_entries_dict.keys, parameter[]]]]
variable[cleaned] assign[=] call[name[self].structure.copy, parameter[]]
call[name[cleaned].remove_species, parameter[name[ads_strs]]]
return[name[cleaned]]
|
keyword[def] identifier[cleaned_up_slab] ( identifier[self] ):
literal[string]
identifier[ads_strs] = identifier[list] ( identifier[self] . identifier[ads_entries_dict] . identifier[keys] ())
identifier[cleaned] = identifier[self] . identifier[structure] . identifier[copy] ()
identifier[cleaned] . identifier[remove_species] ( identifier[ads_strs] )
keyword[return] identifier[cleaned]
|
def cleaned_up_slab(self):
"""
Returns a slab with the adsorbates removed
"""
ads_strs = list(self.ads_entries_dict.keys())
cleaned = self.structure.copy()
cleaned.remove_species(ads_strs)
return cleaned
|
def from_ranges(ranges, name, data_key, start_key='offset', length_key='length'):
"""
Creates a list of commands from a list of ranges. Each range
is converted to two commands: a start_* and a stop_*.
"""
commands = []
for r in ranges:
data = r[data_key]
start = r[start_key]
stop = start + r[length_key]
commands.extend(Command.start_stop(name, start, stop, data))
return commands
|
def function[from_ranges, parameter[ranges, name, data_key, start_key, length_key]]:
constant[
Creates a list of commands from a list of ranges. Each range
is converted to two commands: a start_* and a stop_*.
]
variable[commands] assign[=] list[[]]
for taget[name[r]] in starred[name[ranges]] begin[:]
variable[data] assign[=] call[name[r]][name[data_key]]
variable[start] assign[=] call[name[r]][name[start_key]]
variable[stop] assign[=] binary_operation[name[start] + call[name[r]][name[length_key]]]
call[name[commands].extend, parameter[call[name[Command].start_stop, parameter[name[name], name[start], name[stop], name[data]]]]]
return[name[commands]]
|
keyword[def] identifier[from_ranges] ( identifier[ranges] , identifier[name] , identifier[data_key] , identifier[start_key] = literal[string] , identifier[length_key] = literal[string] ):
literal[string]
identifier[commands] =[]
keyword[for] identifier[r] keyword[in] identifier[ranges] :
identifier[data] = identifier[r] [ identifier[data_key] ]
identifier[start] = identifier[r] [ identifier[start_key] ]
identifier[stop] = identifier[start] + identifier[r] [ identifier[length_key] ]
identifier[commands] . identifier[extend] ( identifier[Command] . identifier[start_stop] ( identifier[name] , identifier[start] , identifier[stop] , identifier[data] ))
keyword[return] identifier[commands]
|
def from_ranges(ranges, name, data_key, start_key='offset', length_key='length'):
"""
Creates a list of commands from a list of ranges. Each range
is converted to two commands: a start_* and a stop_*.
"""
commands = []
for r in ranges:
data = r[data_key]
start = r[start_key]
stop = start + r[length_key]
commands.extend(Command.start_stop(name, start, stop, data)) # depends on [control=['for'], data=['r']]
return commands
|
def destroyCommit(self, varBind, **context):
"""Destroy Managed Object Instance.
Implements the second of the multi-step workflow similar to the SNMP SET
command processing (:RFC:`1905#section-4.2.5`).
The goal of the second phase is to actually remove requested Managed
Object Instance from the MIB tree. When multiple Managed Objects Instances
are destroyed/modified at once (likely coming all in one SNMP PDU), each
of them has to run through the second (*commit*) phase successfully for
the system to transition to the third (*cleanup*) phase. If any single
*commit* step fails, the system transitions into the *undo* state for
each of Managed Objects Instances being processed at once.
The role of this object in the MIB tree is non-terminal. It does not
access the actual Managed Object Instance, but just traverses one level
down the MIB tree and hands off the query to the underlying objects.
Parameters
----------
varBind: :py:class:`~pysnmp.smi.rfc1902.ObjectType` object representing
new Managed Object Instance value to destroy
Other Parameters
----------------
\*\*context:
Query parameters:
* `cbFun` (callable) - user-supplied callable that is invoked to
pass the new value of the Managed Object Instance or an error.
* `instances` (dict): user-supplied dict for temporarily holding
Managed Objects Instances being destroyed.
Notes
-----
The callback functions (e.g. `cbFun`) have the same signature as this
method where `varBind` contains the new Managed Object Instance value.
In case of an error, the `error` key in the `context` dict will contain
an exception object.
"""
name, val = varBind
(debug.logger & debug.FLAG_INS and
debug.logger('%s: destroyCommit(%s, %r)' % (self, name, val)))
instances = context['instances'].setdefault(self.name, {self.ST_CREATE: {}, self.ST_DESTROY: {}})
idx = context['idx']
# NOTE: multiple names are possible in a single PDU, that could collide
# Therefore let's keep old object indexed by (negative) var-bind index
try:
instances[self.ST_DESTROY][-idx - 1] = self._vars.pop(name)
except KeyError:
pass
cbFun = context['cbFun']
cbFun(varBind, **context)
|
def function[destroyCommit, parameter[self, varBind]]:
constant[Destroy Managed Object Instance.
Implements the second of the multi-step workflow similar to the SNMP SET
command processing (:RFC:`1905#section-4.2.5`).
The goal of the second phase is to actually remove requested Managed
Object Instance from the MIB tree. When multiple Managed Objects Instances
are destroyed/modified at once (likely coming all in one SNMP PDU), each
of them has to run through the second (*commit*) phase successfully for
the system to transition to the third (*cleanup*) phase. If any single
*commit* step fails, the system transitions into the *undo* state for
each of Managed Objects Instances being processed at once.
The role of this object in the MIB tree is non-terminal. It does not
access the actual Managed Object Instance, but just traverses one level
down the MIB tree and hands off the query to the underlying objects.
Parameters
----------
varBind: :py:class:`~pysnmp.smi.rfc1902.ObjectType` object representing
new Managed Object Instance value to destroy
Other Parameters
----------------
\*\*context:
Query parameters:
* `cbFun` (callable) - user-supplied callable that is invoked to
pass the new value of the Managed Object Instance or an error.
* `instances` (dict): user-supplied dict for temporarily holding
Managed Objects Instances being destroyed.
Notes
-----
The callback functions (e.g. `cbFun`) have the same signature as this
method where `varBind` contains the new Managed Object Instance value.
In case of an error, the `error` key in the `context` dict will contain
an exception object.
]
<ast.Tuple object at 0x7da1b155f640> assign[=] name[varBind]
<ast.BoolOp object at 0x7da20c76ff40>
variable[instances] assign[=] call[call[name[context]][constant[instances]].setdefault, parameter[name[self].name, dictionary[[<ast.Attribute object at 0x7da20c76c640>, <ast.Attribute object at 0x7da20c76cee0>], [<ast.Dict object at 0x7da20c76d3f0>, <ast.Dict object at 0x7da20c76c880>]]]]
variable[idx] assign[=] call[name[context]][constant[idx]]
<ast.Try object at 0x7da20c76dba0>
variable[cbFun] assign[=] call[name[context]][constant[cbFun]]
call[name[cbFun], parameter[name[varBind]]]
|
keyword[def] identifier[destroyCommit] ( identifier[self] , identifier[varBind] ,** identifier[context] ):
literal[string]
identifier[name] , identifier[val] = identifier[varBind]
( identifier[debug] . identifier[logger] & identifier[debug] . identifier[FLAG_INS] keyword[and]
identifier[debug] . identifier[logger] ( literal[string] %( identifier[self] , identifier[name] , identifier[val] )))
identifier[instances] = identifier[context] [ literal[string] ]. identifier[setdefault] ( identifier[self] . identifier[name] ,{ identifier[self] . identifier[ST_CREATE] :{}, identifier[self] . identifier[ST_DESTROY] :{}})
identifier[idx] = identifier[context] [ literal[string] ]
keyword[try] :
identifier[instances] [ identifier[self] . identifier[ST_DESTROY] ][- identifier[idx] - literal[int] ]= identifier[self] . identifier[_vars] . identifier[pop] ( identifier[name] )
keyword[except] identifier[KeyError] :
keyword[pass]
identifier[cbFun] = identifier[context] [ literal[string] ]
identifier[cbFun] ( identifier[varBind] ,** identifier[context] )
|
def destroyCommit(self, varBind, **context):
"""Destroy Managed Object Instance.
Implements the second of the multi-step workflow similar to the SNMP SET
command processing (:RFC:`1905#section-4.2.5`).
The goal of the second phase is to actually remove requested Managed
Object Instance from the MIB tree. When multiple Managed Objects Instances
are destroyed/modified at once (likely coming all in one SNMP PDU), each
of them has to run through the second (*commit*) phase successfully for
the system to transition to the third (*cleanup*) phase. If any single
*commit* step fails, the system transitions into the *undo* state for
each of Managed Objects Instances being processed at once.
The role of this object in the MIB tree is non-terminal. It does not
access the actual Managed Object Instance, but just traverses one level
down the MIB tree and hands off the query to the underlying objects.
Parameters
----------
varBind: :py:class:`~pysnmp.smi.rfc1902.ObjectType` object representing
new Managed Object Instance value to destroy
Other Parameters
----------------
\\*\\*context:
Query parameters:
* `cbFun` (callable) - user-supplied callable that is invoked to
pass the new value of the Managed Object Instance or an error.
* `instances` (dict): user-supplied dict for temporarily holding
Managed Objects Instances being destroyed.
Notes
-----
The callback functions (e.g. `cbFun`) have the same signature as this
method where `varBind` contains the new Managed Object Instance value.
In case of an error, the `error` key in the `context` dict will contain
an exception object.
"""
(name, val) = varBind
debug.logger & debug.FLAG_INS and debug.logger('%s: destroyCommit(%s, %r)' % (self, name, val))
instances = context['instances'].setdefault(self.name, {self.ST_CREATE: {}, self.ST_DESTROY: {}})
idx = context['idx']
# NOTE: multiple names are possible in a single PDU, that could collide
# Therefore let's keep old object indexed by (negative) var-bind index
try:
instances[self.ST_DESTROY][-idx - 1] = self._vars.pop(name) # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
cbFun = context['cbFun']
cbFun(varBind, **context)
|
def update_model_dict(self):
"""Updates the model dictionary"""
dct = {}
models = self.chimera.openModels
for md in models.list():
dct[md.name] = md.id
self.model_dict = dct
|
def function[update_model_dict, parameter[self]]:
constant[Updates the model dictionary]
variable[dct] assign[=] dictionary[[], []]
variable[models] assign[=] name[self].chimera.openModels
for taget[name[md]] in starred[call[name[models].list, parameter[]]] begin[:]
call[name[dct]][name[md].name] assign[=] name[md].id
name[self].model_dict assign[=] name[dct]
|
keyword[def] identifier[update_model_dict] ( identifier[self] ):
literal[string]
identifier[dct] ={}
identifier[models] = identifier[self] . identifier[chimera] . identifier[openModels]
keyword[for] identifier[md] keyword[in] identifier[models] . identifier[list] ():
identifier[dct] [ identifier[md] . identifier[name] ]= identifier[md] . identifier[id]
identifier[self] . identifier[model_dict] = identifier[dct]
|
def update_model_dict(self):
"""Updates the model dictionary"""
dct = {}
models = self.chimera.openModels
for md in models.list():
dct[md.name] = md.id # depends on [control=['for'], data=['md']]
self.model_dict = dct
|
def _random_ipv4_address_from_subnet(self, subnet, network=False):
"""
Produces a random IPv4 address or network with a valid CIDR
from within a given subnet.
:param subnet: IPv4Network to choose from within
:param network: Return a network address, and not an IP address
"""
address = str(
subnet[self.generator.random.randint(
0, subnet.num_addresses - 1,
)],
)
if network:
address += '/' + str(self.generator.random.randint(
subnet.prefixlen,
subnet.max_prefixlen,
))
address = str(ip_network(address, strict=False))
return address
|
def function[_random_ipv4_address_from_subnet, parameter[self, subnet, network]]:
constant[
Produces a random IPv4 address or network with a valid CIDR
from within a given subnet.
:param subnet: IPv4Network to choose from within
:param network: Return a network address, and not an IP address
]
variable[address] assign[=] call[name[str], parameter[call[name[subnet]][call[name[self].generator.random.randint, parameter[constant[0], binary_operation[name[subnet].num_addresses - constant[1]]]]]]]
if name[network] begin[:]
<ast.AugAssign object at 0x7da18dc9bf10>
variable[address] assign[=] call[name[str], parameter[call[name[ip_network], parameter[name[address]]]]]
return[name[address]]
|
keyword[def] identifier[_random_ipv4_address_from_subnet] ( identifier[self] , identifier[subnet] , identifier[network] = keyword[False] ):
literal[string]
identifier[address] = identifier[str] (
identifier[subnet] [ identifier[self] . identifier[generator] . identifier[random] . identifier[randint] (
literal[int] , identifier[subnet] . identifier[num_addresses] - literal[int] ,
)],
)
keyword[if] identifier[network] :
identifier[address] += literal[string] + identifier[str] ( identifier[self] . identifier[generator] . identifier[random] . identifier[randint] (
identifier[subnet] . identifier[prefixlen] ,
identifier[subnet] . identifier[max_prefixlen] ,
))
identifier[address] = identifier[str] ( identifier[ip_network] ( identifier[address] , identifier[strict] = keyword[False] ))
keyword[return] identifier[address]
|
def _random_ipv4_address_from_subnet(self, subnet, network=False):
"""
Produces a random IPv4 address or network with a valid CIDR
from within a given subnet.
:param subnet: IPv4Network to choose from within
:param network: Return a network address, and not an IP address
"""
address = str(subnet[self.generator.random.randint(0, subnet.num_addresses - 1)])
if network:
address += '/' + str(self.generator.random.randint(subnet.prefixlen, subnet.max_prefixlen))
address = str(ip_network(address, strict=False)) # depends on [control=['if'], data=[]]
return address
|
def load_features(self):
"""
Loads all the known features from the feature service
"""
# Loading all loci that
# are in self.loci variable defined
# when the pyGFE object is created
for loc in self.loci:
if self.verbose:
self.logger.info(self.logname + "Loading features for " + loc)
# Loading all features for loc from feature service
self.all_feats.update({loc: self.locus_features(loc)})
if self.verbose:
self.logger.info(self.logname + "Finished loading features for " + loc)
if self.verbose:
mem = "{:4.4f}".format(sys.getsizeof(self.all_feats) / 1000000)
self.logger.info(self.logname + "Finished loading all features * all_feats = " + mem + " MB *")
|
def function[load_features, parameter[self]]:
constant[
Loads all the known features from the feature service
]
for taget[name[loc]] in starred[name[self].loci] begin[:]
if name[self].verbose begin[:]
call[name[self].logger.info, parameter[binary_operation[binary_operation[name[self].logname + constant[Loading features for ]] + name[loc]]]]
call[name[self].all_feats.update, parameter[dictionary[[<ast.Name object at 0x7da1b25057e0>], [<ast.Call object at 0x7da1b25057b0>]]]]
if name[self].verbose begin[:]
call[name[self].logger.info, parameter[binary_operation[binary_operation[name[self].logname + constant[Finished loading features for ]] + name[loc]]]]
if name[self].verbose begin[:]
variable[mem] assign[=] call[constant[{:4.4f}].format, parameter[binary_operation[call[name[sys].getsizeof, parameter[name[self].all_feats]] / constant[1000000]]]]
call[name[self].logger.info, parameter[binary_operation[binary_operation[binary_operation[name[self].logname + constant[Finished loading all features * all_feats = ]] + name[mem]] + constant[ MB *]]]]
|
keyword[def] identifier[load_features] ( identifier[self] ):
literal[string]
keyword[for] identifier[loc] keyword[in] identifier[self] . identifier[loci] :
keyword[if] identifier[self] . identifier[verbose] :
identifier[self] . identifier[logger] . identifier[info] ( identifier[self] . identifier[logname] + literal[string] + identifier[loc] )
identifier[self] . identifier[all_feats] . identifier[update] ({ identifier[loc] : identifier[self] . identifier[locus_features] ( identifier[loc] )})
keyword[if] identifier[self] . identifier[verbose] :
identifier[self] . identifier[logger] . identifier[info] ( identifier[self] . identifier[logname] + literal[string] + identifier[loc] )
keyword[if] identifier[self] . identifier[verbose] :
identifier[mem] = literal[string] . identifier[format] ( identifier[sys] . identifier[getsizeof] ( identifier[self] . identifier[all_feats] )/ literal[int] )
identifier[self] . identifier[logger] . identifier[info] ( identifier[self] . identifier[logname] + literal[string] + identifier[mem] + literal[string] )
|
def load_features(self):
"""
Loads all the known features from the feature service
"""
# Loading all loci that
# are in self.loci variable defined
# when the pyGFE object is created
for loc in self.loci:
if self.verbose:
self.logger.info(self.logname + 'Loading features for ' + loc) # depends on [control=['if'], data=[]]
# Loading all features for loc from feature service
self.all_feats.update({loc: self.locus_features(loc)})
if self.verbose:
self.logger.info(self.logname + 'Finished loading features for ' + loc) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['loc']]
if self.verbose:
mem = '{:4.4f}'.format(sys.getsizeof(self.all_feats) / 1000000)
self.logger.info(self.logname + 'Finished loading all features * all_feats = ' + mem + ' MB *') # depends on [control=['if'], data=[]]
|
def _db_upgrade(self, db_name):
""" Upgrade nipap database schema
"""
current_db_version = self._get_db_version()
self._execute(db_schema.functions)
for i in range(current_db_version, nipap.__db_version__):
self._logger.info("Upgrading DB schema:", i, "to", i+1)
upgrade_sql = db_schema.upgrade[i-1] # 0 count on array
self._execute(upgrade_sql % (db_name))
self._execute(db_schema.triggers)
|
def function[_db_upgrade, parameter[self, db_name]]:
constant[ Upgrade nipap database schema
]
variable[current_db_version] assign[=] call[name[self]._get_db_version, parameter[]]
call[name[self]._execute, parameter[name[db_schema].functions]]
for taget[name[i]] in starred[call[name[range], parameter[name[current_db_version], name[nipap].__db_version__]]] begin[:]
call[name[self]._logger.info, parameter[constant[Upgrading DB schema:], name[i], constant[to], binary_operation[name[i] + constant[1]]]]
variable[upgrade_sql] assign[=] call[name[db_schema].upgrade][binary_operation[name[i] - constant[1]]]
call[name[self]._execute, parameter[binary_operation[name[upgrade_sql] <ast.Mod object at 0x7da2590d6920> name[db_name]]]]
call[name[self]._execute, parameter[name[db_schema].triggers]]
|
keyword[def] identifier[_db_upgrade] ( identifier[self] , identifier[db_name] ):
literal[string]
identifier[current_db_version] = identifier[self] . identifier[_get_db_version] ()
identifier[self] . identifier[_execute] ( identifier[db_schema] . identifier[functions] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[current_db_version] , identifier[nipap] . identifier[__db_version__] ):
identifier[self] . identifier[_logger] . identifier[info] ( literal[string] , identifier[i] , literal[string] , identifier[i] + literal[int] )
identifier[upgrade_sql] = identifier[db_schema] . identifier[upgrade] [ identifier[i] - literal[int] ]
identifier[self] . identifier[_execute] ( identifier[upgrade_sql] %( identifier[db_name] ))
identifier[self] . identifier[_execute] ( identifier[db_schema] . identifier[triggers] )
|
def _db_upgrade(self, db_name):
""" Upgrade nipap database schema
"""
current_db_version = self._get_db_version()
self._execute(db_schema.functions)
for i in range(current_db_version, nipap.__db_version__):
self._logger.info('Upgrading DB schema:', i, 'to', i + 1)
upgrade_sql = db_schema.upgrade[i - 1] # 0 count on array
self._execute(upgrade_sql % db_name) # depends on [control=['for'], data=['i']]
self._execute(db_schema.triggers)
|
def SG(self):
r'''Specific gravity of the mixture, [dimensionless].
For gas-phase conditions, this is calculated at 15.6 °C (60 °F) and 1
atm for the mixture and the reference fluid, air.
For liquid and solid phase conditions, this is calculated based on a
reference fluid of water at 4°C at 1 atm, but the with the liquid or
solid mixture's density at the currently specified conditions.
Examples
--------
>>> Mixture('MTBE').SG
0.7428160596603596
'''
return phase_select_property(phase=self.phase, s=self.SGs, l=self.SGl, g=self.SGg)
|
def function[SG, parameter[self]]:
constant[Specific gravity of the mixture, [dimensionless].
For gas-phase conditions, this is calculated at 15.6 °C (60 °F) and 1
atm for the mixture and the reference fluid, air.
For liquid and solid phase conditions, this is calculated based on a
reference fluid of water at 4°C at 1 atm, but the with the liquid or
solid mixture's density at the currently specified conditions.
Examples
--------
>>> Mixture('MTBE').SG
0.7428160596603596
]
return[call[name[phase_select_property], parameter[]]]
|
keyword[def] identifier[SG] ( identifier[self] ):
literal[string]
keyword[return] identifier[phase_select_property] ( identifier[phase] = identifier[self] . identifier[phase] , identifier[s] = identifier[self] . identifier[SGs] , identifier[l] = identifier[self] . identifier[SGl] , identifier[g] = identifier[self] . identifier[SGg] )
|
def SG(self):
"""Specific gravity of the mixture, [dimensionless].
For gas-phase conditions, this is calculated at 15.6 °C (60 °F) and 1
atm for the mixture and the reference fluid, air.
For liquid and solid phase conditions, this is calculated based on a
reference fluid of water at 4°C at 1 atm, but the with the liquid or
solid mixture's density at the currently specified conditions.
Examples
--------
>>> Mixture('MTBE').SG
0.7428160596603596
"""
return phase_select_property(phase=self.phase, s=self.SGs, l=self.SGl, g=self.SGg)
|
def handle_extracted_license(self, extr_lic):
"""
Build and return an ExtractedLicense or None.
Note that this function adds the license to the document.
"""
lic = self.parse_only_extr_license(extr_lic)
if lic is not None:
self.doc.add_extr_lic(lic)
return lic
|
def function[handle_extracted_license, parameter[self, extr_lic]]:
constant[
Build and return an ExtractedLicense or None.
Note that this function adds the license to the document.
]
variable[lic] assign[=] call[name[self].parse_only_extr_license, parameter[name[extr_lic]]]
if compare[name[lic] is_not constant[None]] begin[:]
call[name[self].doc.add_extr_lic, parameter[name[lic]]]
return[name[lic]]
|
keyword[def] identifier[handle_extracted_license] ( identifier[self] , identifier[extr_lic] ):
literal[string]
identifier[lic] = identifier[self] . identifier[parse_only_extr_license] ( identifier[extr_lic] )
keyword[if] identifier[lic] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[doc] . identifier[add_extr_lic] ( identifier[lic] )
keyword[return] identifier[lic]
|
def handle_extracted_license(self, extr_lic):
"""
Build and return an ExtractedLicense or None.
Note that this function adds the license to the document.
"""
lic = self.parse_only_extr_license(extr_lic)
if lic is not None:
self.doc.add_extr_lic(lic) # depends on [control=['if'], data=['lic']]
return lic
|
def service_name(self):
"""
Service name inside the Docker Swarm
service_suffix should be a numerical value unique for user
{service_prefix}-{service_owner}-{service_suffix}
"""
if hasattr(self, "server_name") and self.server_name:
server_name = self.server_name
else:
server_name = 1
return "{}-{}-{}".format(self.service_prefix,
self.service_owner,
server_name
)
|
def function[service_name, parameter[self]]:
constant[
Service name inside the Docker Swarm
service_suffix should be a numerical value unique for user
{service_prefix}-{service_owner}-{service_suffix}
]
if <ast.BoolOp object at 0x7da1b0214be0> begin[:]
variable[server_name] assign[=] name[self].server_name
return[call[constant[{}-{}-{}].format, parameter[name[self].service_prefix, name[self].service_owner, name[server_name]]]]
|
keyword[def] identifier[service_name] ( identifier[self] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[server_name] :
identifier[server_name] = identifier[self] . identifier[server_name]
keyword[else] :
identifier[server_name] = literal[int]
keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[service_prefix] ,
identifier[self] . identifier[service_owner] ,
identifier[server_name]
)
|
def service_name(self):
"""
Service name inside the Docker Swarm
service_suffix should be a numerical value unique for user
{service_prefix}-{service_owner}-{service_suffix}
"""
if hasattr(self, 'server_name') and self.server_name:
server_name = self.server_name # depends on [control=['if'], data=[]]
else:
server_name = 1
return '{}-{}-{}'.format(self.service_prefix, self.service_owner, server_name)
|
def find_objects_wo_child(config=None,
config_path=None,
parent_regex=None,
child_regex=None,
ignore_ws=False,
saltenv='base'):
'''
Return a list of parent ``ciscoconfparse.IOSCfgLine`` objects, which matched
the ``parent_regex`` and whose children did *not* match ``child_regex``.
Only the parent ``ciscoconfparse.IOSCfgLine`` objects will be returned. For
simplicity, this method only finds oldest ancestors without immediate
children that match.
.. warning::
This function is mostly valuable when invoked from other Salt
components (i.e., execution modules, states, templates etc.). For CLI
usage, please consider using
:py:func:`ciscoconfparse.find_lines_wo_child <salt.ciscoconfparse_mod.find_lines_wo_child>`
config
The configuration sent as text.
.. note::
This argument is ignored when ``config_path`` is specified.
config_path
The absolute or remote path to the file with the configuration to be
parsed. This argument supports the usual Salt filesystem URIs, e.g.,
``salt://``, ``https://``, ``ftp://``, ``s3://``, etc.
parent_regex
The regular expression to match the parent lines against.
child_regex
The regular expression to match the child lines against.
ignore_ws: ``False``
Whether to ignore the white spaces.
saltenv: ``base``
Salt fileserver environment from which to retrieve the file. This
argument is ignored when ``config_path`` is not a ``salt://`` URL.
Usage example:
.. code-block:: python
objects = __salt__['ciscoconfparse.find_objects_wo_child'](config_path='https://bit.ly/2mAdq7z',
parent_regex='line con',
child_regex='stopbits')
for obj in objects:
print(obj.text)
'''
ccp = _get_ccp(config=config, config_path=config_path, saltenv=saltenv)
lines = ccp.find_objects_wo_child(parent_regex, child_regex, ignore_ws=ignore_ws)
return lines
|
def function[find_objects_wo_child, parameter[config, config_path, parent_regex, child_regex, ignore_ws, saltenv]]:
constant[
Return a list of parent ``ciscoconfparse.IOSCfgLine`` objects, which matched
the ``parent_regex`` and whose children did *not* match ``child_regex``.
Only the parent ``ciscoconfparse.IOSCfgLine`` objects will be returned. For
simplicity, this method only finds oldest ancestors without immediate
children that match.
.. warning::
This function is mostly valuable when invoked from other Salt
components (i.e., execution modules, states, templates etc.). For CLI
usage, please consider using
:py:func:`ciscoconfparse.find_lines_wo_child <salt.ciscoconfparse_mod.find_lines_wo_child>`
config
The configuration sent as text.
.. note::
This argument is ignored when ``config_path`` is specified.
config_path
The absolute or remote path to the file with the configuration to be
parsed. This argument supports the usual Salt filesystem URIs, e.g.,
``salt://``, ``https://``, ``ftp://``, ``s3://``, etc.
parent_regex
The regular expression to match the parent lines against.
child_regex
The regular expression to match the child lines against.
ignore_ws: ``False``
Whether to ignore the white spaces.
saltenv: ``base``
Salt fileserver environment from which to retrieve the file. This
argument is ignored when ``config_path`` is not a ``salt://`` URL.
Usage example:
.. code-block:: python
objects = __salt__['ciscoconfparse.find_objects_wo_child'](config_path='https://bit.ly/2mAdq7z',
parent_regex='line con',
child_regex='stopbits')
for obj in objects:
print(obj.text)
]
variable[ccp] assign[=] call[name[_get_ccp], parameter[]]
variable[lines] assign[=] call[name[ccp].find_objects_wo_child, parameter[name[parent_regex], name[child_regex]]]
return[name[lines]]
|
keyword[def] identifier[find_objects_wo_child] ( identifier[config] = keyword[None] ,
identifier[config_path] = keyword[None] ,
identifier[parent_regex] = keyword[None] ,
identifier[child_regex] = keyword[None] ,
identifier[ignore_ws] = keyword[False] ,
identifier[saltenv] = literal[string] ):
literal[string]
identifier[ccp] = identifier[_get_ccp] ( identifier[config] = identifier[config] , identifier[config_path] = identifier[config_path] , identifier[saltenv] = identifier[saltenv] )
identifier[lines] = identifier[ccp] . identifier[find_objects_wo_child] ( identifier[parent_regex] , identifier[child_regex] , identifier[ignore_ws] = identifier[ignore_ws] )
keyword[return] identifier[lines]
|
def find_objects_wo_child(config=None, config_path=None, parent_regex=None, child_regex=None, ignore_ws=False, saltenv='base'):
"""
Return a list of parent ``ciscoconfparse.IOSCfgLine`` objects, which matched
the ``parent_regex`` and whose children did *not* match ``child_regex``.
Only the parent ``ciscoconfparse.IOSCfgLine`` objects will be returned. For
simplicity, this method only finds oldest ancestors without immediate
children that match.
.. warning::
This function is mostly valuable when invoked from other Salt
components (i.e., execution modules, states, templates etc.). For CLI
usage, please consider using
:py:func:`ciscoconfparse.find_lines_wo_child <salt.ciscoconfparse_mod.find_lines_wo_child>`
config
The configuration sent as text.
.. note::
This argument is ignored when ``config_path`` is specified.
config_path
The absolute or remote path to the file with the configuration to be
parsed. This argument supports the usual Salt filesystem URIs, e.g.,
``salt://``, ``https://``, ``ftp://``, ``s3://``, etc.
parent_regex
The regular expression to match the parent lines against.
child_regex
The regular expression to match the child lines against.
ignore_ws: ``False``
Whether to ignore the white spaces.
saltenv: ``base``
Salt fileserver environment from which to retrieve the file. This
argument is ignored when ``config_path`` is not a ``salt://`` URL.
Usage example:
.. code-block:: python
objects = __salt__['ciscoconfparse.find_objects_wo_child'](config_path='https://bit.ly/2mAdq7z',
parent_regex='line con',
child_regex='stopbits')
for obj in objects:
print(obj.text)
"""
ccp = _get_ccp(config=config, config_path=config_path, saltenv=saltenv)
lines = ccp.find_objects_wo_child(parent_regex, child_regex, ignore_ws=ignore_ws)
return lines
|
def __evaluation_error(self, result, condition, throw):
"""Helper-method for easy error-logging"""
self.log.error("Result does not match condition, dropping item. "
"Result %s; Condition: %s; Throw: %s",
result, condition, throw)
return False
|
def function[__evaluation_error, parameter[self, result, condition, throw]]:
constant[Helper-method for easy error-logging]
call[name[self].log.error, parameter[constant[Result does not match condition, dropping item. Result %s; Condition: %s; Throw: %s], name[result], name[condition], name[throw]]]
return[constant[False]]
|
keyword[def] identifier[__evaluation_error] ( identifier[self] , identifier[result] , identifier[condition] , identifier[throw] ):
literal[string]
identifier[self] . identifier[log] . identifier[error] ( literal[string]
literal[string] ,
identifier[result] , identifier[condition] , identifier[throw] )
keyword[return] keyword[False]
|
def __evaluation_error(self, result, condition, throw):
"""Helper-method for easy error-logging"""
self.log.error('Result does not match condition, dropping item. Result %s; Condition: %s; Throw: %s', result, condition, throw)
return False
|
def ws004c(self, value=None):
"""Corresponds to IDD Field `ws004c`
Args:
value (float): value for IDD Field `ws004c`
Unit: m/s
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError('value {} need to be of type float '
'for field `ws004c`'.format(value))
self._ws004c = value
|
def function[ws004c, parameter[self, value]]:
constant[Corresponds to IDD Field `ws004c`
Args:
value (float): value for IDD Field `ws004c`
Unit: m/s
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
]
if compare[name[value] is_not constant[None]] begin[:]
<ast.Try object at 0x7da18c4cec50>
name[self]._ws004c assign[=] name[value]
|
keyword[def] identifier[ws004c] ( identifier[self] , identifier[value] = keyword[None] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[value] = identifier[float] ( identifier[value] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[value] ))
identifier[self] . identifier[_ws004c] = identifier[value]
|
def ws004c(self, value=None):
"""Corresponds to IDD Field `ws004c`
Args:
value (float): value for IDD Field `ws004c`
Unit: m/s
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = float(value) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('value {} need to be of type float for field `ws004c`'.format(value)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['value']]
self._ws004c = value
|
def get_user(
self, identified_with, identifier, req, resp, resource, uri_kwargs
):
"""Get user object for given identifier.
Args:
identified_with (object): authentication middleware used
to identify the user.
identifier: middleware specifix user identifier (string or tuple
in case of all built in authentication middleware classes).
Returns:
dict: user object stored in Redis if it exists, otherwise ``None``
"""
stored_value = self.kv_store.get(
self._get_storage_key(identified_with, identifier)
)
if stored_value is not None:
user = self.serialization.loads(stored_value.decode())
else:
user = None
return user
|
def function[get_user, parameter[self, identified_with, identifier, req, resp, resource, uri_kwargs]]:
constant[Get user object for given identifier.
Args:
identified_with (object): authentication middleware used
to identify the user.
identifier: middleware specifix user identifier (string or tuple
in case of all built in authentication middleware classes).
Returns:
dict: user object stored in Redis if it exists, otherwise ``None``
]
variable[stored_value] assign[=] call[name[self].kv_store.get, parameter[call[name[self]._get_storage_key, parameter[name[identified_with], name[identifier]]]]]
if compare[name[stored_value] is_not constant[None]] begin[:]
variable[user] assign[=] call[name[self].serialization.loads, parameter[call[name[stored_value].decode, parameter[]]]]
return[name[user]]
|
keyword[def] identifier[get_user] (
identifier[self] , identifier[identified_with] , identifier[identifier] , identifier[req] , identifier[resp] , identifier[resource] , identifier[uri_kwargs]
):
literal[string]
identifier[stored_value] = identifier[self] . identifier[kv_store] . identifier[get] (
identifier[self] . identifier[_get_storage_key] ( identifier[identified_with] , identifier[identifier] )
)
keyword[if] identifier[stored_value] keyword[is] keyword[not] keyword[None] :
identifier[user] = identifier[self] . identifier[serialization] . identifier[loads] ( identifier[stored_value] . identifier[decode] ())
keyword[else] :
identifier[user] = keyword[None]
keyword[return] identifier[user]
|
def get_user(self, identified_with, identifier, req, resp, resource, uri_kwargs):
"""Get user object for given identifier.
Args:
identified_with (object): authentication middleware used
to identify the user.
identifier: middleware specifix user identifier (string or tuple
in case of all built in authentication middleware classes).
Returns:
dict: user object stored in Redis if it exists, otherwise ``None``
"""
stored_value = self.kv_store.get(self._get_storage_key(identified_with, identifier))
if stored_value is not None:
user = self.serialization.loads(stored_value.decode()) # depends on [control=['if'], data=['stored_value']]
else:
user = None
return user
|
def scrape_links(html, engine):
"""
function to scrape file links from html response
"""
soup = BeautifulSoup(html, 'lxml')
links = []
if engine == 'd':
results = soup.findAll('a', {'class': 'result__a'})
for result in results:
link = result.get('href')[15:]
link = link.replace('/blob/', '/raw/')
links.append(link)
elif engine == 'g':
results = soup.findAll('h3', {'class': 'r'})
for result in results:
link = result.a['href'][7:].split('&')[0]
link = link.replace('/blob/', '/raw/')
links.append(link)
return links
|
def function[scrape_links, parameter[html, engine]]:
constant[
function to scrape file links from html response
]
variable[soup] assign[=] call[name[BeautifulSoup], parameter[name[html], constant[lxml]]]
variable[links] assign[=] list[[]]
if compare[name[engine] equal[==] constant[d]] begin[:]
variable[results] assign[=] call[name[soup].findAll, parameter[constant[a], dictionary[[<ast.Constant object at 0x7da1b0f3d150>], [<ast.Constant object at 0x7da1b0f3cf10>]]]]
for taget[name[result]] in starred[name[results]] begin[:]
variable[link] assign[=] call[call[name[result].get, parameter[constant[href]]]][<ast.Slice object at 0x7da1b0f3d270>]
variable[link] assign[=] call[name[link].replace, parameter[constant[/blob/], constant[/raw/]]]
call[name[links].append, parameter[name[link]]]
return[name[links]]
|
keyword[def] identifier[scrape_links] ( identifier[html] , identifier[engine] ):
literal[string]
identifier[soup] = identifier[BeautifulSoup] ( identifier[html] , literal[string] )
identifier[links] =[]
keyword[if] identifier[engine] == literal[string] :
identifier[results] = identifier[soup] . identifier[findAll] ( literal[string] ,{ literal[string] : literal[string] })
keyword[for] identifier[result] keyword[in] identifier[results] :
identifier[link] = identifier[result] . identifier[get] ( literal[string] )[ literal[int] :]
identifier[link] = identifier[link] . identifier[replace] ( literal[string] , literal[string] )
identifier[links] . identifier[append] ( identifier[link] )
keyword[elif] identifier[engine] == literal[string] :
identifier[results] = identifier[soup] . identifier[findAll] ( literal[string] ,{ literal[string] : literal[string] })
keyword[for] identifier[result] keyword[in] identifier[results] :
identifier[link] = identifier[result] . identifier[a] [ literal[string] ][ literal[int] :]. identifier[split] ( literal[string] )[ literal[int] ]
identifier[link] = identifier[link] . identifier[replace] ( literal[string] , literal[string] )
identifier[links] . identifier[append] ( identifier[link] )
keyword[return] identifier[links]
|
def scrape_links(html, engine):
"""
function to scrape file links from html response
"""
soup = BeautifulSoup(html, 'lxml')
links = []
if engine == 'd':
results = soup.findAll('a', {'class': 'result__a'})
for result in results:
link = result.get('href')[15:]
link = link.replace('/blob/', '/raw/')
links.append(link) # depends on [control=['for'], data=['result']] # depends on [control=['if'], data=[]]
elif engine == 'g':
results = soup.findAll('h3', {'class': 'r'})
for result in results:
link = result.a['href'][7:].split('&')[0]
link = link.replace('/blob/', '/raw/')
links.append(link) # depends on [control=['for'], data=['result']] # depends on [control=['if'], data=[]]
return links
|
def _growing_step_sequence(interval_growth, max_interval, init_interval, start_level=None):
"""
Returns an iterator that constructs a sequence of trigger levels with growing intervals.
The interval is growing exponentially until it reaches the maximum value. Then the interval
stays the same and the sequence becomes linear.
An optional starting level `start_level` defaults to the initial interval. The interval
starts out as `init_interval`, multiplied by `interval_growth` in each step until it
reaches the `max_interval`.
"""
interval = init_interval
next_level = start_level or init_interval
while True:
yield next_level
interval = min(interval * interval_growth, max_interval)
next_level += interval
|
def function[_growing_step_sequence, parameter[interval_growth, max_interval, init_interval, start_level]]:
constant[
Returns an iterator that constructs a sequence of trigger levels with growing intervals.
The interval is growing exponentially until it reaches the maximum value. Then the interval
stays the same and the sequence becomes linear.
An optional starting level `start_level` defaults to the initial interval. The interval
starts out as `init_interval`, multiplied by `interval_growth` in each step until it
reaches the `max_interval`.
]
variable[interval] assign[=] name[init_interval]
variable[next_level] assign[=] <ast.BoolOp object at 0x7da1b1cee260>
while constant[True] begin[:]
<ast.Yield object at 0x7da1b1cedea0>
variable[interval] assign[=] call[name[min], parameter[binary_operation[name[interval] * name[interval_growth]], name[max_interval]]]
<ast.AugAssign object at 0x7da1b1ced2d0>
|
keyword[def] identifier[_growing_step_sequence] ( identifier[interval_growth] , identifier[max_interval] , identifier[init_interval] , identifier[start_level] = keyword[None] ):
literal[string]
identifier[interval] = identifier[init_interval]
identifier[next_level] = identifier[start_level] keyword[or] identifier[init_interval]
keyword[while] keyword[True] :
keyword[yield] identifier[next_level]
identifier[interval] = identifier[min] ( identifier[interval] * identifier[interval_growth] , identifier[max_interval] )
identifier[next_level] += identifier[interval]
|
def _growing_step_sequence(interval_growth, max_interval, init_interval, start_level=None):
"""
Returns an iterator that constructs a sequence of trigger levels with growing intervals.
The interval is growing exponentially until it reaches the maximum value. Then the interval
stays the same and the sequence becomes linear.
An optional starting level `start_level` defaults to the initial interval. The interval
starts out as `init_interval`, multiplied by `interval_growth` in each step until it
reaches the `max_interval`.
"""
interval = init_interval
next_level = start_level or init_interval
while True:
yield next_level
interval = min(interval * interval_growth, max_interval)
next_level += interval # depends on [control=['while'], data=[]]
|
def minimize(self, *args, **kwargs):
'''Optimize our loss exhaustively.
This method is a thin wrapper over the :func:`iterate` method. It simply
exhausts the iterative optimization process and returns the final
monitor values.
Returns
-------
train_monitors : dict
A dictionary mapping monitor names to values, evaluated on the
training dataset.
valid_monitors : dict
A dictionary containing monitor values evaluated on the validation
dataset.
'''
monitors = None
for monitors in self.iterate(*args, **kwargs):
pass
return monitors
|
def function[minimize, parameter[self]]:
constant[Optimize our loss exhaustively.
This method is a thin wrapper over the :func:`iterate` method. It simply
exhausts the iterative optimization process and returns the final
monitor values.
Returns
-------
train_monitors : dict
A dictionary mapping monitor names to values, evaluated on the
training dataset.
valid_monitors : dict
A dictionary containing monitor values evaluated on the validation
dataset.
]
variable[monitors] assign[=] constant[None]
for taget[name[monitors]] in starred[call[name[self].iterate, parameter[<ast.Starred object at 0x7da1b106f490>]]] begin[:]
pass
return[name[monitors]]
|
keyword[def] identifier[minimize] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[monitors] = keyword[None]
keyword[for] identifier[monitors] keyword[in] identifier[self] . identifier[iterate] (* identifier[args] ,** identifier[kwargs] ):
keyword[pass]
keyword[return] identifier[monitors]
|
def minimize(self, *args, **kwargs):
"""Optimize our loss exhaustively.
This method is a thin wrapper over the :func:`iterate` method. It simply
exhausts the iterative optimization process and returns the final
monitor values.
Returns
-------
train_monitors : dict
A dictionary mapping monitor names to values, evaluated on the
training dataset.
valid_monitors : dict
A dictionary containing monitor values evaluated on the validation
dataset.
"""
monitors = None
for monitors in self.iterate(*args, **kwargs):
pass # depends on [control=['for'], data=[]]
return monitors
|
def hashleftjoin(left, right, key=None, lkey=None, rkey=None, missing=None,
cache=True, lprefix=None, rprefix=None):
"""Alternative implementation of :func:`petl.transform.joins.leftjoin`,
where the join is executed by constructing an in-memory lookup for the
right hand table, then iterating over rows from the left hand table.
May be faster and/or more resource efficient where the right table is small
and the left table is large.
By default data from right hand table is cached to improve performance
(only available when `key` is given).
Left and right tables with different key fields can be handled via the
`lkey` and `rkey` arguments.
"""
lkey, rkey = keys_from_args(left, right, key, lkey, rkey)
return HashLeftJoinView(left, right, lkey, rkey, missing=missing,
cache=cache, lprefix=lprefix, rprefix=rprefix)
|
def function[hashleftjoin, parameter[left, right, key, lkey, rkey, missing, cache, lprefix, rprefix]]:
constant[Alternative implementation of :func:`petl.transform.joins.leftjoin`,
where the join is executed by constructing an in-memory lookup for the
right hand table, then iterating over rows from the left hand table.
May be faster and/or more resource efficient where the right table is small
and the left table is large.
By default data from right hand table is cached to improve performance
(only available when `key` is given).
Left and right tables with different key fields can be handled via the
`lkey` and `rkey` arguments.
]
<ast.Tuple object at 0x7da18f09e0e0> assign[=] call[name[keys_from_args], parameter[name[left], name[right], name[key], name[lkey], name[rkey]]]
return[call[name[HashLeftJoinView], parameter[name[left], name[right], name[lkey], name[rkey]]]]
|
keyword[def] identifier[hashleftjoin] ( identifier[left] , identifier[right] , identifier[key] = keyword[None] , identifier[lkey] = keyword[None] , identifier[rkey] = keyword[None] , identifier[missing] = keyword[None] ,
identifier[cache] = keyword[True] , identifier[lprefix] = keyword[None] , identifier[rprefix] = keyword[None] ):
literal[string]
identifier[lkey] , identifier[rkey] = identifier[keys_from_args] ( identifier[left] , identifier[right] , identifier[key] , identifier[lkey] , identifier[rkey] )
keyword[return] identifier[HashLeftJoinView] ( identifier[left] , identifier[right] , identifier[lkey] , identifier[rkey] , identifier[missing] = identifier[missing] ,
identifier[cache] = identifier[cache] , identifier[lprefix] = identifier[lprefix] , identifier[rprefix] = identifier[rprefix] )
|
def hashleftjoin(left, right, key=None, lkey=None, rkey=None, missing=None, cache=True, lprefix=None, rprefix=None):
"""Alternative implementation of :func:`petl.transform.joins.leftjoin`,
where the join is executed by constructing an in-memory lookup for the
right hand table, then iterating over rows from the left hand table.
May be faster and/or more resource efficient where the right table is small
and the left table is large.
By default data from right hand table is cached to improve performance
(only available when `key` is given).
Left and right tables with different key fields can be handled via the
`lkey` and `rkey` arguments.
"""
(lkey, rkey) = keys_from_args(left, right, key, lkey, rkey)
return HashLeftJoinView(left, right, lkey, rkey, missing=missing, cache=cache, lprefix=lprefix, rprefix=rprefix)
|
def get_type(self, notification_type_id):
"""
Returns a CloudMonitorNotificationType object for the given ID.
"""
uri = "/notification_types/%s" % utils.get_id(notification_type_id)
resp, resp_body = self.api.method_get(uri)
return CloudMonitorNotificationType(self, resp_body)
|
def function[get_type, parameter[self, notification_type_id]]:
constant[
Returns a CloudMonitorNotificationType object for the given ID.
]
variable[uri] assign[=] binary_operation[constant[/notification_types/%s] <ast.Mod object at 0x7da2590d6920> call[name[utils].get_id, parameter[name[notification_type_id]]]]
<ast.Tuple object at 0x7da1b056fa30> assign[=] call[name[self].api.method_get, parameter[name[uri]]]
return[call[name[CloudMonitorNotificationType], parameter[name[self], name[resp_body]]]]
|
keyword[def] identifier[get_type] ( identifier[self] , identifier[notification_type_id] ):
literal[string]
identifier[uri] = literal[string] % identifier[utils] . identifier[get_id] ( identifier[notification_type_id] )
identifier[resp] , identifier[resp_body] = identifier[self] . identifier[api] . identifier[method_get] ( identifier[uri] )
keyword[return] identifier[CloudMonitorNotificationType] ( identifier[self] , identifier[resp_body] )
|
def get_type(self, notification_type_id):
"""
Returns a CloudMonitorNotificationType object for the given ID.
"""
uri = '/notification_types/%s' % utils.get_id(notification_type_id)
(resp, resp_body) = self.api.method_get(uri)
return CloudMonitorNotificationType(self, resp_body)
|
def action(self, name, pk):
"""
Action method to handle actions from a show view
"""
pk = self._deserialize_pk_if_composite(pk)
if self.appbuilder.sm.has_access(name, self.__class__.__name__):
action = self.actions.get(name)
return action.func(self.datamodel.get(pk))
else:
flash(as_unicode(FLAMSG_ERR_SEC_ACCESS_DENIED), "danger")
return redirect(".")
|
def function[action, parameter[self, name, pk]]:
constant[
Action method to handle actions from a show view
]
variable[pk] assign[=] call[name[self]._deserialize_pk_if_composite, parameter[name[pk]]]
if call[name[self].appbuilder.sm.has_access, parameter[name[name], name[self].__class__.__name__]] begin[:]
variable[action] assign[=] call[name[self].actions.get, parameter[name[name]]]
return[call[name[action].func, parameter[call[name[self].datamodel.get, parameter[name[pk]]]]]]
|
keyword[def] identifier[action] ( identifier[self] , identifier[name] , identifier[pk] ):
literal[string]
identifier[pk] = identifier[self] . identifier[_deserialize_pk_if_composite] ( identifier[pk] )
keyword[if] identifier[self] . identifier[appbuilder] . identifier[sm] . identifier[has_access] ( identifier[name] , identifier[self] . identifier[__class__] . identifier[__name__] ):
identifier[action] = identifier[self] . identifier[actions] . identifier[get] ( identifier[name] )
keyword[return] identifier[action] . identifier[func] ( identifier[self] . identifier[datamodel] . identifier[get] ( identifier[pk] ))
keyword[else] :
identifier[flash] ( identifier[as_unicode] ( identifier[FLAMSG_ERR_SEC_ACCESS_DENIED] ), literal[string] )
keyword[return] identifier[redirect] ( literal[string] )
|
def action(self, name, pk):
"""
Action method to handle actions from a show view
"""
pk = self._deserialize_pk_if_composite(pk)
if self.appbuilder.sm.has_access(name, self.__class__.__name__):
action = self.actions.get(name)
return action.func(self.datamodel.get(pk)) # depends on [control=['if'], data=[]]
else:
flash(as_unicode(FLAMSG_ERR_SEC_ACCESS_DENIED), 'danger')
return redirect('.')
|
def is_root_state_of_library(self):
""" If self is the attribute LibraryState.state_copy of a LibraryState its the library root state and its parent
is a LibraryState
:return True or False
:rtype bool
"""
from rafcon.core.states.library_state import LibraryState
return isinstance(self.parent, LibraryState)
|
def function[is_root_state_of_library, parameter[self]]:
constant[ If self is the attribute LibraryState.state_copy of a LibraryState its the library root state and its parent
is a LibraryState
:return True or False
:rtype bool
]
from relative_module[rafcon.core.states.library_state] import module[LibraryState]
return[call[name[isinstance], parameter[name[self].parent, name[LibraryState]]]]
|
keyword[def] identifier[is_root_state_of_library] ( identifier[self] ):
literal[string]
keyword[from] identifier[rafcon] . identifier[core] . identifier[states] . identifier[library_state] keyword[import] identifier[LibraryState]
keyword[return] identifier[isinstance] ( identifier[self] . identifier[parent] , identifier[LibraryState] )
|
def is_root_state_of_library(self):
""" If self is the attribute LibraryState.state_copy of a LibraryState its the library root state and its parent
is a LibraryState
:return True or False
:rtype bool
"""
from rafcon.core.states.library_state import LibraryState
return isinstance(self.parent, LibraryState)
|
def validate(cls, grammar):
# type: (_MetaRule, Grammar) -> None
"""
Perform rules validation of the class.
:param grammar: Grammar on which to validate.
:raise RuleSyntaxException: If invalid syntax is used.
:raise UselessEpsilonException: If epsilon used in rules in useless.
:raise TerminalDoesNotExistsException: If terminal does not exists in the grammar.
:raise NonterminalDoesNotExistsException: If nonterminal does not exists in the grammar.
"""
# check if the rule is not defined multiple times
defined = set(dir(cls))
if 'rules' in defined and len(defined & {'rule', 'left', 'right', 'toSymbol', 'fromSymbol'}) > 0 or \
'rule' in defined and len(defined & {'left', 'right', 'toSymbol', 'fromSymbol'}) > 0 or \
'left' in defined and 'fromSymbol' in defined or \
'right' in defined and 'toSymbol' in defined:
raise MultipleDefinitionException(cls, 'Rule is defined multiple times')
# check if the rule is defined properly
all = cls.rules
if not isinstance(all, list):
raise RuleSyntaxException(cls, 'Rules property is not enclose in list')
for rule in all:
if not isinstance(rule, tuple):
raise RuleSyntaxException(cls, 'One of the rules is not enclose in tuple', rule)
if len(rule) != 2:
raise RuleSyntaxException(cls, 'One of the rules does not have define left and right part', rule)
left = rule[0]
right = rule[1]
cls._controlSide(left, grammar)
cls._controlSide(right, grammar)
if left == [EPS] and right == [EPS]:
raise UselessEpsilonException(cls)
|
def function[validate, parameter[cls, grammar]]:
constant[
Perform rules validation of the class.
:param grammar: Grammar on which to validate.
:raise RuleSyntaxException: If invalid syntax is used.
:raise UselessEpsilonException: If epsilon used in rules in useless.
:raise TerminalDoesNotExistsException: If terminal does not exists in the grammar.
:raise NonterminalDoesNotExistsException: If nonterminal does not exists in the grammar.
]
variable[defined] assign[=] call[name[set], parameter[call[name[dir], parameter[name[cls]]]]]
if <ast.BoolOp object at 0x7da1b276a020> begin[:]
<ast.Raise object at 0x7da1b27684c0>
variable[all] assign[=] name[cls].rules
if <ast.UnaryOp object at 0x7da1b2769a80> begin[:]
<ast.Raise object at 0x7da1b276aef0>
for taget[name[rule]] in starred[name[all]] begin[:]
if <ast.UnaryOp object at 0x7da1b2717d60> begin[:]
<ast.Raise object at 0x7da1b2717b80>
if compare[call[name[len], parameter[name[rule]]] not_equal[!=] constant[2]] begin[:]
<ast.Raise object at 0x7da1b2775720>
variable[left] assign[=] call[name[rule]][constant[0]]
variable[right] assign[=] call[name[rule]][constant[1]]
call[name[cls]._controlSide, parameter[name[left], name[grammar]]]
call[name[cls]._controlSide, parameter[name[right], name[grammar]]]
if <ast.BoolOp object at 0x7da1b2775e40> begin[:]
<ast.Raise object at 0x7da1b2777850>
|
keyword[def] identifier[validate] ( identifier[cls] , identifier[grammar] ):
literal[string]
identifier[defined] = identifier[set] ( identifier[dir] ( identifier[cls] ))
keyword[if] literal[string] keyword[in] identifier[defined] keyword[and] identifier[len] ( identifier[defined] &{ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] })> literal[int] keyword[or] literal[string] keyword[in] identifier[defined] keyword[and] identifier[len] ( identifier[defined] &{ literal[string] , literal[string] , literal[string] , literal[string] })> literal[int] keyword[or] literal[string] keyword[in] identifier[defined] keyword[and] literal[string] keyword[in] identifier[defined] keyword[or] literal[string] keyword[in] identifier[defined] keyword[and] literal[string] keyword[in] identifier[defined] :
keyword[raise] identifier[MultipleDefinitionException] ( identifier[cls] , literal[string] )
identifier[all] = identifier[cls] . identifier[rules]
keyword[if] keyword[not] identifier[isinstance] ( identifier[all] , identifier[list] ):
keyword[raise] identifier[RuleSyntaxException] ( identifier[cls] , literal[string] )
keyword[for] identifier[rule] keyword[in] identifier[all] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[rule] , identifier[tuple] ):
keyword[raise] identifier[RuleSyntaxException] ( identifier[cls] , literal[string] , identifier[rule] )
keyword[if] identifier[len] ( identifier[rule] )!= literal[int] :
keyword[raise] identifier[RuleSyntaxException] ( identifier[cls] , literal[string] , identifier[rule] )
identifier[left] = identifier[rule] [ literal[int] ]
identifier[right] = identifier[rule] [ literal[int] ]
identifier[cls] . identifier[_controlSide] ( identifier[left] , identifier[grammar] )
identifier[cls] . identifier[_controlSide] ( identifier[right] , identifier[grammar] )
keyword[if] identifier[left] ==[ identifier[EPS] ] keyword[and] identifier[right] ==[ identifier[EPS] ]:
keyword[raise] identifier[UselessEpsilonException] ( identifier[cls] )
|
def validate(cls, grammar):
# type: (_MetaRule, Grammar) -> None
'\n Perform rules validation of the class.\n :param grammar: Grammar on which to validate.\n :raise RuleSyntaxException: If invalid syntax is used.\n :raise UselessEpsilonException: If epsilon used in rules in useless.\n :raise TerminalDoesNotExistsException: If terminal does not exists in the grammar.\n :raise NonterminalDoesNotExistsException: If nonterminal does not exists in the grammar.\n '
# check if the rule is not defined multiple times
defined = set(dir(cls))
if 'rules' in defined and len(defined & {'rule', 'left', 'right', 'toSymbol', 'fromSymbol'}) > 0 or ('rule' in defined and len(defined & {'left', 'right', 'toSymbol', 'fromSymbol'}) > 0) or ('left' in defined and 'fromSymbol' in defined) or ('right' in defined and 'toSymbol' in defined):
raise MultipleDefinitionException(cls, 'Rule is defined multiple times') # depends on [control=['if'], data=[]]
# check if the rule is defined properly
all = cls.rules
if not isinstance(all, list):
raise RuleSyntaxException(cls, 'Rules property is not enclose in list') # depends on [control=['if'], data=[]]
for rule in all:
if not isinstance(rule, tuple):
raise RuleSyntaxException(cls, 'One of the rules is not enclose in tuple', rule) # depends on [control=['if'], data=[]]
if len(rule) != 2:
raise RuleSyntaxException(cls, 'One of the rules does not have define left and right part', rule) # depends on [control=['if'], data=[]]
left = rule[0]
right = rule[1]
cls._controlSide(left, grammar)
cls._controlSide(right, grammar)
if left == [EPS] and right == [EPS]:
raise UselessEpsilonException(cls) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rule']]
|
def which(program):
''' look for "program" in PATH (respecting PATHEXT), and return the path to
it, or None if it was not found
'''
# current directory / absolute paths:
if os.path.exists(program) and os.access(program, os.X_OK):
return program
# PATH:
for path in os.environ['PATH'].split(os.pathsep):
# path variables may be quoted:
path = path.strip('"')
for ext in os.environ.get('PATHEXT', '').split(os.pathsep):
progpath = os.path.join(path, program + ext)
if os.path.exists(progpath) and os.access(progpath, os.X_OK):
return progpath
# not found
return None
|
def function[which, parameter[program]]:
constant[ look for "program" in PATH (respecting PATHEXT), and return the path to
it, or None if it was not found
]
if <ast.BoolOp object at 0x7da1b00e1ab0> begin[:]
return[name[program]]
for taget[name[path]] in starred[call[call[name[os].environ][constant[PATH]].split, parameter[name[os].pathsep]]] begin[:]
variable[path] assign[=] call[name[path].strip, parameter[constant["]]]
for taget[name[ext]] in starred[call[call[name[os].environ.get, parameter[constant[PATHEXT], constant[]]].split, parameter[name[os].pathsep]]] begin[:]
variable[progpath] assign[=] call[name[os].path.join, parameter[name[path], binary_operation[name[program] + name[ext]]]]
if <ast.BoolOp object at 0x7da1b00e2830> begin[:]
return[name[progpath]]
return[constant[None]]
|
keyword[def] identifier[which] ( identifier[program] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[program] ) keyword[and] identifier[os] . identifier[access] ( identifier[program] , identifier[os] . identifier[X_OK] ):
keyword[return] identifier[program]
keyword[for] identifier[path] keyword[in] identifier[os] . identifier[environ] [ literal[string] ]. identifier[split] ( identifier[os] . identifier[pathsep] ):
identifier[path] = identifier[path] . identifier[strip] ( literal[string] )
keyword[for] identifier[ext] keyword[in] identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[string] ). identifier[split] ( identifier[os] . identifier[pathsep] ):
identifier[progpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[program] + identifier[ext] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[progpath] ) keyword[and] identifier[os] . identifier[access] ( identifier[progpath] , identifier[os] . identifier[X_OK] ):
keyword[return] identifier[progpath]
keyword[return] keyword[None]
|
def which(program):
""" look for "program" in PATH (respecting PATHEXT), and return the path to
it, or None if it was not found
"""
# current directory / absolute paths:
if os.path.exists(program) and os.access(program, os.X_OK):
return program # depends on [control=['if'], data=[]]
# PATH:
for path in os.environ['PATH'].split(os.pathsep):
# path variables may be quoted:
path = path.strip('"')
for ext in os.environ.get('PATHEXT', '').split(os.pathsep):
progpath = os.path.join(path, program + ext)
if os.path.exists(progpath) and os.access(progpath, os.X_OK):
return progpath # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ext']] # depends on [control=['for'], data=['path']]
# not found
return None
|
def get_session(self, token=None):
'''
If provided, the `token` parameter is used to initialize an
authenticated session, otherwise an unauthenticated session object is
generated. Returns an instance of :attr:`session_obj`..
:param token: A token with which to initilize the session.
:type token: str
'''
if token is not None:
session = self.session_obj(self.client_id,
self.client_secret,
token,
service=self)
else: # pragma: no cover
session = self.session_obj(self.client_id,
self.client_secret,
service=self)
return session
|
def function[get_session, parameter[self, token]]:
constant[
If provided, the `token` parameter is used to initialize an
authenticated session, otherwise an unauthenticated session object is
generated. Returns an instance of :attr:`session_obj`..
:param token: A token with which to initilize the session.
:type token: str
]
if compare[name[token] is_not constant[None]] begin[:]
variable[session] assign[=] call[name[self].session_obj, parameter[name[self].client_id, name[self].client_secret, name[token]]]
return[name[session]]
|
keyword[def] identifier[get_session] ( identifier[self] , identifier[token] = keyword[None] ):
literal[string]
keyword[if] identifier[token] keyword[is] keyword[not] keyword[None] :
identifier[session] = identifier[self] . identifier[session_obj] ( identifier[self] . identifier[client_id] ,
identifier[self] . identifier[client_secret] ,
identifier[token] ,
identifier[service] = identifier[self] )
keyword[else] :
identifier[session] = identifier[self] . identifier[session_obj] ( identifier[self] . identifier[client_id] ,
identifier[self] . identifier[client_secret] ,
identifier[service] = identifier[self] )
keyword[return] identifier[session]
|
def get_session(self, token=None):
"""
If provided, the `token` parameter is used to initialize an
authenticated session, otherwise an unauthenticated session object is
generated. Returns an instance of :attr:`session_obj`..
:param token: A token with which to initilize the session.
:type token: str
"""
if token is not None:
session = self.session_obj(self.client_id, self.client_secret, token, service=self) # depends on [control=['if'], data=['token']]
else: # pragma: no cover
session = self.session_obj(self.client_id, self.client_secret, service=self)
return session
|
def hpss(S, kernel_size=31, power=2.0, mask=False, margin=1.0):
"""Median-filtering harmonic percussive source separation (HPSS).
If `margin = 1.0`, decomposes an input spectrogram `S = H + P`
where `H` contains the harmonic components,
and `P` contains the percussive components.
If `margin > 1.0`, decomposes an input spectrogram `S = H + P + R`
where `R` contains residual components not included in `H` or `P`.
This implementation is based upon the algorithm described by [1]_ and [2]_.
.. [1] Fitzgerald, Derry.
"Harmonic/percussive separation using median filtering."
13th International Conference on Digital Audio Effects (DAFX10),
Graz, Austria, 2010.
.. [2] Driedger, Müller, Disch.
"Extending harmonic-percussive separation of audio."
15th International Society for Music Information Retrieval Conference (ISMIR 2014),
Taipei, Taiwan, 2014.
Parameters
----------
S : np.ndarray [shape=(d, n)]
input spectrogram. May be real (magnitude) or complex.
kernel_size : int or tuple (kernel_harmonic, kernel_percussive)
kernel size(s) for the median filters.
- If scalar, the same size is used for both harmonic and percussive.
- If tuple, the first value specifies the width of the
harmonic filter, and the second value specifies the width
of the percussive filter.
power : float > 0 [scalar]
Exponent for the Wiener filter when constructing soft mask matrices.
mask : bool
Return the masking matrices instead of components.
Masking matrices contain non-negative real values that
can be used to measure the assignment of energy from `S`
into harmonic or percussive components.
Components can be recovered by multiplying `S * mask_H`
or `S * mask_P`.
margin : float or tuple (margin_harmonic, margin_percussive)
margin size(s) for the masks (as described in [2]_)
- If scalar, the same size is used for both harmonic and percussive.
- If tuple, the first value specifies the margin of the
harmonic mask, and the second value specifies the margin
of the percussive mask.
Returns
-------
harmonic : np.ndarray [shape=(d, n)]
harmonic component (or mask)
percussive : np.ndarray [shape=(d, n)]
percussive component (or mask)
See Also
--------
util.softmask
Notes
-----
This function caches at level 30.
Examples
--------
Separate into harmonic and percussive
>>> y, sr = librosa.load(librosa.util.example_audio_file(), duration=15)
>>> D = librosa.stft(y)
>>> H, P = librosa.decompose.hpss(D)
>>> import matplotlib.pyplot as plt
>>> plt.figure()
>>> plt.subplot(3, 1, 1)
>>> librosa.display.specshow(librosa.amplitude_to_db(np.abs(D),
... ref=np.max),
... y_axis='log')
>>> plt.colorbar(format='%+2.0f dB')
>>> plt.title('Full power spectrogram')
>>> plt.subplot(3, 1, 2)
>>> librosa.display.specshow(librosa.amplitude_to_db(np.abs(H),
... ref=np.max),
... y_axis='log')
>>> plt.colorbar(format='%+2.0f dB')
>>> plt.title('Harmonic power spectrogram')
>>> plt.subplot(3, 1, 3)
>>> librosa.display.specshow(librosa.amplitude_to_db(np.abs(P),
... ref=np.max),
... y_axis='log')
>>> plt.colorbar(format='%+2.0f dB')
>>> plt.title('Percussive power spectrogram')
>>> plt.tight_layout()
Or with a narrower horizontal filter
>>> H, P = librosa.decompose.hpss(D, kernel_size=(13, 31))
Just get harmonic/percussive masks, not the spectra
>>> mask_H, mask_P = librosa.decompose.hpss(D, mask=True)
>>> mask_H
array([[ 1.000e+00, 1.469e-01, ..., 2.648e-03, 2.164e-03],
[ 1.000e+00, 2.368e-01, ..., 9.413e-03, 7.703e-03],
...,
[ 8.869e-01, 5.673e-02, ..., 4.603e-02, 1.247e-05],
[ 7.068e-01, 2.194e-02, ..., 4.453e-02, 1.205e-05]], dtype=float32)
>>> mask_P
array([[ 2.858e-05, 8.531e-01, ..., 9.974e-01, 9.978e-01],
[ 1.586e-05, 7.632e-01, ..., 9.906e-01, 9.923e-01],
...,
[ 1.131e-01, 9.433e-01, ..., 9.540e-01, 1.000e+00],
[ 2.932e-01, 9.781e-01, ..., 9.555e-01, 1.000e+00]], dtype=float32)
Separate into harmonic/percussive/residual components by using a margin > 1.0
>>> H, P = librosa.decompose.hpss(D, margin=3.0)
>>> R = D - (H+P)
>>> y_harm = librosa.core.istft(H)
>>> y_perc = librosa.core.istft(P)
>>> y_resi = librosa.core.istft(R)
Get a more isolated percussive component by widening its margin
>>> H, P = librosa.decompose.hpss(D, margin=(1.0,5.0))
"""
if np.iscomplexobj(S):
S, phase = core.magphase(S)
else:
phase = 1
if np.isscalar(kernel_size):
win_harm = kernel_size
win_perc = kernel_size
else:
win_harm = kernel_size[0]
win_perc = kernel_size[1]
if np.isscalar(margin):
margin_harm = margin
margin_perc = margin
else:
margin_harm = margin[0]
margin_perc = margin[1]
# margin minimum is 1.0
if margin_harm < 1 or margin_perc < 1:
raise ParameterError("Margins must be >= 1.0. "
"A typical range is between 1 and 10.")
# Compute median filters. Pre-allocation here preserves memory layout.
harm = np.empty_like(S)
harm[:] = median_filter(S, size=(1, win_harm), mode='reflect')
perc = np.empty_like(S)
perc[:] = median_filter(S, size=(win_perc, 1), mode='reflect')
split_zeros = (margin_harm == 1 and margin_perc == 1)
mask_harm = util.softmask(harm, perc * margin_harm,
power=power,
split_zeros=split_zeros)
mask_perc = util.softmask(perc, harm * margin_perc,
power=power,
split_zeros=split_zeros)
if mask:
return mask_harm, mask_perc
return ((S * mask_harm) * phase, (S * mask_perc) * phase)
|
def function[hpss, parameter[S, kernel_size, power, mask, margin]]:
constant[Median-filtering harmonic percussive source separation (HPSS).
If `margin = 1.0`, decomposes an input spectrogram `S = H + P`
where `H` contains the harmonic components,
and `P` contains the percussive components.
If `margin > 1.0`, decomposes an input spectrogram `S = H + P + R`
where `R` contains residual components not included in `H` or `P`.
This implementation is based upon the algorithm described by [1]_ and [2]_.
.. [1] Fitzgerald, Derry.
"Harmonic/percussive separation using median filtering."
13th International Conference on Digital Audio Effects (DAFX10),
Graz, Austria, 2010.
.. [2] Driedger, Müller, Disch.
"Extending harmonic-percussive separation of audio."
15th International Society for Music Information Retrieval Conference (ISMIR 2014),
Taipei, Taiwan, 2014.
Parameters
----------
S : np.ndarray [shape=(d, n)]
input spectrogram. May be real (magnitude) or complex.
kernel_size : int or tuple (kernel_harmonic, kernel_percussive)
kernel size(s) for the median filters.
- If scalar, the same size is used for both harmonic and percussive.
- If tuple, the first value specifies the width of the
harmonic filter, and the second value specifies the width
of the percussive filter.
power : float > 0 [scalar]
Exponent for the Wiener filter when constructing soft mask matrices.
mask : bool
Return the masking matrices instead of components.
Masking matrices contain non-negative real values that
can be used to measure the assignment of energy from `S`
into harmonic or percussive components.
Components can be recovered by multiplying `S * mask_H`
or `S * mask_P`.
margin : float or tuple (margin_harmonic, margin_percussive)
margin size(s) for the masks (as described in [2]_)
- If scalar, the same size is used for both harmonic and percussive.
- If tuple, the first value specifies the margin of the
harmonic mask, and the second value specifies the margin
of the percussive mask.
Returns
-------
harmonic : np.ndarray [shape=(d, n)]
harmonic component (or mask)
percussive : np.ndarray [shape=(d, n)]
percussive component (or mask)
See Also
--------
util.softmask
Notes
-----
This function caches at level 30.
Examples
--------
Separate into harmonic and percussive
>>> y, sr = librosa.load(librosa.util.example_audio_file(), duration=15)
>>> D = librosa.stft(y)
>>> H, P = librosa.decompose.hpss(D)
>>> import matplotlib.pyplot as plt
>>> plt.figure()
>>> plt.subplot(3, 1, 1)
>>> librosa.display.specshow(librosa.amplitude_to_db(np.abs(D),
... ref=np.max),
... y_axis='log')
>>> plt.colorbar(format='%+2.0f dB')
>>> plt.title('Full power spectrogram')
>>> plt.subplot(3, 1, 2)
>>> librosa.display.specshow(librosa.amplitude_to_db(np.abs(H),
... ref=np.max),
... y_axis='log')
>>> plt.colorbar(format='%+2.0f dB')
>>> plt.title('Harmonic power spectrogram')
>>> plt.subplot(3, 1, 3)
>>> librosa.display.specshow(librosa.amplitude_to_db(np.abs(P),
... ref=np.max),
... y_axis='log')
>>> plt.colorbar(format='%+2.0f dB')
>>> plt.title('Percussive power spectrogram')
>>> plt.tight_layout()
Or with a narrower horizontal filter
>>> H, P = librosa.decompose.hpss(D, kernel_size=(13, 31))
Just get harmonic/percussive masks, not the spectra
>>> mask_H, mask_P = librosa.decompose.hpss(D, mask=True)
>>> mask_H
array([[ 1.000e+00, 1.469e-01, ..., 2.648e-03, 2.164e-03],
[ 1.000e+00, 2.368e-01, ..., 9.413e-03, 7.703e-03],
...,
[ 8.869e-01, 5.673e-02, ..., 4.603e-02, 1.247e-05],
[ 7.068e-01, 2.194e-02, ..., 4.453e-02, 1.205e-05]], dtype=float32)
>>> mask_P
array([[ 2.858e-05, 8.531e-01, ..., 9.974e-01, 9.978e-01],
[ 1.586e-05, 7.632e-01, ..., 9.906e-01, 9.923e-01],
...,
[ 1.131e-01, 9.433e-01, ..., 9.540e-01, 1.000e+00],
[ 2.932e-01, 9.781e-01, ..., 9.555e-01, 1.000e+00]], dtype=float32)
Separate into harmonic/percussive/residual components by using a margin > 1.0
>>> H, P = librosa.decompose.hpss(D, margin=3.0)
>>> R = D - (H+P)
>>> y_harm = librosa.core.istft(H)
>>> y_perc = librosa.core.istft(P)
>>> y_resi = librosa.core.istft(R)
Get a more isolated percussive component by widening its margin
>>> H, P = librosa.decompose.hpss(D, margin=(1.0,5.0))
]
if call[name[np].iscomplexobj, parameter[name[S]]] begin[:]
<ast.Tuple object at 0x7da20c6c7b80> assign[=] call[name[core].magphase, parameter[name[S]]]
if call[name[np].isscalar, parameter[name[kernel_size]]] begin[:]
variable[win_harm] assign[=] name[kernel_size]
variable[win_perc] assign[=] name[kernel_size]
if call[name[np].isscalar, parameter[name[margin]]] begin[:]
variable[margin_harm] assign[=] name[margin]
variable[margin_perc] assign[=] name[margin]
if <ast.BoolOp object at 0x7da20c6c59c0> begin[:]
<ast.Raise object at 0x7da20c6c4d60>
variable[harm] assign[=] call[name[np].empty_like, parameter[name[S]]]
call[name[harm]][<ast.Slice object at 0x7da20c6c4a30>] assign[=] call[name[median_filter], parameter[name[S]]]
variable[perc] assign[=] call[name[np].empty_like, parameter[name[S]]]
call[name[perc]][<ast.Slice object at 0x7da20c6c6a40>] assign[=] call[name[median_filter], parameter[name[S]]]
variable[split_zeros] assign[=] <ast.BoolOp object at 0x7da20c6c5bd0>
variable[mask_harm] assign[=] call[name[util].softmask, parameter[name[harm], binary_operation[name[perc] * name[margin_harm]]]]
variable[mask_perc] assign[=] call[name[util].softmask, parameter[name[perc], binary_operation[name[harm] * name[margin_perc]]]]
if name[mask] begin[:]
return[tuple[[<ast.Name object at 0x7da1b0532e60>, <ast.Name object at 0x7da1b0533c10>]]]
return[tuple[[<ast.BinOp object at 0x7da1b05331f0>, <ast.BinOp object at 0x7da1b0533130>]]]
|
keyword[def] identifier[hpss] ( identifier[S] , identifier[kernel_size] = literal[int] , identifier[power] = literal[int] , identifier[mask] = keyword[False] , identifier[margin] = literal[int] ):
literal[string]
keyword[if] identifier[np] . identifier[iscomplexobj] ( identifier[S] ):
identifier[S] , identifier[phase] = identifier[core] . identifier[magphase] ( identifier[S] )
keyword[else] :
identifier[phase] = literal[int]
keyword[if] identifier[np] . identifier[isscalar] ( identifier[kernel_size] ):
identifier[win_harm] = identifier[kernel_size]
identifier[win_perc] = identifier[kernel_size]
keyword[else] :
identifier[win_harm] = identifier[kernel_size] [ literal[int] ]
identifier[win_perc] = identifier[kernel_size] [ literal[int] ]
keyword[if] identifier[np] . identifier[isscalar] ( identifier[margin] ):
identifier[margin_harm] = identifier[margin]
identifier[margin_perc] = identifier[margin]
keyword[else] :
identifier[margin_harm] = identifier[margin] [ literal[int] ]
identifier[margin_perc] = identifier[margin] [ literal[int] ]
keyword[if] identifier[margin_harm] < literal[int] keyword[or] identifier[margin_perc] < literal[int] :
keyword[raise] identifier[ParameterError] ( literal[string]
literal[string] )
identifier[harm] = identifier[np] . identifier[empty_like] ( identifier[S] )
identifier[harm] [:]= identifier[median_filter] ( identifier[S] , identifier[size] =( literal[int] , identifier[win_harm] ), identifier[mode] = literal[string] )
identifier[perc] = identifier[np] . identifier[empty_like] ( identifier[S] )
identifier[perc] [:]= identifier[median_filter] ( identifier[S] , identifier[size] =( identifier[win_perc] , literal[int] ), identifier[mode] = literal[string] )
identifier[split_zeros] =( identifier[margin_harm] == literal[int] keyword[and] identifier[margin_perc] == literal[int] )
identifier[mask_harm] = identifier[util] . identifier[softmask] ( identifier[harm] , identifier[perc] * identifier[margin_harm] ,
identifier[power] = identifier[power] ,
identifier[split_zeros] = identifier[split_zeros] )
identifier[mask_perc] = identifier[util] . identifier[softmask] ( identifier[perc] , identifier[harm] * identifier[margin_perc] ,
identifier[power] = identifier[power] ,
identifier[split_zeros] = identifier[split_zeros] )
keyword[if] identifier[mask] :
keyword[return] identifier[mask_harm] , identifier[mask_perc]
keyword[return] (( identifier[S] * identifier[mask_harm] )* identifier[phase] ,( identifier[S] * identifier[mask_perc] )* identifier[phase] )
|
def hpss(S, kernel_size=31, power=2.0, mask=False, margin=1.0):
"""Median-filtering harmonic percussive source separation (HPSS).
If `margin = 1.0`, decomposes an input spectrogram `S = H + P`
where `H` contains the harmonic components,
and `P` contains the percussive components.
If `margin > 1.0`, decomposes an input spectrogram `S = H + P + R`
where `R` contains residual components not included in `H` or `P`.
This implementation is based upon the algorithm described by [1]_ and [2]_.
.. [1] Fitzgerald, Derry.
"Harmonic/percussive separation using median filtering."
13th International Conference on Digital Audio Effects (DAFX10),
Graz, Austria, 2010.
.. [2] Driedger, Müller, Disch.
"Extending harmonic-percussive separation of audio."
15th International Society for Music Information Retrieval Conference (ISMIR 2014),
Taipei, Taiwan, 2014.
Parameters
----------
S : np.ndarray [shape=(d, n)]
input spectrogram. May be real (magnitude) or complex.
kernel_size : int or tuple (kernel_harmonic, kernel_percussive)
kernel size(s) for the median filters.
- If scalar, the same size is used for both harmonic and percussive.
- If tuple, the first value specifies the width of the
harmonic filter, and the second value specifies the width
of the percussive filter.
power : float > 0 [scalar]
Exponent for the Wiener filter when constructing soft mask matrices.
mask : bool
Return the masking matrices instead of components.
Masking matrices contain non-negative real values that
can be used to measure the assignment of energy from `S`
into harmonic or percussive components.
Components can be recovered by multiplying `S * mask_H`
or `S * mask_P`.
margin : float or tuple (margin_harmonic, margin_percussive)
margin size(s) for the masks (as described in [2]_)
- If scalar, the same size is used for both harmonic and percussive.
- If tuple, the first value specifies the margin of the
harmonic mask, and the second value specifies the margin
of the percussive mask.
Returns
-------
harmonic : np.ndarray [shape=(d, n)]
harmonic component (or mask)
percussive : np.ndarray [shape=(d, n)]
percussive component (or mask)
See Also
--------
util.softmask
Notes
-----
This function caches at level 30.
Examples
--------
Separate into harmonic and percussive
>>> y, sr = librosa.load(librosa.util.example_audio_file(), duration=15)
>>> D = librosa.stft(y)
>>> H, P = librosa.decompose.hpss(D)
>>> import matplotlib.pyplot as plt
>>> plt.figure()
>>> plt.subplot(3, 1, 1)
>>> librosa.display.specshow(librosa.amplitude_to_db(np.abs(D),
... ref=np.max),
... y_axis='log')
>>> plt.colorbar(format='%+2.0f dB')
>>> plt.title('Full power spectrogram')
>>> plt.subplot(3, 1, 2)
>>> librosa.display.specshow(librosa.amplitude_to_db(np.abs(H),
... ref=np.max),
... y_axis='log')
>>> plt.colorbar(format='%+2.0f dB')
>>> plt.title('Harmonic power spectrogram')
>>> plt.subplot(3, 1, 3)
>>> librosa.display.specshow(librosa.amplitude_to_db(np.abs(P),
... ref=np.max),
... y_axis='log')
>>> plt.colorbar(format='%+2.0f dB')
>>> plt.title('Percussive power spectrogram')
>>> plt.tight_layout()
Or with a narrower horizontal filter
>>> H, P = librosa.decompose.hpss(D, kernel_size=(13, 31))
Just get harmonic/percussive masks, not the spectra
>>> mask_H, mask_P = librosa.decompose.hpss(D, mask=True)
>>> mask_H
array([[ 1.000e+00, 1.469e-01, ..., 2.648e-03, 2.164e-03],
[ 1.000e+00, 2.368e-01, ..., 9.413e-03, 7.703e-03],
...,
[ 8.869e-01, 5.673e-02, ..., 4.603e-02, 1.247e-05],
[ 7.068e-01, 2.194e-02, ..., 4.453e-02, 1.205e-05]], dtype=float32)
>>> mask_P
array([[ 2.858e-05, 8.531e-01, ..., 9.974e-01, 9.978e-01],
[ 1.586e-05, 7.632e-01, ..., 9.906e-01, 9.923e-01],
...,
[ 1.131e-01, 9.433e-01, ..., 9.540e-01, 1.000e+00],
[ 2.932e-01, 9.781e-01, ..., 9.555e-01, 1.000e+00]], dtype=float32)
Separate into harmonic/percussive/residual components by using a margin > 1.0
>>> H, P = librosa.decompose.hpss(D, margin=3.0)
>>> R = D - (H+P)
>>> y_harm = librosa.core.istft(H)
>>> y_perc = librosa.core.istft(P)
>>> y_resi = librosa.core.istft(R)
Get a more isolated percussive component by widening its margin
>>> H, P = librosa.decompose.hpss(D, margin=(1.0,5.0))
"""
if np.iscomplexobj(S):
(S, phase) = core.magphase(S) # depends on [control=['if'], data=[]]
else:
phase = 1
if np.isscalar(kernel_size):
win_harm = kernel_size
win_perc = kernel_size # depends on [control=['if'], data=[]]
else:
win_harm = kernel_size[0]
win_perc = kernel_size[1]
if np.isscalar(margin):
margin_harm = margin
margin_perc = margin # depends on [control=['if'], data=[]]
else:
margin_harm = margin[0]
margin_perc = margin[1]
# margin minimum is 1.0
if margin_harm < 1 or margin_perc < 1:
raise ParameterError('Margins must be >= 1.0. A typical range is between 1 and 10.') # depends on [control=['if'], data=[]]
# Compute median filters. Pre-allocation here preserves memory layout.
harm = np.empty_like(S)
harm[:] = median_filter(S, size=(1, win_harm), mode='reflect')
perc = np.empty_like(S)
perc[:] = median_filter(S, size=(win_perc, 1), mode='reflect')
split_zeros = margin_harm == 1 and margin_perc == 1
mask_harm = util.softmask(harm, perc * margin_harm, power=power, split_zeros=split_zeros)
mask_perc = util.softmask(perc, harm * margin_perc, power=power, split_zeros=split_zeros)
if mask:
return (mask_harm, mask_perc) # depends on [control=['if'], data=[]]
return (S * mask_harm * phase, S * mask_perc * phase)
|
def extract_feature_dependent_feature(self, extractor, force_extraction=False, verbose=0, add_args=None,
custom_name=None):
"""
Extracts a feature which may be dependent on other features and stores it in the database
Parameters
----------
extractor : function, which takes the path of a data point, a dictionary of all other features and *args as
parameters and returns a feature
force_extraction : boolean, if True - will re-extract feature even if a feature with this name already
exists in the database, otherwise, will only extract if the feature doesn't exist in the database.
default value: False
verbose : int, if bigger than 0, will print the current number of the file for which data is being extracted
add_args : optional arguments for the extractor (list/dictionary/tuple/whatever). if None, the
extractor should take only one input argument - the file path. default value: None
custom_name : string, optional name for the feature (it will be stored in the database with the custom_name
instead of extractor function name). if None, the extractor function name will be used. default value: None
Returns
-------
None
"""
if self._prepopulated is False:
raise errors.EmptyDatabase(self.dbpath)
else:
return extract_feature_dependent_feature_base(self.dbpath, self.path_to_set, self._set_object, extractor,
force_extraction, verbose, add_args, custom_name)
|
def function[extract_feature_dependent_feature, parameter[self, extractor, force_extraction, verbose, add_args, custom_name]]:
constant[
Extracts a feature which may be dependent on other features and stores it in the database
Parameters
----------
extractor : function, which takes the path of a data point, a dictionary of all other features and *args as
parameters and returns a feature
force_extraction : boolean, if True - will re-extract feature even if a feature with this name already
exists in the database, otherwise, will only extract if the feature doesn't exist in the database.
default value: False
verbose : int, if bigger than 0, will print the current number of the file for which data is being extracted
add_args : optional arguments for the extractor (list/dictionary/tuple/whatever). if None, the
extractor should take only one input argument - the file path. default value: None
custom_name : string, optional name for the feature (it will be stored in the database with the custom_name
instead of extractor function name). if None, the extractor function name will be used. default value: None
Returns
-------
None
]
if compare[name[self]._prepopulated is constant[False]] begin[:]
<ast.Raise object at 0x7da207f99030>
|
keyword[def] identifier[extract_feature_dependent_feature] ( identifier[self] , identifier[extractor] , identifier[force_extraction] = keyword[False] , identifier[verbose] = literal[int] , identifier[add_args] = keyword[None] ,
identifier[custom_name] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[_prepopulated] keyword[is] keyword[False] :
keyword[raise] identifier[errors] . identifier[EmptyDatabase] ( identifier[self] . identifier[dbpath] )
keyword[else] :
keyword[return] identifier[extract_feature_dependent_feature_base] ( identifier[self] . identifier[dbpath] , identifier[self] . identifier[path_to_set] , identifier[self] . identifier[_set_object] , identifier[extractor] ,
identifier[force_extraction] , identifier[verbose] , identifier[add_args] , identifier[custom_name] )
|
def extract_feature_dependent_feature(self, extractor, force_extraction=False, verbose=0, add_args=None, custom_name=None):
"""
Extracts a feature which may be dependent on other features and stores it in the database
Parameters
----------
extractor : function, which takes the path of a data point, a dictionary of all other features and *args as
parameters and returns a feature
force_extraction : boolean, if True - will re-extract feature even if a feature with this name already
exists in the database, otherwise, will only extract if the feature doesn't exist in the database.
default value: False
verbose : int, if bigger than 0, will print the current number of the file for which data is being extracted
add_args : optional arguments for the extractor (list/dictionary/tuple/whatever). if None, the
extractor should take only one input argument - the file path. default value: None
custom_name : string, optional name for the feature (it will be stored in the database with the custom_name
instead of extractor function name). if None, the extractor function name will be used. default value: None
Returns
-------
None
"""
if self._prepopulated is False:
raise errors.EmptyDatabase(self.dbpath) # depends on [control=['if'], data=[]]
else:
return extract_feature_dependent_feature_base(self.dbpath, self.path_to_set, self._set_object, extractor, force_extraction, verbose, add_args, custom_name)
|
def _hash_scalar(val, encoding='utf8', hash_key=None):
"""
Hash scalar value
Returns
-------
1d uint64 numpy array of hash value, of length 1
"""
if isna(val):
# this is to be consistent with the _hash_categorical implementation
return np.array([np.iinfo(np.uint64).max], dtype='u8')
if getattr(val, 'tzinfo', None) is not None:
# for tz-aware datetimes, we need the underlying naive UTC value and
# not the tz aware object or pd extension type (as
# infer_dtype_from_scalar would do)
if not isinstance(val, tslibs.Timestamp):
val = tslibs.Timestamp(val)
val = val.tz_convert(None)
dtype, val = infer_dtype_from_scalar(val)
vals = np.array([val], dtype=dtype)
return hash_array(vals, hash_key=hash_key, encoding=encoding,
categorize=False)
|
def function[_hash_scalar, parameter[val, encoding, hash_key]]:
constant[
Hash scalar value
Returns
-------
1d uint64 numpy array of hash value, of length 1
]
if call[name[isna], parameter[name[val]]] begin[:]
return[call[name[np].array, parameter[list[[<ast.Attribute object at 0x7da207f00430>]]]]]
if compare[call[name[getattr], parameter[name[val], constant[tzinfo], constant[None]]] is_not constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da207f02920> begin[:]
variable[val] assign[=] call[name[tslibs].Timestamp, parameter[name[val]]]
variable[val] assign[=] call[name[val].tz_convert, parameter[constant[None]]]
<ast.Tuple object at 0x7da207f008b0> assign[=] call[name[infer_dtype_from_scalar], parameter[name[val]]]
variable[vals] assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da207f016c0>]]]]
return[call[name[hash_array], parameter[name[vals]]]]
|
keyword[def] identifier[_hash_scalar] ( identifier[val] , identifier[encoding] = literal[string] , identifier[hash_key] = keyword[None] ):
literal[string]
keyword[if] identifier[isna] ( identifier[val] ):
keyword[return] identifier[np] . identifier[array] ([ identifier[np] . identifier[iinfo] ( identifier[np] . identifier[uint64] ). identifier[max] ], identifier[dtype] = literal[string] )
keyword[if] identifier[getattr] ( identifier[val] , literal[string] , keyword[None] ) keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[val] , identifier[tslibs] . identifier[Timestamp] ):
identifier[val] = identifier[tslibs] . identifier[Timestamp] ( identifier[val] )
identifier[val] = identifier[val] . identifier[tz_convert] ( keyword[None] )
identifier[dtype] , identifier[val] = identifier[infer_dtype_from_scalar] ( identifier[val] )
identifier[vals] = identifier[np] . identifier[array] ([ identifier[val] ], identifier[dtype] = identifier[dtype] )
keyword[return] identifier[hash_array] ( identifier[vals] , identifier[hash_key] = identifier[hash_key] , identifier[encoding] = identifier[encoding] ,
identifier[categorize] = keyword[False] )
|
def _hash_scalar(val, encoding='utf8', hash_key=None):
"""
Hash scalar value
Returns
-------
1d uint64 numpy array of hash value, of length 1
"""
if isna(val):
# this is to be consistent with the _hash_categorical implementation
return np.array([np.iinfo(np.uint64).max], dtype='u8') # depends on [control=['if'], data=[]]
if getattr(val, 'tzinfo', None) is not None:
# for tz-aware datetimes, we need the underlying naive UTC value and
# not the tz aware object or pd extension type (as
# infer_dtype_from_scalar would do)
if not isinstance(val, tslibs.Timestamp):
val = tslibs.Timestamp(val) # depends on [control=['if'], data=[]]
val = val.tz_convert(None) # depends on [control=['if'], data=[]]
(dtype, val) = infer_dtype_from_scalar(val)
vals = np.array([val], dtype=dtype)
return hash_array(vals, hash_key=hash_key, encoding=encoding, categorize=False)
|
def close(self):
"""Closes all currently open file pointers"""
if not self.active:
return
self.active = False
if self._file:
self._file.close()
self._sincedb_update_position(force_update=True)
if self._current_event:
event = '\n'.join(self._current_event)
self._current_event.clear()
self._callback_wrapper([event])
|
def function[close, parameter[self]]:
constant[Closes all currently open file pointers]
if <ast.UnaryOp object at 0x7da18f58e020> begin[:]
return[None]
name[self].active assign[=] constant[False]
if name[self]._file begin[:]
call[name[self]._file.close, parameter[]]
call[name[self]._sincedb_update_position, parameter[]]
if name[self]._current_event begin[:]
variable[event] assign[=] call[constant[
].join, parameter[name[self]._current_event]]
call[name[self]._current_event.clear, parameter[]]
call[name[self]._callback_wrapper, parameter[list[[<ast.Name object at 0x7da18f58ded0>]]]]
|
keyword[def] identifier[close] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[active] :
keyword[return]
identifier[self] . identifier[active] = keyword[False]
keyword[if] identifier[self] . identifier[_file] :
identifier[self] . identifier[_file] . identifier[close] ()
identifier[self] . identifier[_sincedb_update_position] ( identifier[force_update] = keyword[True] )
keyword[if] identifier[self] . identifier[_current_event] :
identifier[event] = literal[string] . identifier[join] ( identifier[self] . identifier[_current_event] )
identifier[self] . identifier[_current_event] . identifier[clear] ()
identifier[self] . identifier[_callback_wrapper] ([ identifier[event] ])
|
def close(self):
"""Closes all currently open file pointers"""
if not self.active:
return # depends on [control=['if'], data=[]]
self.active = False
if self._file:
self._file.close()
self._sincedb_update_position(force_update=True) # depends on [control=['if'], data=[]]
if self._current_event:
event = '\n'.join(self._current_event)
self._current_event.clear()
self._callback_wrapper([event]) # depends on [control=['if'], data=[]]
|
def inflate_nd_checker(identifier, definition):
"""
Inflate a no-data checker from a basic definition.
Args:
identifier (str): the no-data checker identifier / name.
definition (bool/dict): a boolean acting as "passes" or a full
dict definition with "passes" and "allow_failure".
Returns:
Checker: a checker instance.
Raises:
ValueError: when the definition type is not bool or dict.
"""
if isinstance(definition, bool):
return Checker(name=identifier, passes=definition)
elif isinstance(definition, dict):
return Checker(definition.pop('name', identifier), **definition)
else:
raise ValueError('%s type is not supported for no-data checkers, '
'use bool or dict' % type(definition))
|
def function[inflate_nd_checker, parameter[identifier, definition]]:
constant[
Inflate a no-data checker from a basic definition.
Args:
identifier (str): the no-data checker identifier / name.
definition (bool/dict): a boolean acting as "passes" or a full
dict definition with "passes" and "allow_failure".
Returns:
Checker: a checker instance.
Raises:
ValueError: when the definition type is not bool or dict.
]
if call[name[isinstance], parameter[name[definition], name[bool]]] begin[:]
return[call[name[Checker], parameter[]]]
|
keyword[def] identifier[inflate_nd_checker] ( identifier[identifier] , identifier[definition] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[definition] , identifier[bool] ):
keyword[return] identifier[Checker] ( identifier[name] = identifier[identifier] , identifier[passes] = identifier[definition] )
keyword[elif] identifier[isinstance] ( identifier[definition] , identifier[dict] ):
keyword[return] identifier[Checker] ( identifier[definition] . identifier[pop] ( literal[string] , identifier[identifier] ),** identifier[definition] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] % identifier[type] ( identifier[definition] ))
|
def inflate_nd_checker(identifier, definition):
"""
Inflate a no-data checker from a basic definition.
Args:
identifier (str): the no-data checker identifier / name.
definition (bool/dict): a boolean acting as "passes" or a full
dict definition with "passes" and "allow_failure".
Returns:
Checker: a checker instance.
Raises:
ValueError: when the definition type is not bool or dict.
"""
if isinstance(definition, bool):
return Checker(name=identifier, passes=definition) # depends on [control=['if'], data=[]]
elif isinstance(definition, dict):
return Checker(definition.pop('name', identifier), **definition) # depends on [control=['if'], data=[]]
else:
raise ValueError('%s type is not supported for no-data checkers, use bool or dict' % type(definition))
|
def verify_keys(app_req, required_keys, issuer=None):
"""
Verify all JWT object keys listed in required_keys.
Each required key is specified as a dot-separated path.
The key values are returned as a list ordered by how
you specified them.
Take this JWT for example::
{
"iss": "...",
"aud": "...",
"request": {
"pricePoint": 1,
}
}
You could verify the presence of all keys and retrieve
their values like this::
iss, aud, price = verify_keys(jwt_dict,
('iss',
'aud',
'request.pricePoint'))
Do you see how the comma separated assigned variables
match the keys that were extracted? The order is important.
"""
if not issuer:
issuer = _get_issuer(app_req=app_req)
key_vals = []
for key_path in required_keys:
parent = app_req
for kp in key_path.split('.'):
if not isinstance(parent, dict):
raise InvalidJWT('JWT is missing %r: %s is not a dict'
% (key_path, kp), issuer=issuer)
val = parent.get(kp, None)
if not val:
raise InvalidJWT('JWT is missing %r: %s is not a valid key'
% (key_path, kp), issuer=issuer)
parent = val
key_vals.append(parent) # last value of key_path
return key_vals
|
def function[verify_keys, parameter[app_req, required_keys, issuer]]:
constant[
Verify all JWT object keys listed in required_keys.
Each required key is specified as a dot-separated path.
The key values are returned as a list ordered by how
you specified them.
Take this JWT for example::
{
"iss": "...",
"aud": "...",
"request": {
"pricePoint": 1,
}
}
You could verify the presence of all keys and retrieve
their values like this::
iss, aud, price = verify_keys(jwt_dict,
('iss',
'aud',
'request.pricePoint'))
Do you see how the comma separated assigned variables
match the keys that were extracted? The order is important.
]
if <ast.UnaryOp object at 0x7da1b09b7670> begin[:]
variable[issuer] assign[=] call[name[_get_issuer], parameter[]]
variable[key_vals] assign[=] list[[]]
for taget[name[key_path]] in starred[name[required_keys]] begin[:]
variable[parent] assign[=] name[app_req]
for taget[name[kp]] in starred[call[name[key_path].split, parameter[constant[.]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b09ea7a0> begin[:]
<ast.Raise object at 0x7da1b09e9ab0>
variable[val] assign[=] call[name[parent].get, parameter[name[kp], constant[None]]]
if <ast.UnaryOp object at 0x7da1b09e9e40> begin[:]
<ast.Raise object at 0x7da1b09e9db0>
variable[parent] assign[=] name[val]
call[name[key_vals].append, parameter[name[parent]]]
return[name[key_vals]]
|
keyword[def] identifier[verify_keys] ( identifier[app_req] , identifier[required_keys] , identifier[issuer] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[issuer] :
identifier[issuer] = identifier[_get_issuer] ( identifier[app_req] = identifier[app_req] )
identifier[key_vals] =[]
keyword[for] identifier[key_path] keyword[in] identifier[required_keys] :
identifier[parent] = identifier[app_req]
keyword[for] identifier[kp] keyword[in] identifier[key_path] . identifier[split] ( literal[string] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[parent] , identifier[dict] ):
keyword[raise] identifier[InvalidJWT] ( literal[string]
%( identifier[key_path] , identifier[kp] ), identifier[issuer] = identifier[issuer] )
identifier[val] = identifier[parent] . identifier[get] ( identifier[kp] , keyword[None] )
keyword[if] keyword[not] identifier[val] :
keyword[raise] identifier[InvalidJWT] ( literal[string]
%( identifier[key_path] , identifier[kp] ), identifier[issuer] = identifier[issuer] )
identifier[parent] = identifier[val]
identifier[key_vals] . identifier[append] ( identifier[parent] )
keyword[return] identifier[key_vals]
|
def verify_keys(app_req, required_keys, issuer=None):
"""
Verify all JWT object keys listed in required_keys.
Each required key is specified as a dot-separated path.
The key values are returned as a list ordered by how
you specified them.
Take this JWT for example::
{
"iss": "...",
"aud": "...",
"request": {
"pricePoint": 1,
}
}
You could verify the presence of all keys and retrieve
their values like this::
iss, aud, price = verify_keys(jwt_dict,
('iss',
'aud',
'request.pricePoint'))
Do you see how the comma separated assigned variables
match the keys that were extracted? The order is important.
"""
if not issuer:
issuer = _get_issuer(app_req=app_req) # depends on [control=['if'], data=[]]
key_vals = []
for key_path in required_keys:
parent = app_req
for kp in key_path.split('.'):
if not isinstance(parent, dict):
raise InvalidJWT('JWT is missing %r: %s is not a dict' % (key_path, kp), issuer=issuer) # depends on [control=['if'], data=[]]
val = parent.get(kp, None)
if not val:
raise InvalidJWT('JWT is missing %r: %s is not a valid key' % (key_path, kp), issuer=issuer) # depends on [control=['if'], data=[]]
parent = val # depends on [control=['for'], data=['kp']]
key_vals.append(parent) # last value of key_path # depends on [control=['for'], data=['key_path']]
return key_vals
|
def linearize(self, index=0):
'''Linearize the Sequence at an index.
:param index: index at which to linearize.
:type index: int
:returns: A linearized version of the current sequence.
:rtype: coral.sequence._sequence.Sequence
:raises: ValueError if the input is a linear sequence.
'''
if not self.circular and index != 0:
raise ValueError('Cannot relinearize a linear sequence.')
copy = self.copy()
# Snip at the index
if index:
return copy[index:] + copy[:index]
copy.circular = False
return copy
|
def function[linearize, parameter[self, index]]:
constant[Linearize the Sequence at an index.
:param index: index at which to linearize.
:type index: int
:returns: A linearized version of the current sequence.
:rtype: coral.sequence._sequence.Sequence
:raises: ValueError if the input is a linear sequence.
]
if <ast.BoolOp object at 0x7da1b057b130> begin[:]
<ast.Raise object at 0x7da1b057b190>
variable[copy] assign[=] call[name[self].copy, parameter[]]
if name[index] begin[:]
return[binary_operation[call[name[copy]][<ast.Slice object at 0x7da1b057bb80>] + call[name[copy]][<ast.Slice object at 0x7da1b057b3d0>]]]
name[copy].circular assign[=] constant[False]
return[name[copy]]
|
keyword[def] identifier[linearize] ( identifier[self] , identifier[index] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[circular] keyword[and] identifier[index] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[copy] = identifier[self] . identifier[copy] ()
keyword[if] identifier[index] :
keyword[return] identifier[copy] [ identifier[index] :]+ identifier[copy] [: identifier[index] ]
identifier[copy] . identifier[circular] = keyword[False]
keyword[return] identifier[copy]
|
def linearize(self, index=0):
"""Linearize the Sequence at an index.
:param index: index at which to linearize.
:type index: int
:returns: A linearized version of the current sequence.
:rtype: coral.sequence._sequence.Sequence
:raises: ValueError if the input is a linear sequence.
"""
if not self.circular and index != 0:
raise ValueError('Cannot relinearize a linear sequence.') # depends on [control=['if'], data=[]]
copy = self.copy()
# Snip at the index
if index:
return copy[index:] + copy[:index] # depends on [control=['if'], data=[]]
copy.circular = False
return copy
|
def input_file(self, filename, lines=None, expected=None, line_offset=0):
"""Run all checks on a Python source file."""
if self.options.verbose:
print('checking %s' % filename)
fchecker = self.checker_class(
filename, lines=lines, options=self.options)
return fchecker.check_all(expected=expected, line_offset=line_offset)
|
def function[input_file, parameter[self, filename, lines, expected, line_offset]]:
constant[Run all checks on a Python source file.]
if name[self].options.verbose begin[:]
call[name[print], parameter[binary_operation[constant[checking %s] <ast.Mod object at 0x7da2590d6920> name[filename]]]]
variable[fchecker] assign[=] call[name[self].checker_class, parameter[name[filename]]]
return[call[name[fchecker].check_all, parameter[]]]
|
keyword[def] identifier[input_file] ( identifier[self] , identifier[filename] , identifier[lines] = keyword[None] , identifier[expected] = keyword[None] , identifier[line_offset] = literal[int] ):
literal[string]
keyword[if] identifier[self] . identifier[options] . identifier[verbose] :
identifier[print] ( literal[string] % identifier[filename] )
identifier[fchecker] = identifier[self] . identifier[checker_class] (
identifier[filename] , identifier[lines] = identifier[lines] , identifier[options] = identifier[self] . identifier[options] )
keyword[return] identifier[fchecker] . identifier[check_all] ( identifier[expected] = identifier[expected] , identifier[line_offset] = identifier[line_offset] )
|
def input_file(self, filename, lines=None, expected=None, line_offset=0):
"""Run all checks on a Python source file."""
if self.options.verbose:
print('checking %s' % filename) # depends on [control=['if'], data=[]]
fchecker = self.checker_class(filename, lines=lines, options=self.options)
return fchecker.check_all(expected=expected, line_offset=line_offset)
|
def least_loaded_node(self):
"""Choose the node with fewest outstanding requests, with fallbacks.
This method will prefer a node with an existing connection and no
in-flight-requests. If no such node is found, a node will be chosen
randomly from disconnected nodes that are not "blacked out" (i.e.,
are not subject to a reconnect backoff). If no node metadata has been
obtained, will return a bootstrap node (subject to exponential backoff).
Returns:
node_id or None if no suitable node was found
"""
nodes = [broker.nodeId for broker in self.cluster.brokers()]
random.shuffle(nodes)
inflight = float('inf')
found = None
for node_id in nodes:
conn = self._conns.get(node_id)
connected = conn is not None and conn.connected()
blacked_out = conn is not None and conn.blacked_out()
curr_inflight = len(conn.in_flight_requests) if conn is not None else 0
if connected and curr_inflight == 0:
# if we find an established connection
# with no in-flight requests, we can stop right away
return node_id
elif not blacked_out and curr_inflight < inflight:
# otherwise if this is the best we have found so far, record that
inflight = curr_inflight
found = node_id
if found is not None:
return found
return None
|
def function[least_loaded_node, parameter[self]]:
constant[Choose the node with fewest outstanding requests, with fallbacks.
This method will prefer a node with an existing connection and no
in-flight-requests. If no such node is found, a node will be chosen
randomly from disconnected nodes that are not "blacked out" (i.e.,
are not subject to a reconnect backoff). If no node metadata has been
obtained, will return a bootstrap node (subject to exponential backoff).
Returns:
node_id or None if no suitable node was found
]
variable[nodes] assign[=] <ast.ListComp object at 0x7da1b21f3790>
call[name[random].shuffle, parameter[name[nodes]]]
variable[inflight] assign[=] call[name[float], parameter[constant[inf]]]
variable[found] assign[=] constant[None]
for taget[name[node_id]] in starred[name[nodes]] begin[:]
variable[conn] assign[=] call[name[self]._conns.get, parameter[name[node_id]]]
variable[connected] assign[=] <ast.BoolOp object at 0x7da18eb57250>
variable[blacked_out] assign[=] <ast.BoolOp object at 0x7da18eb55e10>
variable[curr_inflight] assign[=] <ast.IfExp object at 0x7da18eb56ad0>
if <ast.BoolOp object at 0x7da18eb57100> begin[:]
return[name[node_id]]
if compare[name[found] is_not constant[None]] begin[:]
return[name[found]]
return[constant[None]]
|
keyword[def] identifier[least_loaded_node] ( identifier[self] ):
literal[string]
identifier[nodes] =[ identifier[broker] . identifier[nodeId] keyword[for] identifier[broker] keyword[in] identifier[self] . identifier[cluster] . identifier[brokers] ()]
identifier[random] . identifier[shuffle] ( identifier[nodes] )
identifier[inflight] = identifier[float] ( literal[string] )
identifier[found] = keyword[None]
keyword[for] identifier[node_id] keyword[in] identifier[nodes] :
identifier[conn] = identifier[self] . identifier[_conns] . identifier[get] ( identifier[node_id] )
identifier[connected] = identifier[conn] keyword[is] keyword[not] keyword[None] keyword[and] identifier[conn] . identifier[connected] ()
identifier[blacked_out] = identifier[conn] keyword[is] keyword[not] keyword[None] keyword[and] identifier[conn] . identifier[blacked_out] ()
identifier[curr_inflight] = identifier[len] ( identifier[conn] . identifier[in_flight_requests] ) keyword[if] identifier[conn] keyword[is] keyword[not] keyword[None] keyword[else] literal[int]
keyword[if] identifier[connected] keyword[and] identifier[curr_inflight] == literal[int] :
keyword[return] identifier[node_id]
keyword[elif] keyword[not] identifier[blacked_out] keyword[and] identifier[curr_inflight] < identifier[inflight] :
identifier[inflight] = identifier[curr_inflight]
identifier[found] = identifier[node_id]
keyword[if] identifier[found] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[found]
keyword[return] keyword[None]
|
def least_loaded_node(self):
"""Choose the node with fewest outstanding requests, with fallbacks.
This method will prefer a node with an existing connection and no
in-flight-requests. If no such node is found, a node will be chosen
randomly from disconnected nodes that are not "blacked out" (i.e.,
are not subject to a reconnect backoff). If no node metadata has been
obtained, will return a bootstrap node (subject to exponential backoff).
Returns:
node_id or None if no suitable node was found
"""
nodes = [broker.nodeId for broker in self.cluster.brokers()]
random.shuffle(nodes)
inflight = float('inf')
found = None
for node_id in nodes:
conn = self._conns.get(node_id)
connected = conn is not None and conn.connected()
blacked_out = conn is not None and conn.blacked_out()
curr_inflight = len(conn.in_flight_requests) if conn is not None else 0
if connected and curr_inflight == 0:
# if we find an established connection
# with no in-flight requests, we can stop right away
return node_id # depends on [control=['if'], data=[]]
elif not blacked_out and curr_inflight < inflight:
# otherwise if this is the best we have found so far, record that
inflight = curr_inflight
found = node_id # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node_id']]
if found is not None:
return found # depends on [control=['if'], data=['found']]
return None
|
def _all_help_methods(self):
""" Returns a list of all the Workbench commands"""
methods = {name:method for name, method in inspect.getmembers(self, predicate=inspect.isroutine) if not name.startswith('_')}
return methods
|
def function[_all_help_methods, parameter[self]]:
constant[ Returns a list of all the Workbench commands]
variable[methods] assign[=] <ast.DictComp object at 0x7da18dc07250>
return[name[methods]]
|
keyword[def] identifier[_all_help_methods] ( identifier[self] ):
literal[string]
identifier[methods] ={ identifier[name] : identifier[method] keyword[for] identifier[name] , identifier[method] keyword[in] identifier[inspect] . identifier[getmembers] ( identifier[self] , identifier[predicate] = identifier[inspect] . identifier[isroutine] ) keyword[if] keyword[not] identifier[name] . identifier[startswith] ( literal[string] )}
keyword[return] identifier[methods]
|
def _all_help_methods(self):
""" Returns a list of all the Workbench commands"""
methods = {name: method for (name, method) in inspect.getmembers(self, predicate=inspect.isroutine) if not name.startswith('_')}
return methods
|
def summary(self, variables=None, alpha=0.05, start=0, batches=100,
chain=None, roundto=3):
"""
Generate a pretty-printed summary of the model's variables.
:Parameters:
alpha : float
The alpha level for generating posterior intervals. Defaults to
0.05.
start : int
The starting index from which to summarize (each) chain. Defaults
to zero.
batches : int
Batch size for calculating standard deviation for non-independent
samples. Defaults to 100.
chain : int
The index for which chain to summarize. Defaults to None (all
chains).
roundto : int
The number of digits to round posterior statistics.
quantiles : tuple or list
The desired quantiles to be calculated. Defaults to (2.5, 25, 50, 75, 97.5).
"""
# If no names provided, run them all
if variables is None:
variables = self._variables_to_tally
else:
variables = [
self.__dict__[
i] for i in variables if self.__dict__[
i] in self._variables_to_tally]
# Loop over nodes
for variable in variables:
variable.summary(
alpha=alpha, start=start, batches=batches, chain=chain,
roundto=roundto)
|
def function[summary, parameter[self, variables, alpha, start, batches, chain, roundto]]:
constant[
Generate a pretty-printed summary of the model's variables.
:Parameters:
alpha : float
The alpha level for generating posterior intervals. Defaults to
0.05.
start : int
The starting index from which to summarize (each) chain. Defaults
to zero.
batches : int
Batch size for calculating standard deviation for non-independent
samples. Defaults to 100.
chain : int
The index for which chain to summarize. Defaults to None (all
chains).
roundto : int
The number of digits to round posterior statistics.
quantiles : tuple or list
The desired quantiles to be calculated. Defaults to (2.5, 25, 50, 75, 97.5).
]
if compare[name[variables] is constant[None]] begin[:]
variable[variables] assign[=] name[self]._variables_to_tally
for taget[name[variable]] in starred[name[variables]] begin[:]
call[name[variable].summary, parameter[]]
|
keyword[def] identifier[summary] ( identifier[self] , identifier[variables] = keyword[None] , identifier[alpha] = literal[int] , identifier[start] = literal[int] , identifier[batches] = literal[int] ,
identifier[chain] = keyword[None] , identifier[roundto] = literal[int] ):
literal[string]
keyword[if] identifier[variables] keyword[is] keyword[None] :
identifier[variables] = identifier[self] . identifier[_variables_to_tally]
keyword[else] :
identifier[variables] =[
identifier[self] . identifier[__dict__] [
identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[variables] keyword[if] identifier[self] . identifier[__dict__] [
identifier[i] ] keyword[in] identifier[self] . identifier[_variables_to_tally] ]
keyword[for] identifier[variable] keyword[in] identifier[variables] :
identifier[variable] . identifier[summary] (
identifier[alpha] = identifier[alpha] , identifier[start] = identifier[start] , identifier[batches] = identifier[batches] , identifier[chain] = identifier[chain] ,
identifier[roundto] = identifier[roundto] )
|
def summary(self, variables=None, alpha=0.05, start=0, batches=100, chain=None, roundto=3):
"""
Generate a pretty-printed summary of the model's variables.
:Parameters:
alpha : float
The alpha level for generating posterior intervals. Defaults to
0.05.
start : int
The starting index from which to summarize (each) chain. Defaults
to zero.
batches : int
Batch size for calculating standard deviation for non-independent
samples. Defaults to 100.
chain : int
The index for which chain to summarize. Defaults to None (all
chains).
roundto : int
The number of digits to round posterior statistics.
quantiles : tuple or list
The desired quantiles to be calculated. Defaults to (2.5, 25, 50, 75, 97.5).
"""
# If no names provided, run them all
if variables is None:
variables = self._variables_to_tally # depends on [control=['if'], data=['variables']]
else:
variables = [self.__dict__[i] for i in variables if self.__dict__[i] in self._variables_to_tally]
# Loop over nodes
for variable in variables:
variable.summary(alpha=alpha, start=start, batches=batches, chain=chain, roundto=roundto) # depends on [control=['for'], data=['variable']]
|
def get_selection_as_executable_code(self):
"""Return selected text as a processed text,
to be executable in a Python/IPython interpreter"""
ls = self.get_line_separator()
_indent = lambda line: len(line)-len(line.lstrip())
line_from, line_to = self.get_selection_bounds()
text = self.get_selected_text()
if not text:
return
lines = text.split(ls)
if len(lines) > 1:
# Multiline selection -> eventually fixing indentation
original_indent = _indent(self.get_text_line(line_from))
text = (" "*(original_indent-_indent(lines[0])))+text
# If there is a common indent to all lines, find it.
# Moving from bottom line to top line ensures that blank
# lines inherit the indent of the line *below* it,
# which is the desired behavior.
min_indent = 999
current_indent = 0
lines = text.split(ls)
for i in range(len(lines)-1, -1, -1):
line = lines[i]
if line.strip():
current_indent = _indent(line)
min_indent = min(current_indent, min_indent)
else:
lines[i] = ' ' * current_indent
if min_indent:
lines = [line[min_indent:] for line in lines]
# Remove any leading whitespace or comment lines
# since they confuse the reserved word detector that follows below
lines_removed = 0
while lines:
first_line = lines[0].lstrip()
if first_line == '' or first_line[0] == '#':
lines_removed += 1
lines.pop(0)
else:
break
# Add an EOL character after the last line of code so that it gets
# evaluated automatically by the console and any quote characters
# are separated from the triple quotes of runcell
lines.append(ls)
# Add removed lines back to have correct traceback line numbers
leading_lines_str = ls * lines_removed
return leading_lines_str + ls.join(lines)
|
def function[get_selection_as_executable_code, parameter[self]]:
constant[Return selected text as a processed text,
to be executable in a Python/IPython interpreter]
variable[ls] assign[=] call[name[self].get_line_separator, parameter[]]
variable[_indent] assign[=] <ast.Lambda object at 0x7da1b1f754b0>
<ast.Tuple object at 0x7da1b1f772e0> assign[=] call[name[self].get_selection_bounds, parameter[]]
variable[text] assign[=] call[name[self].get_selected_text, parameter[]]
if <ast.UnaryOp object at 0x7da1b1f76950> begin[:]
return[None]
variable[lines] assign[=] call[name[text].split, parameter[name[ls]]]
if compare[call[name[len], parameter[name[lines]]] greater[>] constant[1]] begin[:]
variable[original_indent] assign[=] call[name[_indent], parameter[call[name[self].get_text_line, parameter[name[line_from]]]]]
variable[text] assign[=] binary_operation[binary_operation[constant[ ] * binary_operation[name[original_indent] - call[name[_indent], parameter[call[name[lines]][constant[0]]]]]] + name[text]]
variable[min_indent] assign[=] constant[999]
variable[current_indent] assign[=] constant[0]
variable[lines] assign[=] call[name[text].split, parameter[name[ls]]]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[lines]]] - constant[1]], <ast.UnaryOp object at 0x7da1b1f75cf0>, <ast.UnaryOp object at 0x7da1b1f77eb0>]]] begin[:]
variable[line] assign[=] call[name[lines]][name[i]]
if call[name[line].strip, parameter[]] begin[:]
variable[current_indent] assign[=] call[name[_indent], parameter[name[line]]]
variable[min_indent] assign[=] call[name[min], parameter[name[current_indent], name[min_indent]]]
if name[min_indent] begin[:]
variable[lines] assign[=] <ast.ListComp object at 0x7da1b1f76350>
variable[lines_removed] assign[=] constant[0]
while name[lines] begin[:]
variable[first_line] assign[=] call[call[name[lines]][constant[0]].lstrip, parameter[]]
if <ast.BoolOp object at 0x7da1b1f742e0> begin[:]
<ast.AugAssign object at 0x7da1b1f747f0>
call[name[lines].pop, parameter[constant[0]]]
call[name[lines].append, parameter[name[ls]]]
variable[leading_lines_str] assign[=] binary_operation[name[ls] * name[lines_removed]]
return[binary_operation[name[leading_lines_str] + call[name[ls].join, parameter[name[lines]]]]]
|
keyword[def] identifier[get_selection_as_executable_code] ( identifier[self] ):
literal[string]
identifier[ls] = identifier[self] . identifier[get_line_separator] ()
identifier[_indent] = keyword[lambda] identifier[line] : identifier[len] ( identifier[line] )- identifier[len] ( identifier[line] . identifier[lstrip] ())
identifier[line_from] , identifier[line_to] = identifier[self] . identifier[get_selection_bounds] ()
identifier[text] = identifier[self] . identifier[get_selected_text] ()
keyword[if] keyword[not] identifier[text] :
keyword[return]
identifier[lines] = identifier[text] . identifier[split] ( identifier[ls] )
keyword[if] identifier[len] ( identifier[lines] )> literal[int] :
identifier[original_indent] = identifier[_indent] ( identifier[self] . identifier[get_text_line] ( identifier[line_from] ))
identifier[text] =( literal[string] *( identifier[original_indent] - identifier[_indent] ( identifier[lines] [ literal[int] ])))+ identifier[text]
identifier[min_indent] = literal[int]
identifier[current_indent] = literal[int]
identifier[lines] = identifier[text] . identifier[split] ( identifier[ls] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[lines] )- literal[int] ,- literal[int] ,- literal[int] ):
identifier[line] = identifier[lines] [ identifier[i] ]
keyword[if] identifier[line] . identifier[strip] ():
identifier[current_indent] = identifier[_indent] ( identifier[line] )
identifier[min_indent] = identifier[min] ( identifier[current_indent] , identifier[min_indent] )
keyword[else] :
identifier[lines] [ identifier[i] ]= literal[string] * identifier[current_indent]
keyword[if] identifier[min_indent] :
identifier[lines] =[ identifier[line] [ identifier[min_indent] :] keyword[for] identifier[line] keyword[in] identifier[lines] ]
identifier[lines_removed] = literal[int]
keyword[while] identifier[lines] :
identifier[first_line] = identifier[lines] [ literal[int] ]. identifier[lstrip] ()
keyword[if] identifier[first_line] == literal[string] keyword[or] identifier[first_line] [ literal[int] ]== literal[string] :
identifier[lines_removed] += literal[int]
identifier[lines] . identifier[pop] ( literal[int] )
keyword[else] :
keyword[break]
identifier[lines] . identifier[append] ( identifier[ls] )
identifier[leading_lines_str] = identifier[ls] * identifier[lines_removed]
keyword[return] identifier[leading_lines_str] + identifier[ls] . identifier[join] ( identifier[lines] )
|
def get_selection_as_executable_code(self):
"""Return selected text as a processed text,
to be executable in a Python/IPython interpreter"""
ls = self.get_line_separator()
_indent = lambda line: len(line) - len(line.lstrip())
(line_from, line_to) = self.get_selection_bounds()
text = self.get_selected_text()
if not text:
return # depends on [control=['if'], data=[]]
lines = text.split(ls)
if len(lines) > 1: # Multiline selection -> eventually fixing indentation
original_indent = _indent(self.get_text_line(line_from))
text = ' ' * (original_indent - _indent(lines[0])) + text # depends on [control=['if'], data=[]] # If there is a common indent to all lines, find it.
# Moving from bottom line to top line ensures that blank
# lines inherit the indent of the line *below* it,
# which is the desired behavior.
min_indent = 999
current_indent = 0
lines = text.split(ls)
for i in range(len(lines) - 1, -1, -1):
line = lines[i]
if line.strip():
current_indent = _indent(line)
min_indent = min(current_indent, min_indent) # depends on [control=['if'], data=[]]
else:
lines[i] = ' ' * current_indent # depends on [control=['for'], data=['i']]
if min_indent:
lines = [line[min_indent:] for line in lines] # depends on [control=['if'], data=[]] # Remove any leading whitespace or comment lines
# since they confuse the reserved word detector that follows below
lines_removed = 0
while lines:
first_line = lines[0].lstrip()
if first_line == '' or first_line[0] == '#':
lines_removed += 1
lines.pop(0) # depends on [control=['if'], data=[]]
else:
break # depends on [control=['while'], data=[]] # Add an EOL character after the last line of code so that it gets
# evaluated automatically by the console and any quote characters
# are separated from the triple quotes of runcell
lines.append(ls) # Add removed lines back to have correct traceback line numbers
leading_lines_str = ls * lines_removed
return leading_lines_str + ls.join(lines)
|
def update(self):
'''
Updates LaserData.
'''
if self.hasproxy():
laserD = LaserData()
values = []
data = self.proxy.getLaserData()
#laserD.values = laser.distanceData
for i in range (data.numLaser):
values.append(data.distanceData[i] / 1000.0)
laserD.maxAngle = data.maxAngle
laserD.minAngle = data.minAngle
laserD.maxRange = data.maxRange
laserD.minRange = data.minRange
laserD.values = values
self.lock.acquire()
self.laser = laserD
self.lock.release()
|
def function[update, parameter[self]]:
constant[
Updates LaserData.
]
if call[name[self].hasproxy, parameter[]] begin[:]
variable[laserD] assign[=] call[name[LaserData], parameter[]]
variable[values] assign[=] list[[]]
variable[data] assign[=] call[name[self].proxy.getLaserData, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[name[data].numLaser]]] begin[:]
call[name[values].append, parameter[binary_operation[call[name[data].distanceData][name[i]] / constant[1000.0]]]]
name[laserD].maxAngle assign[=] name[data].maxAngle
name[laserD].minAngle assign[=] name[data].minAngle
name[laserD].maxRange assign[=] name[data].maxRange
name[laserD].minRange assign[=] name[data].minRange
name[laserD].values assign[=] name[values]
call[name[self].lock.acquire, parameter[]]
name[self].laser assign[=] name[laserD]
call[name[self].lock.release, parameter[]]
|
keyword[def] identifier[update] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[hasproxy] ():
identifier[laserD] = identifier[LaserData] ()
identifier[values] =[]
identifier[data] = identifier[self] . identifier[proxy] . identifier[getLaserData] ()
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[data] . identifier[numLaser] ):
identifier[values] . identifier[append] ( identifier[data] . identifier[distanceData] [ identifier[i] ]/ literal[int] )
identifier[laserD] . identifier[maxAngle] = identifier[data] . identifier[maxAngle]
identifier[laserD] . identifier[minAngle] = identifier[data] . identifier[minAngle]
identifier[laserD] . identifier[maxRange] = identifier[data] . identifier[maxRange]
identifier[laserD] . identifier[minRange] = identifier[data] . identifier[minRange]
identifier[laserD] . identifier[values] = identifier[values]
identifier[self] . identifier[lock] . identifier[acquire] ()
identifier[self] . identifier[laser] = identifier[laserD]
identifier[self] . identifier[lock] . identifier[release] ()
|
def update(self):
"""
Updates LaserData.
"""
if self.hasproxy():
laserD = LaserData()
values = []
data = self.proxy.getLaserData()
#laserD.values = laser.distanceData
for i in range(data.numLaser):
values.append(data.distanceData[i] / 1000.0) # depends on [control=['for'], data=['i']]
laserD.maxAngle = data.maxAngle
laserD.minAngle = data.minAngle
laserD.maxRange = data.maxRange
laserD.minRange = data.minRange
laserD.values = values
self.lock.acquire()
self.laser = laserD
self.lock.release() # depends on [control=['if'], data=[]]
|
def patch_namespaced_horizontal_pod_autoscaler(self, name, namespace, body, **kwargs):
"""
partially update the specified HorizontalPodAutoscaler
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_horizontal_pod_autoscaler(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the HorizontalPodAutoscaler (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch).
:param bool force: Force is going to \"force\" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests.
:return: V2beta1HorizontalPodAutoscaler
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_horizontal_pod_autoscaler_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.patch_namespaced_horizontal_pod_autoscaler_with_http_info(name, namespace, body, **kwargs)
return data
|
def function[patch_namespaced_horizontal_pod_autoscaler, parameter[self, name, namespace, body]]:
constant[
partially update the specified HorizontalPodAutoscaler
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_horizontal_pod_autoscaler(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the HorizontalPodAutoscaler (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch).
:param bool force: Force is going to "force" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests.
:return: V2beta1HorizontalPodAutoscaler
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].patch_namespaced_horizontal_pod_autoscaler_with_http_info, parameter[name[name], name[namespace], name[body]]]]
|
keyword[def] identifier[patch_namespaced_horizontal_pod_autoscaler] ( identifier[self] , identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[patch_namespaced_horizontal_pod_autoscaler_with_http_info] ( identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[patch_namespaced_horizontal_pod_autoscaler_with_http_info] ( identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] )
keyword[return] identifier[data]
|
def patch_namespaced_horizontal_pod_autoscaler(self, name, namespace, body, **kwargs):
"""
partially update the specified HorizontalPodAutoscaler
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_horizontal_pod_autoscaler(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the HorizontalPodAutoscaler (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch).
:param bool force: Force is going to "force" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests.
:return: V2beta1HorizontalPodAutoscaler
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_horizontal_pod_autoscaler_with_http_info(name, namespace, body, **kwargs) # depends on [control=['if'], data=[]]
else:
data = self.patch_namespaced_horizontal_pod_autoscaler_with_http_info(name, namespace, body, **kwargs)
return data
|
def get_definition_revision(self, project, definition_id, revision, **kwargs):
"""GetDefinitionRevision.
[Preview API] Get release definition for a given definitionId and revision
:param str project: Project ID or project name
:param int definition_id: Id of the definition.
:param int revision: Id of the revision.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
if revision is not None:
route_values['revision'] = self._serialize.url('revision', revision, 'int')
response = self._send(http_method='GET',
location_id='258b82e0-9d41-43f3-86d6-fef14ddd44bc',
version='5.1-preview.1',
route_values=route_values,
accept_media_type='text/plain')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
|
def function[get_definition_revision, parameter[self, project, definition_id, revision]]:
constant[GetDefinitionRevision.
[Preview API] Get release definition for a given definitionId and revision
:param str project: Project ID or project name
:param int definition_id: Id of the definition.
:param int revision: Id of the revision.
:rtype: object
]
variable[route_values] assign[=] dictionary[[], []]
if compare[name[project] is_not constant[None]] begin[:]
call[name[route_values]][constant[project]] assign[=] call[name[self]._serialize.url, parameter[constant[project], name[project], constant[str]]]
if compare[name[definition_id] is_not constant[None]] begin[:]
call[name[route_values]][constant[definitionId]] assign[=] call[name[self]._serialize.url, parameter[constant[definition_id], name[definition_id], constant[int]]]
if compare[name[revision] is_not constant[None]] begin[:]
call[name[route_values]][constant[revision]] assign[=] call[name[self]._serialize.url, parameter[constant[revision], name[revision], constant[int]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
if compare[constant[callback] in name[kwargs]] begin[:]
variable[callback] assign[=] call[name[kwargs]][constant[callback]]
return[call[name[self]._client.stream_download, parameter[name[response]]]]
|
keyword[def] identifier[get_definition_revision] ( identifier[self] , identifier[project] , identifier[definition_id] , identifier[revision] ,** identifier[kwargs] ):
literal[string]
identifier[route_values] ={}
keyword[if] identifier[project] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[project] , literal[string] )
keyword[if] identifier[definition_id] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[definition_id] , literal[string] )
keyword[if] identifier[revision] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[revision] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] ,
identifier[accept_media_type] = literal[string] )
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[callback] = identifier[kwargs] [ literal[string] ]
keyword[else] :
identifier[callback] = keyword[None]
keyword[return] identifier[self] . identifier[_client] . identifier[stream_download] ( identifier[response] , identifier[callback] = identifier[callback] )
|
def get_definition_revision(self, project, definition_id, revision, **kwargs):
"""GetDefinitionRevision.
[Preview API] Get release definition for a given definitionId and revision
:param str project: Project ID or project name
:param int definition_id: Id of the definition.
:param int revision: Id of the revision.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str') # depends on [control=['if'], data=['project']]
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int') # depends on [control=['if'], data=['definition_id']]
if revision is not None:
route_values['revision'] = self._serialize.url('revision', revision, 'int') # depends on [control=['if'], data=['revision']]
response = self._send(http_method='GET', location_id='258b82e0-9d41-43f3-86d6-fef14ddd44bc', version='5.1-preview.1', route_values=route_values, accept_media_type='text/plain')
if 'callback' in kwargs:
callback = kwargs['callback'] # depends on [control=['if'], data=['kwargs']]
else:
callback = None
return self._client.stream_download(response, callback=callback)
|
def crypto_sign_ed25519ph_update(edph, pmsg):
"""
Update the hash state wrapped in edph
:param edph: the ed25519ph state being updated
:type edph: crypto_sign_ed25519ph_state
:param pmsg: the partial message
:type pmsg: bytes
:rtype: None
"""
ensure(isinstance(edph, crypto_sign_ed25519ph_state),
'edph parameter must be a ed25519ph_state object',
raising=exc.TypeError)
ensure(isinstance(pmsg, bytes),
'pmsg parameter must be a bytes object',
raising=exc.TypeError)
rc = lib.crypto_sign_ed25519ph_update(edph.state,
pmsg,
len(pmsg))
ensure(rc == 0,
'Unexpected library error',
raising=exc.RuntimeError)
|
def function[crypto_sign_ed25519ph_update, parameter[edph, pmsg]]:
constant[
Update the hash state wrapped in edph
:param edph: the ed25519ph state being updated
:type edph: crypto_sign_ed25519ph_state
:param pmsg: the partial message
:type pmsg: bytes
:rtype: None
]
call[name[ensure], parameter[call[name[isinstance], parameter[name[edph], name[crypto_sign_ed25519ph_state]]], constant[edph parameter must be a ed25519ph_state object]]]
call[name[ensure], parameter[call[name[isinstance], parameter[name[pmsg], name[bytes]]], constant[pmsg parameter must be a bytes object]]]
variable[rc] assign[=] call[name[lib].crypto_sign_ed25519ph_update, parameter[name[edph].state, name[pmsg], call[name[len], parameter[name[pmsg]]]]]
call[name[ensure], parameter[compare[name[rc] equal[==] constant[0]], constant[Unexpected library error]]]
|
keyword[def] identifier[crypto_sign_ed25519ph_update] ( identifier[edph] , identifier[pmsg] ):
literal[string]
identifier[ensure] ( identifier[isinstance] ( identifier[edph] , identifier[crypto_sign_ed25519ph_state] ),
literal[string] ,
identifier[raising] = identifier[exc] . identifier[TypeError] )
identifier[ensure] ( identifier[isinstance] ( identifier[pmsg] , identifier[bytes] ),
literal[string] ,
identifier[raising] = identifier[exc] . identifier[TypeError] )
identifier[rc] = identifier[lib] . identifier[crypto_sign_ed25519ph_update] ( identifier[edph] . identifier[state] ,
identifier[pmsg] ,
identifier[len] ( identifier[pmsg] ))
identifier[ensure] ( identifier[rc] == literal[int] ,
literal[string] ,
identifier[raising] = identifier[exc] . identifier[RuntimeError] )
|
def crypto_sign_ed25519ph_update(edph, pmsg):
"""
Update the hash state wrapped in edph
:param edph: the ed25519ph state being updated
:type edph: crypto_sign_ed25519ph_state
:param pmsg: the partial message
:type pmsg: bytes
:rtype: None
"""
ensure(isinstance(edph, crypto_sign_ed25519ph_state), 'edph parameter must be a ed25519ph_state object', raising=exc.TypeError)
ensure(isinstance(pmsg, bytes), 'pmsg parameter must be a bytes object', raising=exc.TypeError)
rc = lib.crypto_sign_ed25519ph_update(edph.state, pmsg, len(pmsg))
ensure(rc == 0, 'Unexpected library error', raising=exc.RuntimeError)
|
def methodReturnReceived(self, mret):
"""
Called when a method return message is received
"""
d, timeout = self._pendingCalls.get(mret.reply_serial, (None, None))
if timeout:
timeout.cancel()
if d:
del self._pendingCalls[mret.reply_serial]
d.callback(mret)
|
def function[methodReturnReceived, parameter[self, mret]]:
constant[
Called when a method return message is received
]
<ast.Tuple object at 0x7da1b06df220> assign[=] call[name[self]._pendingCalls.get, parameter[name[mret].reply_serial, tuple[[<ast.Constant object at 0x7da1b06dc280>, <ast.Constant object at 0x7da1b06dd780>]]]]
if name[timeout] begin[:]
call[name[timeout].cancel, parameter[]]
if name[d] begin[:]
<ast.Delete object at 0x7da1b06dc730>
call[name[d].callback, parameter[name[mret]]]
|
keyword[def] identifier[methodReturnReceived] ( identifier[self] , identifier[mret] ):
literal[string]
identifier[d] , identifier[timeout] = identifier[self] . identifier[_pendingCalls] . identifier[get] ( identifier[mret] . identifier[reply_serial] ,( keyword[None] , keyword[None] ))
keyword[if] identifier[timeout] :
identifier[timeout] . identifier[cancel] ()
keyword[if] identifier[d] :
keyword[del] identifier[self] . identifier[_pendingCalls] [ identifier[mret] . identifier[reply_serial] ]
identifier[d] . identifier[callback] ( identifier[mret] )
|
def methodReturnReceived(self, mret):
"""
Called when a method return message is received
"""
(d, timeout) = self._pendingCalls.get(mret.reply_serial, (None, None))
if timeout:
timeout.cancel() # depends on [control=['if'], data=[]]
if d:
del self._pendingCalls[mret.reply_serial]
d.callback(mret) # depends on [control=['if'], data=[]]
|
def _parse_ethtool_opts(opts, iface):
'''
Filters given options and outputs valid settings for ETHTOOLS_OPTS
If an option has a value that is not expected, this
function will log what the Interface, Setting and what it was
expecting.
'''
config = {}
if 'autoneg' in opts:
if opts['autoneg'] in _CONFIG_TRUE:
config.update({'autoneg': 'on'})
elif opts['autoneg'] in _CONFIG_FALSE:
config.update({'autoneg': 'off'})
else:
_raise_error_iface(iface, 'autoneg', _CONFIG_TRUE + _CONFIG_FALSE)
if 'duplex' in opts:
valid = ['full', 'half']
if opts['duplex'] in valid:
config.update({'duplex': opts['duplex']})
else:
_raise_error_iface(iface, 'duplex', valid)
if 'speed' in opts:
valid = ['10', '100', '1000', '10000']
if six.text_type(opts['speed']) in valid:
config.update({'speed': opts['speed']})
else:
_raise_error_iface(iface, opts['speed'], valid)
valid = _CONFIG_TRUE + _CONFIG_FALSE
for option in ('rx', 'tx', 'sg', 'tso', 'ufo', 'gso', 'gro', 'lro'):
if option in opts:
if opts[option] in _CONFIG_TRUE:
config.update({option: 'on'})
elif opts[option] in _CONFIG_FALSE:
config.update({option: 'off'})
else:
_raise_error_iface(iface, option, valid)
return config
|
def function[_parse_ethtool_opts, parameter[opts, iface]]:
constant[
Filters given options and outputs valid settings for ETHTOOLS_OPTS
If an option has a value that is not expected, this
function will log what the Interface, Setting and what it was
expecting.
]
variable[config] assign[=] dictionary[[], []]
if compare[constant[autoneg] in name[opts]] begin[:]
if compare[call[name[opts]][constant[autoneg]] in name[_CONFIG_TRUE]] begin[:]
call[name[config].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1fe5480>], [<ast.Constant object at 0x7da1b1fe5e70>]]]]
if compare[constant[duplex] in name[opts]] begin[:]
variable[valid] assign[=] list[[<ast.Constant object at 0x7da1b1fe6590>, <ast.Constant object at 0x7da1b1fe65f0>]]
if compare[call[name[opts]][constant[duplex]] in name[valid]] begin[:]
call[name[config].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1fe4790>], [<ast.Subscript object at 0x7da1b1fe47c0>]]]]
if compare[constant[speed] in name[opts]] begin[:]
variable[valid] assign[=] list[[<ast.Constant object at 0x7da1b1fe5ea0>, <ast.Constant object at 0x7da1b1fe5db0>, <ast.Constant object at 0x7da1b1fe7c70>, <ast.Constant object at 0x7da1b1fe7eb0>]]
if compare[call[name[six].text_type, parameter[call[name[opts]][constant[speed]]]] in name[valid]] begin[:]
call[name[config].update, parameter[dictionary[[<ast.Constant object at 0x7da1b20212d0>], [<ast.Subscript object at 0x7da1b20206a0>]]]]
variable[valid] assign[=] binary_operation[name[_CONFIG_TRUE] + name[_CONFIG_FALSE]]
for taget[name[option]] in starred[tuple[[<ast.Constant object at 0x7da1b2022980>, <ast.Constant object at 0x7da1b2023040>, <ast.Constant object at 0x7da1b2022b30>, <ast.Constant object at 0x7da1b2023280>, <ast.Constant object at 0x7da1b20205e0>, <ast.Constant object at 0x7da1b20203d0>, <ast.Constant object at 0x7da1b2022a70>, <ast.Constant object at 0x7da1b2023ee0>]]] begin[:]
if compare[name[option] in name[opts]] begin[:]
if compare[call[name[opts]][name[option]] in name[_CONFIG_TRUE]] begin[:]
call[name[config].update, parameter[dictionary[[<ast.Name object at 0x7da1b2023af0>], [<ast.Constant object at 0x7da1b20205b0>]]]]
return[name[config]]
|
keyword[def] identifier[_parse_ethtool_opts] ( identifier[opts] , identifier[iface] ):
literal[string]
identifier[config] ={}
keyword[if] literal[string] keyword[in] identifier[opts] :
keyword[if] identifier[opts] [ literal[string] ] keyword[in] identifier[_CONFIG_TRUE] :
identifier[config] . identifier[update] ({ literal[string] : literal[string] })
keyword[elif] identifier[opts] [ literal[string] ] keyword[in] identifier[_CONFIG_FALSE] :
identifier[config] . identifier[update] ({ literal[string] : literal[string] })
keyword[else] :
identifier[_raise_error_iface] ( identifier[iface] , literal[string] , identifier[_CONFIG_TRUE] + identifier[_CONFIG_FALSE] )
keyword[if] literal[string] keyword[in] identifier[opts] :
identifier[valid] =[ literal[string] , literal[string] ]
keyword[if] identifier[opts] [ literal[string] ] keyword[in] identifier[valid] :
identifier[config] . identifier[update] ({ literal[string] : identifier[opts] [ literal[string] ]})
keyword[else] :
identifier[_raise_error_iface] ( identifier[iface] , literal[string] , identifier[valid] )
keyword[if] literal[string] keyword[in] identifier[opts] :
identifier[valid] =[ literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[six] . identifier[text_type] ( identifier[opts] [ literal[string] ]) keyword[in] identifier[valid] :
identifier[config] . identifier[update] ({ literal[string] : identifier[opts] [ literal[string] ]})
keyword[else] :
identifier[_raise_error_iface] ( identifier[iface] , identifier[opts] [ literal[string] ], identifier[valid] )
identifier[valid] = identifier[_CONFIG_TRUE] + identifier[_CONFIG_FALSE]
keyword[for] identifier[option] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ):
keyword[if] identifier[option] keyword[in] identifier[opts] :
keyword[if] identifier[opts] [ identifier[option] ] keyword[in] identifier[_CONFIG_TRUE] :
identifier[config] . identifier[update] ({ identifier[option] : literal[string] })
keyword[elif] identifier[opts] [ identifier[option] ] keyword[in] identifier[_CONFIG_FALSE] :
identifier[config] . identifier[update] ({ identifier[option] : literal[string] })
keyword[else] :
identifier[_raise_error_iface] ( identifier[iface] , identifier[option] , identifier[valid] )
keyword[return] identifier[config]
|
def _parse_ethtool_opts(opts, iface):
"""
Filters given options and outputs valid settings for ETHTOOLS_OPTS
If an option has a value that is not expected, this
function will log what the Interface, Setting and what it was
expecting.
"""
config = {}
if 'autoneg' in opts:
if opts['autoneg'] in _CONFIG_TRUE:
config.update({'autoneg': 'on'}) # depends on [control=['if'], data=[]]
elif opts['autoneg'] in _CONFIG_FALSE:
config.update({'autoneg': 'off'}) # depends on [control=['if'], data=[]]
else:
_raise_error_iface(iface, 'autoneg', _CONFIG_TRUE + _CONFIG_FALSE) # depends on [control=['if'], data=['opts']]
if 'duplex' in opts:
valid = ['full', 'half']
if opts['duplex'] in valid:
config.update({'duplex': opts['duplex']}) # depends on [control=['if'], data=[]]
else:
_raise_error_iface(iface, 'duplex', valid) # depends on [control=['if'], data=['opts']]
if 'speed' in opts:
valid = ['10', '100', '1000', '10000']
if six.text_type(opts['speed']) in valid:
config.update({'speed': opts['speed']}) # depends on [control=['if'], data=[]]
else:
_raise_error_iface(iface, opts['speed'], valid) # depends on [control=['if'], data=['opts']]
valid = _CONFIG_TRUE + _CONFIG_FALSE
for option in ('rx', 'tx', 'sg', 'tso', 'ufo', 'gso', 'gro', 'lro'):
if option in opts:
if opts[option] in _CONFIG_TRUE:
config.update({option: 'on'}) # depends on [control=['if'], data=[]]
elif opts[option] in _CONFIG_FALSE:
config.update({option: 'off'}) # depends on [control=['if'], data=[]]
else:
_raise_error_iface(iface, option, valid) # depends on [control=['if'], data=['option', 'opts']] # depends on [control=['for'], data=['option']]
return config
|
def load_environment_vars(self):
"""
Looks for any MACH9_ prefixed environment variables and applies
them to the configuration if present.
"""
for k, v in os.environ.items():
if k.startswith(MACH9_PREFIX):
_, config_key = k.split(MACH9_PREFIX, 1)
self[config_key] = v
|
def function[load_environment_vars, parameter[self]]:
constant[
Looks for any MACH9_ prefixed environment variables and applies
them to the configuration if present.
]
for taget[tuple[[<ast.Name object at 0x7da1b27e3b50>, <ast.Name object at 0x7da1b27e0ca0>]]] in starred[call[name[os].environ.items, parameter[]]] begin[:]
if call[name[k].startswith, parameter[name[MACH9_PREFIX]]] begin[:]
<ast.Tuple object at 0x7da1b27e0b20> assign[=] call[name[k].split, parameter[name[MACH9_PREFIX], constant[1]]]
call[name[self]][name[config_key]] assign[=] name[v]
|
keyword[def] identifier[load_environment_vars] ( identifier[self] ):
literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[os] . identifier[environ] . identifier[items] ():
keyword[if] identifier[k] . identifier[startswith] ( identifier[MACH9_PREFIX] ):
identifier[_] , identifier[config_key] = identifier[k] . identifier[split] ( identifier[MACH9_PREFIX] , literal[int] )
identifier[self] [ identifier[config_key] ]= identifier[v]
|
def load_environment_vars(self):
"""
Looks for any MACH9_ prefixed environment variables and applies
them to the configuration if present.
"""
for (k, v) in os.environ.items():
if k.startswith(MACH9_PREFIX):
(_, config_key) = k.split(MACH9_PREFIX, 1)
self[config_key] = v # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
|
def enable_torque(self, ids):
""" Enables torque of the motors with the specified ids. """
self._set_torque_enable(dict(zip(ids, itertools.repeat(True))))
|
def function[enable_torque, parameter[self, ids]]:
constant[ Enables torque of the motors with the specified ids. ]
call[name[self]._set_torque_enable, parameter[call[name[dict], parameter[call[name[zip], parameter[name[ids], call[name[itertools].repeat, parameter[constant[True]]]]]]]]]
|
keyword[def] identifier[enable_torque] ( identifier[self] , identifier[ids] ):
literal[string]
identifier[self] . identifier[_set_torque_enable] ( identifier[dict] ( identifier[zip] ( identifier[ids] , identifier[itertools] . identifier[repeat] ( keyword[True] ))))
|
def enable_torque(self, ids):
""" Enables torque of the motors with the specified ids. """
self._set_torque_enable(dict(zip(ids, itertools.repeat(True))))
|
def run(self):
""" Called by the process, it runs it.
NEVER call this method directly. Instead call start() to start the separate process.
If you don't want to use a second process, then call fetch() directly on this istance.
To stop, call terminate()
"""
if not self._queue:
raise Exception("No queue available to send messages")
factory = LiveStreamFactory(self)
self._reactor.connectSSL("streaming.campfirenow.com", 443, factory, ssl.ClientContextFactory())
self._reactor.run()
|
def function[run, parameter[self]]:
constant[ Called by the process, it runs it.
NEVER call this method directly. Instead call start() to start the separate process.
If you don't want to use a second process, then call fetch() directly on this istance.
To stop, call terminate()
]
if <ast.UnaryOp object at 0x7da1afe7a230> begin[:]
<ast.Raise object at 0x7da1afe7bc70>
variable[factory] assign[=] call[name[LiveStreamFactory], parameter[name[self]]]
call[name[self]._reactor.connectSSL, parameter[constant[streaming.campfirenow.com], constant[443], name[factory], call[name[ssl].ClientContextFactory, parameter[]]]]
call[name[self]._reactor.run, parameter[]]
|
keyword[def] identifier[run] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_queue] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[factory] = identifier[LiveStreamFactory] ( identifier[self] )
identifier[self] . identifier[_reactor] . identifier[connectSSL] ( literal[string] , literal[int] , identifier[factory] , identifier[ssl] . identifier[ClientContextFactory] ())
identifier[self] . identifier[_reactor] . identifier[run] ()
|
def run(self):
""" Called by the process, it runs it.
NEVER call this method directly. Instead call start() to start the separate process.
If you don't want to use a second process, then call fetch() directly on this istance.
To stop, call terminate()
"""
if not self._queue:
raise Exception('No queue available to send messages') # depends on [control=['if'], data=[]]
factory = LiveStreamFactory(self)
self._reactor.connectSSL('streaming.campfirenow.com', 443, factory, ssl.ClientContextFactory())
self._reactor.run()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.