code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def block_signals(self):
"""Prevent the combos and dock listening for event changes."""
self.disconnect_layer_listener()
self.aggregation_layer_combo.blockSignals(True)
self.exposure_layer_combo.blockSignals(True)
self.hazard_layer_combo.blockSignals(True) | def function[block_signals, parameter[self]]:
constant[Prevent the combos and dock listening for event changes.]
call[name[self].disconnect_layer_listener, parameter[]]
call[name[self].aggregation_layer_combo.blockSignals, parameter[constant[True]]]
call[name[self].exposure_layer_combo.blockSignals, parameter[constant[True]]]
call[name[self].hazard_layer_combo.blockSignals, parameter[constant[True]]] | keyword[def] identifier[block_signals] ( identifier[self] ):
literal[string]
identifier[self] . identifier[disconnect_layer_listener] ()
identifier[self] . identifier[aggregation_layer_combo] . identifier[blockSignals] ( keyword[True] )
identifier[self] . identifier[exposure_layer_combo] . identifier[blockSignals] ( keyword[True] )
identifier[self] . identifier[hazard_layer_combo] . identifier[blockSignals] ( keyword[True] ) | def block_signals(self):
"""Prevent the combos and dock listening for event changes."""
self.disconnect_layer_listener()
self.aggregation_layer_combo.blockSignals(True)
self.exposure_layer_combo.blockSignals(True)
self.hazard_layer_combo.blockSignals(True) |
def start(self):
""" Start the run method as a new thread.
It will first stop the thread if it is already running.
"""
if self.running:
self.stop()
self._thread = threading.Thread(target=self._wrapped_target)
self._thread.daemon = True
self._thread.start() | def function[start, parameter[self]]:
constant[ Start the run method as a new thread.
It will first stop the thread if it is already running.
]
if name[self].running begin[:]
call[name[self].stop, parameter[]]
name[self]._thread assign[=] call[name[threading].Thread, parameter[]]
name[self]._thread.daemon assign[=] constant[True]
call[name[self]._thread.start, parameter[]] | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[running] :
identifier[self] . identifier[stop] ()
identifier[self] . identifier[_thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[self] . identifier[_wrapped_target] )
identifier[self] . identifier[_thread] . identifier[daemon] = keyword[True]
identifier[self] . identifier[_thread] . identifier[start] () | def start(self):
""" Start the run method as a new thread.
It will first stop the thread if it is already running.
"""
if self.running:
self.stop() # depends on [control=['if'], data=[]]
self._thread = threading.Thread(target=self._wrapped_target)
self._thread.daemon = True
self._thread.start() |
def strings(self):
"""
Write strings sheet.
"""
sheet = self.result.add_sheet("strings")
self.header(sheet, "strings")
n_row = 1 # row number
for entry in self.po:
row = sheet.row(n_row)
row.write(0, entry.msgid)
row.write(1, entry.msgstr)
n_row += 1
sheet.flush_row_data() | def function[strings, parameter[self]]:
constant[
Write strings sheet.
]
variable[sheet] assign[=] call[name[self].result.add_sheet, parameter[constant[strings]]]
call[name[self].header, parameter[name[sheet], constant[strings]]]
variable[n_row] assign[=] constant[1]
for taget[name[entry]] in starred[name[self].po] begin[:]
variable[row] assign[=] call[name[sheet].row, parameter[name[n_row]]]
call[name[row].write, parameter[constant[0], name[entry].msgid]]
call[name[row].write, parameter[constant[1], name[entry].msgstr]]
<ast.AugAssign object at 0x7da207f9bdc0>
call[name[sheet].flush_row_data, parameter[]] | keyword[def] identifier[strings] ( identifier[self] ):
literal[string]
identifier[sheet] = identifier[self] . identifier[result] . identifier[add_sheet] ( literal[string] )
identifier[self] . identifier[header] ( identifier[sheet] , literal[string] )
identifier[n_row] = literal[int]
keyword[for] identifier[entry] keyword[in] identifier[self] . identifier[po] :
identifier[row] = identifier[sheet] . identifier[row] ( identifier[n_row] )
identifier[row] . identifier[write] ( literal[int] , identifier[entry] . identifier[msgid] )
identifier[row] . identifier[write] ( literal[int] , identifier[entry] . identifier[msgstr] )
identifier[n_row] += literal[int]
identifier[sheet] . identifier[flush_row_data] () | def strings(self):
"""
Write strings sheet.
"""
sheet = self.result.add_sheet('strings')
self.header(sheet, 'strings')
n_row = 1 # row number
for entry in self.po:
row = sheet.row(n_row)
row.write(0, entry.msgid)
row.write(1, entry.msgstr)
n_row += 1
sheet.flush_row_data() # depends on [control=['for'], data=['entry']] |
def init_sentry(self,):
""" Initializes sentry.io error logging for this session """
if not self.use_sentry:
return
sentry_config = self.keychain.get_service("sentry")
tags = {
"repo": self.repo_name,
"branch": self.repo_branch,
"commit": self.repo_commit,
"cci version": cumulusci.__version__,
}
tags.update(self.config.get("sentry_tags", {}))
env = self.config.get("sentry_environment", "CumulusCI CLI")
self.sentry = raven.Client(
dsn=sentry_config.dsn,
environment=env,
tags=tags,
processors=("raven.processors.SanitizePasswordsProcessor",),
) | def function[init_sentry, parameter[self]]:
constant[ Initializes sentry.io error logging for this session ]
if <ast.UnaryOp object at 0x7da1b1663dc0> begin[:]
return[None]
variable[sentry_config] assign[=] call[name[self].keychain.get_service, parameter[constant[sentry]]]
variable[tags] assign[=] dictionary[[<ast.Constant object at 0x7da1b1507c40>, <ast.Constant object at 0x7da1b1507b50>, <ast.Constant object at 0x7da1b1507c70>, <ast.Constant object at 0x7da1b1507be0>], [<ast.Attribute object at 0x7da1b1507790>, <ast.Attribute object at 0x7da1b15073d0>, <ast.Attribute object at 0x7da1b1505180>, <ast.Attribute object at 0x7da1b15052d0>]]
call[name[tags].update, parameter[call[name[self].config.get, parameter[constant[sentry_tags], dictionary[[], []]]]]]
variable[env] assign[=] call[name[self].config.get, parameter[constant[sentry_environment], constant[CumulusCI CLI]]]
name[self].sentry assign[=] call[name[raven].Client, parameter[]] | keyword[def] identifier[init_sentry] ( identifier[self] ,):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[use_sentry] :
keyword[return]
identifier[sentry_config] = identifier[self] . identifier[keychain] . identifier[get_service] ( literal[string] )
identifier[tags] ={
literal[string] : identifier[self] . identifier[repo_name] ,
literal[string] : identifier[self] . identifier[repo_branch] ,
literal[string] : identifier[self] . identifier[repo_commit] ,
literal[string] : identifier[cumulusci] . identifier[__version__] ,
}
identifier[tags] . identifier[update] ( identifier[self] . identifier[config] . identifier[get] ( literal[string] ,{}))
identifier[env] = identifier[self] . identifier[config] . identifier[get] ( literal[string] , literal[string] )
identifier[self] . identifier[sentry] = identifier[raven] . identifier[Client] (
identifier[dsn] = identifier[sentry_config] . identifier[dsn] ,
identifier[environment] = identifier[env] ,
identifier[tags] = identifier[tags] ,
identifier[processors] =( literal[string] ,),
) | def init_sentry(self):
""" Initializes sentry.io error logging for this session """
if not self.use_sentry:
return # depends on [control=['if'], data=[]]
sentry_config = self.keychain.get_service('sentry')
tags = {'repo': self.repo_name, 'branch': self.repo_branch, 'commit': self.repo_commit, 'cci version': cumulusci.__version__}
tags.update(self.config.get('sentry_tags', {}))
env = self.config.get('sentry_environment', 'CumulusCI CLI')
self.sentry = raven.Client(dsn=sentry_config.dsn, environment=env, tags=tags, processors=('raven.processors.SanitizePasswordsProcessor',)) |
def isolate_to_image(src_container, src_resources, dst_image, **kwargs):
"""
Uses :func:`copy_resources` to copy resources from a container, but afterwards imports the contents into a new
(otherwise empty) Docker image.
:param src_container: Container name or id.
:type src_container: unicode
:param src_resources: Resources, as (file or directory) names to copy.
:type src_resources: iterable
:param dst_image: Tag for the new image.
:type dst_image: unicode
:param kwargs: Additional kwargs for :func:`copy_resources`.
"""
with temp_dir() as remote_tmp:
copy_resources(src_container, src_resources, remote_tmp, **kwargs)
with cd(remote_tmp):
sudo('tar -cz * | docker import - {0}'.format(dst_image)) | def function[isolate_to_image, parameter[src_container, src_resources, dst_image]]:
constant[
Uses :func:`copy_resources` to copy resources from a container, but afterwards imports the contents into a new
(otherwise empty) Docker image.
:param src_container: Container name or id.
:type src_container: unicode
:param src_resources: Resources, as (file or directory) names to copy.
:type src_resources: iterable
:param dst_image: Tag for the new image.
:type dst_image: unicode
:param kwargs: Additional kwargs for :func:`copy_resources`.
]
with call[name[temp_dir], parameter[]] begin[:]
call[name[copy_resources], parameter[name[src_container], name[src_resources], name[remote_tmp]]]
with call[name[cd], parameter[name[remote_tmp]]] begin[:]
call[name[sudo], parameter[call[constant[tar -cz * | docker import - {0}].format, parameter[name[dst_image]]]]] | keyword[def] identifier[isolate_to_image] ( identifier[src_container] , identifier[src_resources] , identifier[dst_image] ,** identifier[kwargs] ):
literal[string]
keyword[with] identifier[temp_dir] () keyword[as] identifier[remote_tmp] :
identifier[copy_resources] ( identifier[src_container] , identifier[src_resources] , identifier[remote_tmp] ,** identifier[kwargs] )
keyword[with] identifier[cd] ( identifier[remote_tmp] ):
identifier[sudo] ( literal[string] . identifier[format] ( identifier[dst_image] )) | def isolate_to_image(src_container, src_resources, dst_image, **kwargs):
"""
Uses :func:`copy_resources` to copy resources from a container, but afterwards imports the contents into a new
(otherwise empty) Docker image.
:param src_container: Container name or id.
:type src_container: unicode
:param src_resources: Resources, as (file or directory) names to copy.
:type src_resources: iterable
:param dst_image: Tag for the new image.
:type dst_image: unicode
:param kwargs: Additional kwargs for :func:`copy_resources`.
"""
with temp_dir() as remote_tmp:
copy_resources(src_container, src_resources, remote_tmp, **kwargs)
with cd(remote_tmp):
sudo('tar -cz * | docker import - {0}'.format(dst_image)) # depends on [control=['with'], data=[]] # depends on [control=['with'], data=['remote_tmp']] |
def verify_signature(message, signature, certs):
"""Verify an RSA cryptographic signature.
Checks that the provided ``signature`` was generated from ``bytes`` using
the private key associated with the ``cert``.
Args:
message (Union[str, bytes]): The plaintext message.
signature (Union[str, bytes]): The cryptographic signature to check.
certs (Union[Sequence, str, bytes]): The certificate or certificates
to use to check the signature.
Returns:
bool: True if the signature is valid, otherwise False.
"""
if isinstance(certs, (six.text_type, six.binary_type)):
certs = [certs]
for cert in certs:
verifier = rsa.RSAVerifier.from_string(cert)
if verifier.verify(message, signature):
return True
return False | def function[verify_signature, parameter[message, signature, certs]]:
constant[Verify an RSA cryptographic signature.
Checks that the provided ``signature`` was generated from ``bytes`` using
the private key associated with the ``cert``.
Args:
message (Union[str, bytes]): The plaintext message.
signature (Union[str, bytes]): The cryptographic signature to check.
certs (Union[Sequence, str, bytes]): The certificate or certificates
to use to check the signature.
Returns:
bool: True if the signature is valid, otherwise False.
]
if call[name[isinstance], parameter[name[certs], tuple[[<ast.Attribute object at 0x7da18bc704c0>, <ast.Attribute object at 0x7da18bc71660>]]]] begin[:]
variable[certs] assign[=] list[[<ast.Name object at 0x7da18bc73100>]]
for taget[name[cert]] in starred[name[certs]] begin[:]
variable[verifier] assign[=] call[name[rsa].RSAVerifier.from_string, parameter[name[cert]]]
if call[name[verifier].verify, parameter[name[message], name[signature]]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[verify_signature] ( identifier[message] , identifier[signature] , identifier[certs] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[certs] ,( identifier[six] . identifier[text_type] , identifier[six] . identifier[binary_type] )):
identifier[certs] =[ identifier[certs] ]
keyword[for] identifier[cert] keyword[in] identifier[certs] :
identifier[verifier] = identifier[rsa] . identifier[RSAVerifier] . identifier[from_string] ( identifier[cert] )
keyword[if] identifier[verifier] . identifier[verify] ( identifier[message] , identifier[signature] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def verify_signature(message, signature, certs):
"""Verify an RSA cryptographic signature.
Checks that the provided ``signature`` was generated from ``bytes`` using
the private key associated with the ``cert``.
Args:
message (Union[str, bytes]): The plaintext message.
signature (Union[str, bytes]): The cryptographic signature to check.
certs (Union[Sequence, str, bytes]): The certificate or certificates
to use to check the signature.
Returns:
bool: True if the signature is valid, otherwise False.
"""
if isinstance(certs, (six.text_type, six.binary_type)):
certs = [certs] # depends on [control=['if'], data=[]]
for cert in certs:
verifier = rsa.RSAVerifier.from_string(cert)
if verifier.verify(message, signature):
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cert']]
return False |
def create_message(self):
"""Returns a message body to send in this email. Should be from email.mime.*"""
body = dedent("""\
Received exception {exception} on {queue} from worker {worker}:
{traceback}
Payload:
{payload}
""").format(exception=self._exception,
traceback=self._traceback,
queue=self._queue,
payload=self._payload,
worker=self._worker)
return MIMEText(body) | def function[create_message, parameter[self]]:
constant[Returns a message body to send in this email. Should be from email.mime.*]
variable[body] assign[=] call[call[name[dedent], parameter[constant[ Received exception {exception} on {queue} from worker {worker}:
{traceback}
Payload:
{payload}
]]].format, parameter[]]
return[call[name[MIMEText], parameter[name[body]]]] | keyword[def] identifier[create_message] ( identifier[self] ):
literal[string]
identifier[body] = identifier[dedent] ( literal[string] ). identifier[format] ( identifier[exception] = identifier[self] . identifier[_exception] ,
identifier[traceback] = identifier[self] . identifier[_traceback] ,
identifier[queue] = identifier[self] . identifier[_queue] ,
identifier[payload] = identifier[self] . identifier[_payload] ,
identifier[worker] = identifier[self] . identifier[_worker] )
keyword[return] identifier[MIMEText] ( identifier[body] ) | def create_message(self):
"""Returns a message body to send in this email. Should be from email.mime.*"""
body = dedent(' Received exception {exception} on {queue} from worker {worker}:\n\n {traceback}\n\n Payload:\n {payload}\n\n ').format(exception=self._exception, traceback=self._traceback, queue=self._queue, payload=self._payload, worker=self._worker)
return MIMEText(body) |
def delete_flair(self, subreddit, user):
"""Delete the flair for the given user on the given subreddit.
:returns: The json response from the server.
"""
data = {'r': six.text_type(subreddit),
'name': six.text_type(user)}
return self.request_json(self.config['deleteflair'], data=data) | def function[delete_flair, parameter[self, subreddit, user]]:
constant[Delete the flair for the given user on the given subreddit.
:returns: The json response from the server.
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18f58f910>, <ast.Constant object at 0x7da18f58e710>], [<ast.Call object at 0x7da18f58c610>, <ast.Call object at 0x7da18f58ca00>]]
return[call[name[self].request_json, parameter[call[name[self].config][constant[deleteflair]]]]] | keyword[def] identifier[delete_flair] ( identifier[self] , identifier[subreddit] , identifier[user] ):
literal[string]
identifier[data] ={ literal[string] : identifier[six] . identifier[text_type] ( identifier[subreddit] ),
literal[string] : identifier[six] . identifier[text_type] ( identifier[user] )}
keyword[return] identifier[self] . identifier[request_json] ( identifier[self] . identifier[config] [ literal[string] ], identifier[data] = identifier[data] ) | def delete_flair(self, subreddit, user):
"""Delete the flair for the given user on the given subreddit.
:returns: The json response from the server.
"""
data = {'r': six.text_type(subreddit), 'name': six.text_type(user)}
return self.request_json(self.config['deleteflair'], data=data) |
def set_integer(self, option, value):
"""Set an integer option.
Args:
option (str): name of option.
value (int): value of the option.
Raises:
ValueError: Value must be an integer.
"""
try:
int_value = int(value)
except ValueError as err:
print(err.args)
self.options[option] = value | def function[set_integer, parameter[self, option, value]]:
constant[Set an integer option.
Args:
option (str): name of option.
value (int): value of the option.
Raises:
ValueError: Value must be an integer.
]
<ast.Try object at 0x7da1b18b9ff0>
call[name[self].options][name[option]] assign[=] name[value] | keyword[def] identifier[set_integer] ( identifier[self] , identifier[option] , identifier[value] ):
literal[string]
keyword[try] :
identifier[int_value] = identifier[int] ( identifier[value] )
keyword[except] identifier[ValueError] keyword[as] identifier[err] :
identifier[print] ( identifier[err] . identifier[args] )
identifier[self] . identifier[options] [ identifier[option] ]= identifier[value] | def set_integer(self, option, value):
"""Set an integer option.
Args:
option (str): name of option.
value (int): value of the option.
Raises:
ValueError: Value must be an integer.
"""
try:
int_value = int(value) # depends on [control=['try'], data=[]]
except ValueError as err:
print(err.args) # depends on [control=['except'], data=['err']]
self.options[option] = value |
def _parse_user_flags():
"""
Parses user-flags file and loads it to register user defined options.
"""
try:
idx = list(sys.argv).index('--user-flags')
user_flags_file = sys.argv[idx + 1]
except (ValueError, IndexError):
user_flags_file = ''
if user_flags_file and os.path.isfile(user_flags_file):
from ryu.utils import _import_module_file
_import_module_file(user_flags_file) | def function[_parse_user_flags, parameter[]]:
constant[
Parses user-flags file and loads it to register user defined options.
]
<ast.Try object at 0x7da1b1bae620>
if <ast.BoolOp object at 0x7da1b1a36e30> begin[:]
from relative_module[ryu.utils] import module[_import_module_file]
call[name[_import_module_file], parameter[name[user_flags_file]]] | keyword[def] identifier[_parse_user_flags] ():
literal[string]
keyword[try] :
identifier[idx] = identifier[list] ( identifier[sys] . identifier[argv] ). identifier[index] ( literal[string] )
identifier[user_flags_file] = identifier[sys] . identifier[argv] [ identifier[idx] + literal[int] ]
keyword[except] ( identifier[ValueError] , identifier[IndexError] ):
identifier[user_flags_file] = literal[string]
keyword[if] identifier[user_flags_file] keyword[and] identifier[os] . identifier[path] . identifier[isfile] ( identifier[user_flags_file] ):
keyword[from] identifier[ryu] . identifier[utils] keyword[import] identifier[_import_module_file]
identifier[_import_module_file] ( identifier[user_flags_file] ) | def _parse_user_flags():
"""
Parses user-flags file and loads it to register user defined options.
"""
try:
idx = list(sys.argv).index('--user-flags')
user_flags_file = sys.argv[idx + 1] # depends on [control=['try'], data=[]]
except (ValueError, IndexError):
user_flags_file = '' # depends on [control=['except'], data=[]]
if user_flags_file and os.path.isfile(user_flags_file):
from ryu.utils import _import_module_file
_import_module_file(user_flags_file) # depends on [control=['if'], data=[]] |
def init_app(self, app, session=None, parameters=None):
"""Initializes snow extension
Set config default and find out which client type to use
:param app: App passed from constructor or directly to init_app (factory)
:param session: requests-compatible session to pass along to init_app
:param parameters: `ParamsBuilder` object passed to `Client` after instantiation
:raises:
- ConfigError - if unable to determine client type
"""
if parameters is not None and not isinstance(parameters, ParamsBuilder):
raise InvalidUsage("parameters should be a pysnow.ParamsBuilder object, not %r" % type(parameters).__name__)
self._session = session
self._parameters = parameters
app.config.setdefault('SNOW_INSTANCE', None)
app.config.setdefault('SNOW_HOST', None)
app.config.setdefault('SNOW_USER', None)
app.config.setdefault('SNOW_PASSWORD', None)
app.config.setdefault('SNOW_OAUTH_CLIENT_ID', None)
app.config.setdefault('SNOW_OAUTH_CLIENT_SECRET', None)
app.config.setdefault('SNOW_USE_SSL', True)
if app.config['SNOW_OAUTH_CLIENT_ID'] and app.config['SNOW_OAUTH_CLIENT_SECRET']:
self._client_type_oauth = True
elif self._session or (app.config['SNOW_USER'] and app.config['SNOW_PASSWORD']):
self._client_type_basic = True
else:
raise ConfigError("You must supply user credentials, a session or OAuth credentials to use flask-snow") | def function[init_app, parameter[self, app, session, parameters]]:
constant[Initializes snow extension
Set config default and find out which client type to use
:param app: App passed from constructor or directly to init_app (factory)
:param session: requests-compatible session to pass along to init_app
:param parameters: `ParamsBuilder` object passed to `Client` after instantiation
:raises:
- ConfigError - if unable to determine client type
]
if <ast.BoolOp object at 0x7da1b0f0db10> begin[:]
<ast.Raise object at 0x7da1b0f0dbd0>
name[self]._session assign[=] name[session]
name[self]._parameters assign[=] name[parameters]
call[name[app].config.setdefault, parameter[constant[SNOW_INSTANCE], constant[None]]]
call[name[app].config.setdefault, parameter[constant[SNOW_HOST], constant[None]]]
call[name[app].config.setdefault, parameter[constant[SNOW_USER], constant[None]]]
call[name[app].config.setdefault, parameter[constant[SNOW_PASSWORD], constant[None]]]
call[name[app].config.setdefault, parameter[constant[SNOW_OAUTH_CLIENT_ID], constant[None]]]
call[name[app].config.setdefault, parameter[constant[SNOW_OAUTH_CLIENT_SECRET], constant[None]]]
call[name[app].config.setdefault, parameter[constant[SNOW_USE_SSL], constant[True]]]
if <ast.BoolOp object at 0x7da1b0e0c820> begin[:]
name[self]._client_type_oauth assign[=] constant[True] | keyword[def] identifier[init_app] ( identifier[self] , identifier[app] , identifier[session] = keyword[None] , identifier[parameters] = keyword[None] ):
literal[string]
keyword[if] identifier[parameters] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] ( identifier[parameters] , identifier[ParamsBuilder] ):
keyword[raise] identifier[InvalidUsage] ( literal[string] % identifier[type] ( identifier[parameters] ). identifier[__name__] )
identifier[self] . identifier[_session] = identifier[session]
identifier[self] . identifier[_parameters] = identifier[parameters]
identifier[app] . identifier[config] . identifier[setdefault] ( literal[string] , keyword[None] )
identifier[app] . identifier[config] . identifier[setdefault] ( literal[string] , keyword[None] )
identifier[app] . identifier[config] . identifier[setdefault] ( literal[string] , keyword[None] )
identifier[app] . identifier[config] . identifier[setdefault] ( literal[string] , keyword[None] )
identifier[app] . identifier[config] . identifier[setdefault] ( literal[string] , keyword[None] )
identifier[app] . identifier[config] . identifier[setdefault] ( literal[string] , keyword[None] )
identifier[app] . identifier[config] . identifier[setdefault] ( literal[string] , keyword[True] )
keyword[if] identifier[app] . identifier[config] [ literal[string] ] keyword[and] identifier[app] . identifier[config] [ literal[string] ]:
identifier[self] . identifier[_client_type_oauth] = keyword[True]
keyword[elif] identifier[self] . identifier[_session] keyword[or] ( identifier[app] . identifier[config] [ literal[string] ] keyword[and] identifier[app] . identifier[config] [ literal[string] ]):
identifier[self] . identifier[_client_type_basic] = keyword[True]
keyword[else] :
keyword[raise] identifier[ConfigError] ( literal[string] ) | def init_app(self, app, session=None, parameters=None):
"""Initializes snow extension
Set config default and find out which client type to use
:param app: App passed from constructor or directly to init_app (factory)
:param session: requests-compatible session to pass along to init_app
:param parameters: `ParamsBuilder` object passed to `Client` after instantiation
:raises:
- ConfigError - if unable to determine client type
"""
if parameters is not None and (not isinstance(parameters, ParamsBuilder)):
raise InvalidUsage('parameters should be a pysnow.ParamsBuilder object, not %r' % type(parameters).__name__) # depends on [control=['if'], data=[]]
self._session = session
self._parameters = parameters
app.config.setdefault('SNOW_INSTANCE', None)
app.config.setdefault('SNOW_HOST', None)
app.config.setdefault('SNOW_USER', None)
app.config.setdefault('SNOW_PASSWORD', None)
app.config.setdefault('SNOW_OAUTH_CLIENT_ID', None)
app.config.setdefault('SNOW_OAUTH_CLIENT_SECRET', None)
app.config.setdefault('SNOW_USE_SSL', True)
if app.config['SNOW_OAUTH_CLIENT_ID'] and app.config['SNOW_OAUTH_CLIENT_SECRET']:
self._client_type_oauth = True # depends on [control=['if'], data=[]]
elif self._session or (app.config['SNOW_USER'] and app.config['SNOW_PASSWORD']):
self._client_type_basic = True # depends on [control=['if'], data=[]]
else:
raise ConfigError('You must supply user credentials, a session or OAuth credentials to use flask-snow') |
def graft_neuron(root_section):
'''Returns a neuron starting at root_section'''
assert isinstance(root_section, Section)
return Neuron(soma=Soma(root_section.points[:1]), neurites=[Neurite(root_section)]) | def function[graft_neuron, parameter[root_section]]:
constant[Returns a neuron starting at root_section]
assert[call[name[isinstance], parameter[name[root_section], name[Section]]]]
return[call[name[Neuron], parameter[]]] | keyword[def] identifier[graft_neuron] ( identifier[root_section] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[root_section] , identifier[Section] )
keyword[return] identifier[Neuron] ( identifier[soma] = identifier[Soma] ( identifier[root_section] . identifier[points] [: literal[int] ]), identifier[neurites] =[ identifier[Neurite] ( identifier[root_section] )]) | def graft_neuron(root_section):
"""Returns a neuron starting at root_section"""
assert isinstance(root_section, Section)
return Neuron(soma=Soma(root_section.points[:1]), neurites=[Neurite(root_section)]) |
def end_timing(self, name, elapsed):
"""
Ends measurement of execution elapsed time and updates specified counter.
:param name: a counter name
:param elapsed: execution elapsed time in milliseconds to update the counter.
"""
for counter in self._counters:
if isinstance(counter, ITimingCallback):
counter.end_timing(name, elapsed) | def function[end_timing, parameter[self, name, elapsed]]:
constant[
Ends measurement of execution elapsed time and updates specified counter.
:param name: a counter name
:param elapsed: execution elapsed time in milliseconds to update the counter.
]
for taget[name[counter]] in starred[name[self]._counters] begin[:]
if call[name[isinstance], parameter[name[counter], name[ITimingCallback]]] begin[:]
call[name[counter].end_timing, parameter[name[name], name[elapsed]]] | keyword[def] identifier[end_timing] ( identifier[self] , identifier[name] , identifier[elapsed] ):
literal[string]
keyword[for] identifier[counter] keyword[in] identifier[self] . identifier[_counters] :
keyword[if] identifier[isinstance] ( identifier[counter] , identifier[ITimingCallback] ):
identifier[counter] . identifier[end_timing] ( identifier[name] , identifier[elapsed] ) | def end_timing(self, name, elapsed):
"""
Ends measurement of execution elapsed time and updates specified counter.
:param name: a counter name
:param elapsed: execution elapsed time in milliseconds to update the counter.
"""
for counter in self._counters:
if isinstance(counter, ITimingCallback):
counter.end_timing(name, elapsed) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['counter']] |
def bump_version(cursor, uuid, is_minor_bump=False):
"""Bump to the next version of the given content identified
by ``uuid``. Returns the next available version as a version tuple,
containing major and minor version.
If ``is_minor_bump`` is ``True`` the version will minor bump. That is
1.2 becomes 1.3 in the case of Collections. And 2 becomes 3 for
Modules regardless of this option.
"""
cursor.execute("""\
SELECT portal_type, major_version, minor_version
FROM latest_modules
WHERE uuid = %s::uuid""", (uuid,))
type_, major_version, minor_version = cursor.fetchone()
incr = 1
if type_ == 'Collection' and is_minor_bump:
minor_version = minor_version + incr
else:
major_version = major_version + incr
return (major_version, minor_version,) | def function[bump_version, parameter[cursor, uuid, is_minor_bump]]:
constant[Bump to the next version of the given content identified
by ``uuid``. Returns the next available version as a version tuple,
containing major and minor version.
If ``is_minor_bump`` is ``True`` the version will minor bump. That is
1.2 becomes 1.3 in the case of Collections. And 2 becomes 3 for
Modules regardless of this option.
]
call[name[cursor].execute, parameter[constant[SELECT portal_type, major_version, minor_version
FROM latest_modules
WHERE uuid = %s::uuid], tuple[[<ast.Name object at 0x7da1aff8c370>]]]]
<ast.Tuple object at 0x7da1aff8c400> assign[=] call[name[cursor].fetchone, parameter[]]
variable[incr] assign[=] constant[1]
if <ast.BoolOp object at 0x7da1aff8c820> begin[:]
variable[minor_version] assign[=] binary_operation[name[minor_version] + name[incr]]
return[tuple[[<ast.Name object at 0x7da1aff8cb50>, <ast.Name object at 0x7da1aff8cb80>]]] | keyword[def] identifier[bump_version] ( identifier[cursor] , identifier[uuid] , identifier[is_minor_bump] = keyword[False] ):
literal[string]
identifier[cursor] . identifier[execute] ( literal[string] ,( identifier[uuid] ,))
identifier[type_] , identifier[major_version] , identifier[minor_version] = identifier[cursor] . identifier[fetchone] ()
identifier[incr] = literal[int]
keyword[if] identifier[type_] == literal[string] keyword[and] identifier[is_minor_bump] :
identifier[minor_version] = identifier[minor_version] + identifier[incr]
keyword[else] :
identifier[major_version] = identifier[major_version] + identifier[incr]
keyword[return] ( identifier[major_version] , identifier[minor_version] ,) | def bump_version(cursor, uuid, is_minor_bump=False):
"""Bump to the next version of the given content identified
by ``uuid``. Returns the next available version as a version tuple,
containing major and minor version.
If ``is_minor_bump`` is ``True`` the version will minor bump. That is
1.2 becomes 1.3 in the case of Collections. And 2 becomes 3 for
Modules regardless of this option.
"""
cursor.execute('SELECT portal_type, major_version, minor_version\nFROM latest_modules\nWHERE uuid = %s::uuid', (uuid,))
(type_, major_version, minor_version) = cursor.fetchone()
incr = 1
if type_ == 'Collection' and is_minor_bump:
minor_version = minor_version + incr # depends on [control=['if'], data=[]]
else:
major_version = major_version + incr
return (major_version, minor_version) |
def _is_port_name(name):
''' Check that name is IANA service: An alphanumeric (a-z, and 0-9) string,
with a maximum length of 15 characters, with the '-' character allowed
anywhere except the first or the last character or adjacent to another '-'
character, it must contain at least a (a-z) character '''
port_name = re.compile("""^[a-z0-9]{1,15}$""")
if port_name.match(name):
return True
else:
return False | def function[_is_port_name, parameter[name]]:
constant[ Check that name is IANA service: An alphanumeric (a-z, and 0-9) string,
with a maximum length of 15 characters, with the '-' character allowed
anywhere except the first or the last character or adjacent to another '-'
character, it must contain at least a (a-z) character ]
variable[port_name] assign[=] call[name[re].compile, parameter[constant[^[a-z0-9]{1,15}$]]]
if call[name[port_name].match, parameter[name[name]]] begin[:]
return[constant[True]] | keyword[def] identifier[_is_port_name] ( identifier[name] ):
literal[string]
identifier[port_name] = identifier[re] . identifier[compile] ( literal[string] )
keyword[if] identifier[port_name] . identifier[match] ( identifier[name] ):
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False] | def _is_port_name(name):
""" Check that name is IANA service: An alphanumeric (a-z, and 0-9) string,
with a maximum length of 15 characters, with the '-' character allowed
anywhere except the first or the last character or adjacent to another '-'
character, it must contain at least a (a-z) character """
port_name = re.compile('^[a-z0-9]{1,15}$')
if port_name.match(name):
return True # depends on [control=['if'], data=[]]
else:
return False |
async def sinter(self, keys, *args):
"""
Return the intersection of sets specified by ``keys``
Cluster impl:
Querry all keys, intersection and return result
"""
k = list_or_args(keys, args)
res = await self.smembers(k[0])
for arg in k[1:]:
res &= await self.smembers(arg)
return res | <ast.AsyncFunctionDef object at 0x7da1b0798ee0> | keyword[async] keyword[def] identifier[sinter] ( identifier[self] , identifier[keys] ,* identifier[args] ):
literal[string]
identifier[k] = identifier[list_or_args] ( identifier[keys] , identifier[args] )
identifier[res] = keyword[await] identifier[self] . identifier[smembers] ( identifier[k] [ literal[int] ])
keyword[for] identifier[arg] keyword[in] identifier[k] [ literal[int] :]:
identifier[res] &= keyword[await] identifier[self] . identifier[smembers] ( identifier[arg] )
keyword[return] identifier[res] | async def sinter(self, keys, *args):
"""
Return the intersection of sets specified by ``keys``
Cluster impl:
Querry all keys, intersection and return result
"""
k = list_or_args(keys, args)
res = await self.smembers(k[0])
for arg in k[1:]:
res &= await self.smembers(arg) # depends on [control=['for'], data=['arg']]
return res |
def twosided_2_onesided(data):
"""Convert a one-sided PSD to a twosided PSD
In order to keep the power in the onesided PSD the same
as in the twosided version, the onesided values are twice
as much as in the input data (except for the zero-lag value).
::
>>> twosided_2_onesided([10, 2,3,3,2,8])
array([ 10., 4., 6., 8.])
"""
assert len(data) % 2 == 0
N = len(data)
psd = np.array(data[0:N//2+1]) * 2.
psd[0] /= 2.
psd[-1] = data[-1]
return psd | def function[twosided_2_onesided, parameter[data]]:
constant[Convert a one-sided PSD to a twosided PSD
In order to keep the power in the onesided PSD the same
as in the twosided version, the onesided values are twice
as much as in the input data (except for the zero-lag value).
::
>>> twosided_2_onesided([10, 2,3,3,2,8])
array([ 10., 4., 6., 8.])
]
assert[compare[binary_operation[call[name[len], parameter[name[data]]] <ast.Mod object at 0x7da2590d6920> constant[2]] equal[==] constant[0]]]
variable[N] assign[=] call[name[len], parameter[name[data]]]
variable[psd] assign[=] binary_operation[call[name[np].array, parameter[call[name[data]][<ast.Slice object at 0x7da1b01c2980>]]] * constant[2.0]]
<ast.AugAssign object at 0x7da1b01c0a90>
call[name[psd]][<ast.UnaryOp object at 0x7da1b01c0c10>] assign[=] call[name[data]][<ast.UnaryOp object at 0x7da1b01c0e20>]
return[name[psd]] | keyword[def] identifier[twosided_2_onesided] ( identifier[data] ):
literal[string]
keyword[assert] identifier[len] ( identifier[data] )% literal[int] == literal[int]
identifier[N] = identifier[len] ( identifier[data] )
identifier[psd] = identifier[np] . identifier[array] ( identifier[data] [ literal[int] : identifier[N] // literal[int] + literal[int] ])* literal[int]
identifier[psd] [ literal[int] ]/= literal[int]
identifier[psd] [- literal[int] ]= identifier[data] [- literal[int] ]
keyword[return] identifier[psd] | def twosided_2_onesided(data):
"""Convert a one-sided PSD to a twosided PSD
In order to keep the power in the onesided PSD the same
as in the twosided version, the onesided values are twice
as much as in the input data (except for the zero-lag value).
::
>>> twosided_2_onesided([10, 2,3,3,2,8])
array([ 10., 4., 6., 8.])
"""
assert len(data) % 2 == 0
N = len(data)
psd = np.array(data[0:N // 2 + 1]) * 2.0
psd[0] /= 2.0
psd[-1] = data[-1]
return psd |
def set_power_state(self, is_on, bulb=ALL_BULBS, timeout=None):
"""
Sets the power state of one or more bulbs.
"""
with _blocking(self.lock, self.power_state, self.light_state_event,
timeout):
self.send(REQ_SET_POWER_STATE,
bulb, '2s', '\x00\x01' if is_on else '\x00\x00')
self.send(REQ_GET_LIGHT_STATE, ALL_BULBS, '')
return self.power_state | def function[set_power_state, parameter[self, is_on, bulb, timeout]]:
constant[
Sets the power state of one or more bulbs.
]
with call[name[_blocking], parameter[name[self].lock, name[self].power_state, name[self].light_state_event, name[timeout]]] begin[:]
call[name[self].send, parameter[name[REQ_SET_POWER_STATE], name[bulb], constant[2s], <ast.IfExp object at 0x7da1b0b722c0>]]
call[name[self].send, parameter[name[REQ_GET_LIGHT_STATE], name[ALL_BULBS], constant[]]]
return[name[self].power_state] | keyword[def] identifier[set_power_state] ( identifier[self] , identifier[is_on] , identifier[bulb] = identifier[ALL_BULBS] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[with] identifier[_blocking] ( identifier[self] . identifier[lock] , identifier[self] . identifier[power_state] , identifier[self] . identifier[light_state_event] ,
identifier[timeout] ):
identifier[self] . identifier[send] ( identifier[REQ_SET_POWER_STATE] ,
identifier[bulb] , literal[string] , literal[string] keyword[if] identifier[is_on] keyword[else] literal[string] )
identifier[self] . identifier[send] ( identifier[REQ_GET_LIGHT_STATE] , identifier[ALL_BULBS] , literal[string] )
keyword[return] identifier[self] . identifier[power_state] | def set_power_state(self, is_on, bulb=ALL_BULBS, timeout=None):
"""
Sets the power state of one or more bulbs.
"""
with _blocking(self.lock, self.power_state, self.light_state_event, timeout):
self.send(REQ_SET_POWER_STATE, bulb, '2s', '\x00\x01' if is_on else '\x00\x00')
self.send(REQ_GET_LIGHT_STATE, ALL_BULBS, '') # depends on [control=['with'], data=[]]
return self.power_state |
def _edge_in_front(self, edge):
""" Return the index where *edge* appears in the current front.
If the edge is not in the front, return -1
"""
e = (list(edge), list(edge)[::-1])
for i in range(len(self._front)-1):
if self._front[i:i+2] in e:
return i
return -1 | def function[_edge_in_front, parameter[self, edge]]:
constant[ Return the index where *edge* appears in the current front.
If the edge is not in the front, return -1
]
variable[e] assign[=] tuple[[<ast.Call object at 0x7da1b0f19870>, <ast.Subscript object at 0x7da1b0f921a0>]]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[self]._front]] - constant[1]]]]] begin[:]
if compare[call[name[self]._front][<ast.Slice object at 0x7da1b0f93bb0>] in name[e]] begin[:]
return[name[i]]
return[<ast.UnaryOp object at 0x7da1b0f93d30>] | keyword[def] identifier[_edge_in_front] ( identifier[self] , identifier[edge] ):
literal[string]
identifier[e] =( identifier[list] ( identifier[edge] ), identifier[list] ( identifier[edge] )[::- literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[_front] )- literal[int] ):
keyword[if] identifier[self] . identifier[_front] [ identifier[i] : identifier[i] + literal[int] ] keyword[in] identifier[e] :
keyword[return] identifier[i]
keyword[return] - literal[int] | def _edge_in_front(self, edge):
""" Return the index where *edge* appears in the current front.
If the edge is not in the front, return -1
"""
e = (list(edge), list(edge)[::-1])
for i in range(len(self._front) - 1):
if self._front[i:i + 2] in e:
return i # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return -1 |
def _der_to_raw(self, der_signature):
"""Convert signature from DER encoding to RAW encoding."""
r, s = decode_dss_signature(der_signature)
component_length = self._sig_component_length()
return int_to_bytes(r, component_length) + int_to_bytes(s, component_length) | def function[_der_to_raw, parameter[self, der_signature]]:
constant[Convert signature from DER encoding to RAW encoding.]
<ast.Tuple object at 0x7da18fe90cd0> assign[=] call[name[decode_dss_signature], parameter[name[der_signature]]]
variable[component_length] assign[=] call[name[self]._sig_component_length, parameter[]]
return[binary_operation[call[name[int_to_bytes], parameter[name[r], name[component_length]]] + call[name[int_to_bytes], parameter[name[s], name[component_length]]]]] | keyword[def] identifier[_der_to_raw] ( identifier[self] , identifier[der_signature] ):
literal[string]
identifier[r] , identifier[s] = identifier[decode_dss_signature] ( identifier[der_signature] )
identifier[component_length] = identifier[self] . identifier[_sig_component_length] ()
keyword[return] identifier[int_to_bytes] ( identifier[r] , identifier[component_length] )+ identifier[int_to_bytes] ( identifier[s] , identifier[component_length] ) | def _der_to_raw(self, der_signature):
"""Convert signature from DER encoding to RAW encoding."""
(r, s) = decode_dss_signature(der_signature)
component_length = self._sig_component_length()
return int_to_bytes(r, component_length) + int_to_bytes(s, component_length) |
async def register(self, request):
"""Registers the user."""
session = await get_session(request)
user_id = session.get('user_id')
if user_id:
return redirect(request, 'timeline')
error = None
form = None
if request.method == 'POST':
form = await request.post()
user_id = await db.get_user_id(self.mongo.user, form['username'])
if not form['username']:
error = 'You have to enter a username'
elif not form['email'] or '@' not in form['email']:
error = 'You have to enter a valid email address'
elif not form['password']:
error = 'You have to enter a password'
elif form['password'] != form['password2']:
error = 'The two passwords do not match'
elif user_id is not None:
error = 'The username is already taken'
else:
await self.mongo.user.insert(
{'username': form['username'],
'email': form['email'],
'pw_hash': generate_password_hash(form['password'])})
return redirect(request, 'login')
return {"error": error, "form": form} | <ast.AsyncFunctionDef object at 0x7da1b12bffa0> | keyword[async] keyword[def] identifier[register] ( identifier[self] , identifier[request] ):
literal[string]
identifier[session] = keyword[await] identifier[get_session] ( identifier[request] )
identifier[user_id] = identifier[session] . identifier[get] ( literal[string] )
keyword[if] identifier[user_id] :
keyword[return] identifier[redirect] ( identifier[request] , literal[string] )
identifier[error] = keyword[None]
identifier[form] = keyword[None]
keyword[if] identifier[request] . identifier[method] == literal[string] :
identifier[form] = keyword[await] identifier[request] . identifier[post] ()
identifier[user_id] = keyword[await] identifier[db] . identifier[get_user_id] ( identifier[self] . identifier[mongo] . identifier[user] , identifier[form] [ literal[string] ])
keyword[if] keyword[not] identifier[form] [ literal[string] ]:
identifier[error] = literal[string]
keyword[elif] keyword[not] identifier[form] [ literal[string] ] keyword[or] literal[string] keyword[not] keyword[in] identifier[form] [ literal[string] ]:
identifier[error] = literal[string]
keyword[elif] keyword[not] identifier[form] [ literal[string] ]:
identifier[error] = literal[string]
keyword[elif] identifier[form] [ literal[string] ]!= identifier[form] [ literal[string] ]:
identifier[error] = literal[string]
keyword[elif] identifier[user_id] keyword[is] keyword[not] keyword[None] :
identifier[error] = literal[string]
keyword[else] :
keyword[await] identifier[self] . identifier[mongo] . identifier[user] . identifier[insert] (
{ literal[string] : identifier[form] [ literal[string] ],
literal[string] : identifier[form] [ literal[string] ],
literal[string] : identifier[generate_password_hash] ( identifier[form] [ literal[string] ])})
keyword[return] identifier[redirect] ( identifier[request] , literal[string] )
keyword[return] { literal[string] : identifier[error] , literal[string] : identifier[form] } | async def register(self, request):
"""Registers the user."""
session = await get_session(request)
user_id = session.get('user_id')
if user_id:
return redirect(request, 'timeline') # depends on [control=['if'], data=[]]
error = None
form = None
if request.method == 'POST':
form = await request.post()
user_id = await db.get_user_id(self.mongo.user, form['username'])
if not form['username']:
error = 'You have to enter a username' # depends on [control=['if'], data=[]]
elif not form['email'] or '@' not in form['email']:
error = 'You have to enter a valid email address' # depends on [control=['if'], data=[]]
elif not form['password']:
error = 'You have to enter a password' # depends on [control=['if'], data=[]]
elif form['password'] != form['password2']:
error = 'The two passwords do not match' # depends on [control=['if'], data=[]]
elif user_id is not None:
error = 'The username is already taken' # depends on [control=['if'], data=[]]
else:
await self.mongo.user.insert({'username': form['username'], 'email': form['email'], 'pw_hash': generate_password_hash(form['password'])})
return redirect(request, 'login') # depends on [control=['if'], data=[]]
return {'error': error, 'form': form} |
def find_packages(name, pkg_dir):
"""Locate pre-built packages in the _packages directory"""
for c in (FileSystemPackageBuilder, ZipPackageBuilder, ExcelPackageBuilder):
package_path, cache_path = c.make_package_path(pkg_dir, name)
if package_path.exists():
yield c.type_code, package_path, cache_path | def function[find_packages, parameter[name, pkg_dir]]:
constant[Locate pre-built packages in the _packages directory]
for taget[name[c]] in starred[tuple[[<ast.Name object at 0x7da1b196b760>, <ast.Name object at 0x7da1b19ce650>, <ast.Name object at 0x7da1b19cee30>]]] begin[:]
<ast.Tuple object at 0x7da1b19ce890> assign[=] call[name[c].make_package_path, parameter[name[pkg_dir], name[name]]]
if call[name[package_path].exists, parameter[]] begin[:]
<ast.Yield object at 0x7da1b19cd180> | keyword[def] identifier[find_packages] ( identifier[name] , identifier[pkg_dir] ):
literal[string]
keyword[for] identifier[c] keyword[in] ( identifier[FileSystemPackageBuilder] , identifier[ZipPackageBuilder] , identifier[ExcelPackageBuilder] ):
identifier[package_path] , identifier[cache_path] = identifier[c] . identifier[make_package_path] ( identifier[pkg_dir] , identifier[name] )
keyword[if] identifier[package_path] . identifier[exists] ():
keyword[yield] identifier[c] . identifier[type_code] , identifier[package_path] , identifier[cache_path] | def find_packages(name, pkg_dir):
"""Locate pre-built packages in the _packages directory"""
for c in (FileSystemPackageBuilder, ZipPackageBuilder, ExcelPackageBuilder):
(package_path, cache_path) = c.make_package_path(pkg_dir, name)
if package_path.exists():
yield (c.type_code, package_path, cache_path) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] |
def get_user(uid, **kwargs):
"""
Get a user by ID
"""
user_id=kwargs.get('user_id')
if uid is None:
uid = user_id
user_i = _get_user(uid)
return user_i | def function[get_user, parameter[uid]]:
constant[
Get a user by ID
]
variable[user_id] assign[=] call[name[kwargs].get, parameter[constant[user_id]]]
if compare[name[uid] is constant[None]] begin[:]
variable[uid] assign[=] name[user_id]
variable[user_i] assign[=] call[name[_get_user], parameter[name[uid]]]
return[name[user_i]] | keyword[def] identifier[get_user] ( identifier[uid] ,** identifier[kwargs] ):
literal[string]
identifier[user_id] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] identifier[uid] keyword[is] keyword[None] :
identifier[uid] = identifier[user_id]
identifier[user_i] = identifier[_get_user] ( identifier[uid] )
keyword[return] identifier[user_i] | def get_user(uid, **kwargs):
"""
Get a user by ID
"""
user_id = kwargs.get('user_id')
if uid is None:
uid = user_id # depends on [control=['if'], data=['uid']]
user_i = _get_user(uid)
return user_i |
def _is_unordered(collection):
"""
Determine whether a collection appears to be unordered.
This is a conservative implementation, allowing for the possibility that
someone's implemented Mapping or Set, for example, and provided an
__iter__ implementation that defines a consistent ordering of the
collection's elements.
:param object collection: Object to check as an unordered collection.
:return bool: Whether the given object appears to be unordered
:raises TypeError: If the given "collection" is non-iterable, it's
illogical to investigate whether it's ordered.
"""
if not isinstance(collection, Iterable):
raise TypeError("Non-iterable alleged collection: {}".
format(type(collection)))
return isinstance(collection, set) or \
(isinstance(collection, dict) and
not isinstance(collection, OrderedDict)) | def function[_is_unordered, parameter[collection]]:
constant[
Determine whether a collection appears to be unordered.
This is a conservative implementation, allowing for the possibility that
someone's implemented Mapping or Set, for example, and provided an
__iter__ implementation that defines a consistent ordering of the
collection's elements.
:param object collection: Object to check as an unordered collection.
:return bool: Whether the given object appears to be unordered
:raises TypeError: If the given "collection" is non-iterable, it's
illogical to investigate whether it's ordered.
]
if <ast.UnaryOp object at 0x7da1b03ca0b0> begin[:]
<ast.Raise object at 0x7da1b03c85e0>
return[<ast.BoolOp object at 0x7da1b03c8cd0>] | keyword[def] identifier[_is_unordered] ( identifier[collection] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[collection] , identifier[Iterable] ):
keyword[raise] identifier[TypeError] ( literal[string] .
identifier[format] ( identifier[type] ( identifier[collection] )))
keyword[return] identifier[isinstance] ( identifier[collection] , identifier[set] ) keyword[or] ( identifier[isinstance] ( identifier[collection] , identifier[dict] ) keyword[and]
keyword[not] identifier[isinstance] ( identifier[collection] , identifier[OrderedDict] )) | def _is_unordered(collection):
"""
Determine whether a collection appears to be unordered.
This is a conservative implementation, allowing for the possibility that
someone's implemented Mapping or Set, for example, and provided an
__iter__ implementation that defines a consistent ordering of the
collection's elements.
:param object collection: Object to check as an unordered collection.
:return bool: Whether the given object appears to be unordered
:raises TypeError: If the given "collection" is non-iterable, it's
illogical to investigate whether it's ordered.
"""
if not isinstance(collection, Iterable):
raise TypeError('Non-iterable alleged collection: {}'.format(type(collection))) # depends on [control=['if'], data=[]]
return isinstance(collection, set) or (isinstance(collection, dict) and (not isinstance(collection, OrderedDict))) |
def height_to_pressure_std(height):
r"""Convert height data to pressures using the U.S. standard atmosphere.
The implementation inverts the formula outlined in [Hobbs1977]_ pg.60-61.
Parameters
----------
height : `pint.Quantity`
Atmospheric height
Returns
-------
`pint.Quantity`
The corresponding pressure value(s)
Notes
-----
.. math:: p = p_0 e^{\frac{g}{R \Gamma} \text{ln}(1-\frac{Z \Gamma}{T_0})}
"""
t0 = 288. * units.kelvin
gamma = 6.5 * units('K/km')
p0 = 1013.25 * units.mbar
return p0 * (1 - (gamma / t0) * height) ** (mpconsts.g / (mpconsts.Rd * gamma)) | def function[height_to_pressure_std, parameter[height]]:
constant[Convert height data to pressures using the U.S. standard atmosphere.
The implementation inverts the formula outlined in [Hobbs1977]_ pg.60-61.
Parameters
----------
height : `pint.Quantity`
Atmospheric height
Returns
-------
`pint.Quantity`
The corresponding pressure value(s)
Notes
-----
.. math:: p = p_0 e^{\frac{g}{R \Gamma} \text{ln}(1-\frac{Z \Gamma}{T_0})}
]
variable[t0] assign[=] binary_operation[constant[288.0] * name[units].kelvin]
variable[gamma] assign[=] binary_operation[constant[6.5] * call[name[units], parameter[constant[K/km]]]]
variable[p0] assign[=] binary_operation[constant[1013.25] * name[units].mbar]
return[binary_operation[name[p0] * binary_operation[binary_operation[constant[1] - binary_operation[binary_operation[name[gamma] / name[t0]] * name[height]]] ** binary_operation[name[mpconsts].g / binary_operation[name[mpconsts].Rd * name[gamma]]]]]] | keyword[def] identifier[height_to_pressure_std] ( identifier[height] ):
literal[string]
identifier[t0] = literal[int] * identifier[units] . identifier[kelvin]
identifier[gamma] = literal[int] * identifier[units] ( literal[string] )
identifier[p0] = literal[int] * identifier[units] . identifier[mbar]
keyword[return] identifier[p0] *( literal[int] -( identifier[gamma] / identifier[t0] )* identifier[height] )**( identifier[mpconsts] . identifier[g] /( identifier[mpconsts] . identifier[Rd] * identifier[gamma] )) | def height_to_pressure_std(height):
"""Convert height data to pressures using the U.S. standard atmosphere.
The implementation inverts the formula outlined in [Hobbs1977]_ pg.60-61.
Parameters
----------
height : `pint.Quantity`
Atmospheric height
Returns
-------
`pint.Quantity`
The corresponding pressure value(s)
Notes
-----
.. math:: p = p_0 e^{\\frac{g}{R \\Gamma} \\text{ln}(1-\\frac{Z \\Gamma}{T_0})}
"""
t0 = 288.0 * units.kelvin
gamma = 6.5 * units('K/km')
p0 = 1013.25 * units.mbar
return p0 * (1 - gamma / t0 * height) ** (mpconsts.g / (mpconsts.Rd * gamma)) |
def get_user_id(user_id_or_username):
"""Gets the user ID based on the value `user_id_or_username` specified on
the command-line, being extra lenient and lowercasing the value in all
cases.
"""
user_id_or_username = user_id_or_username.lower()
if not user_id_or_username.startswith("user-"):
user_id = "user-" + user_id_or_username.lower()
else:
user_id = user_id_or_username
return user_id | def function[get_user_id, parameter[user_id_or_username]]:
constant[Gets the user ID based on the value `user_id_or_username` specified on
the command-line, being extra lenient and lowercasing the value in all
cases.
]
variable[user_id_or_username] assign[=] call[name[user_id_or_username].lower, parameter[]]
if <ast.UnaryOp object at 0x7da20e955390> begin[:]
variable[user_id] assign[=] binary_operation[constant[user-] + call[name[user_id_or_username].lower, parameter[]]]
return[name[user_id]] | keyword[def] identifier[get_user_id] ( identifier[user_id_or_username] ):
literal[string]
identifier[user_id_or_username] = identifier[user_id_or_username] . identifier[lower] ()
keyword[if] keyword[not] identifier[user_id_or_username] . identifier[startswith] ( literal[string] ):
identifier[user_id] = literal[string] + identifier[user_id_or_username] . identifier[lower] ()
keyword[else] :
identifier[user_id] = identifier[user_id_or_username]
keyword[return] identifier[user_id] | def get_user_id(user_id_or_username):
"""Gets the user ID based on the value `user_id_or_username` specified on
the command-line, being extra lenient and lowercasing the value in all
cases.
"""
user_id_or_username = user_id_or_username.lower()
if not user_id_or_username.startswith('user-'):
user_id = 'user-' + user_id_or_username.lower() # depends on [control=['if'], data=[]]
else:
user_id = user_id_or_username
return user_id |
def delete_image(self, name: str) -> None:
"""
Deletes a Docker image with a given name.
Parameters:
name: the name of the Docker image.
"""
logger.debug("deleting Docker image: %s", name)
path = "docker/images/{}".format(name)
response = self.__api.delete(path)
if response.status_code != 204:
try:
self.__api.handle_erroneous_response(response)
except Exception:
logger.exception("failed to delete Docker image: %s", name)
raise
else:
logger.info("deleted Docker image: %s", name) | def function[delete_image, parameter[self, name]]:
constant[
Deletes a Docker image with a given name.
Parameters:
name: the name of the Docker image.
]
call[name[logger].debug, parameter[constant[deleting Docker image: %s], name[name]]]
variable[path] assign[=] call[constant[docker/images/{}].format, parameter[name[name]]]
variable[response] assign[=] call[name[self].__api.delete, parameter[name[path]]]
if compare[name[response].status_code not_equal[!=] constant[204]] begin[:]
<ast.Try object at 0x7da1b0ccdf30> | keyword[def] identifier[delete_image] ( identifier[self] , identifier[name] : identifier[str] )-> keyword[None] :
literal[string]
identifier[logger] . identifier[debug] ( literal[string] , identifier[name] )
identifier[path] = literal[string] . identifier[format] ( identifier[name] )
identifier[response] = identifier[self] . identifier[__api] . identifier[delete] ( identifier[path] )
keyword[if] identifier[response] . identifier[status_code] != literal[int] :
keyword[try] :
identifier[self] . identifier[__api] . identifier[handle_erroneous_response] ( identifier[response] )
keyword[except] identifier[Exception] :
identifier[logger] . identifier[exception] ( literal[string] , identifier[name] )
keyword[raise]
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] , identifier[name] ) | def delete_image(self, name: str) -> None:
"""
Deletes a Docker image with a given name.
Parameters:
name: the name of the Docker image.
"""
logger.debug('deleting Docker image: %s', name)
path = 'docker/images/{}'.format(name)
response = self.__api.delete(path)
if response.status_code != 204:
try:
self.__api.handle_erroneous_response(response) # depends on [control=['try'], data=[]]
except Exception:
logger.exception('failed to delete Docker image: %s', name)
raise # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
logger.info('deleted Docker image: %s', name) |
def send_getheaders( self, prev_block_hash ):
"""
Request block headers from a particular block hash.
Will receive up to 2000 blocks, starting with the block *after*
the given block hash (prev_block_hash)
"""
getheaders = GetHeaders()
getheaders.add_block_hash( prev_block_hash )
log.debug("send getheaders")
self.send_message( getheaders ) | def function[send_getheaders, parameter[self, prev_block_hash]]:
constant[
Request block headers from a particular block hash.
Will receive up to 2000 blocks, starting with the block *after*
the given block hash (prev_block_hash)
]
variable[getheaders] assign[=] call[name[GetHeaders], parameter[]]
call[name[getheaders].add_block_hash, parameter[name[prev_block_hash]]]
call[name[log].debug, parameter[constant[send getheaders]]]
call[name[self].send_message, parameter[name[getheaders]]] | keyword[def] identifier[send_getheaders] ( identifier[self] , identifier[prev_block_hash] ):
literal[string]
identifier[getheaders] = identifier[GetHeaders] ()
identifier[getheaders] . identifier[add_block_hash] ( identifier[prev_block_hash] )
identifier[log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[send_message] ( identifier[getheaders] ) | def send_getheaders(self, prev_block_hash):
"""
Request block headers from a particular block hash.
Will receive up to 2000 blocks, starting with the block *after*
the given block hash (prev_block_hash)
"""
getheaders = GetHeaders()
getheaders.add_block_hash(prev_block_hash)
log.debug('send getheaders')
self.send_message(getheaders) |
def prev_settlement(self):
"""
[float] 昨日结算价(期货专用)
"""
try:
return self._data['prev_settlement']
except (ValueError, KeyError):
pass
if self._prev_settlement is None:
trading_dt = Environment.get_instance().trading_dt
data_proxy = Environment.get_instance().data_proxy
self._prev_settlement = data_proxy.get_prev_settlement(self._instrument.order_book_id, trading_dt)
return self._prev_settlement | def function[prev_settlement, parameter[self]]:
constant[
[float] 昨日结算价(期货专用)
]
<ast.Try object at 0x7da1b21466e0>
if compare[name[self]._prev_settlement is constant[None]] begin[:]
variable[trading_dt] assign[=] call[name[Environment].get_instance, parameter[]].trading_dt
variable[data_proxy] assign[=] call[name[Environment].get_instance, parameter[]].data_proxy
name[self]._prev_settlement assign[=] call[name[data_proxy].get_prev_settlement, parameter[name[self]._instrument.order_book_id, name[trading_dt]]]
return[name[self]._prev_settlement] | keyword[def] identifier[prev_settlement] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[_data] [ literal[string] ]
keyword[except] ( identifier[ValueError] , identifier[KeyError] ):
keyword[pass]
keyword[if] identifier[self] . identifier[_prev_settlement] keyword[is] keyword[None] :
identifier[trading_dt] = identifier[Environment] . identifier[get_instance] (). identifier[trading_dt]
identifier[data_proxy] = identifier[Environment] . identifier[get_instance] (). identifier[data_proxy]
identifier[self] . identifier[_prev_settlement] = identifier[data_proxy] . identifier[get_prev_settlement] ( identifier[self] . identifier[_instrument] . identifier[order_book_id] , identifier[trading_dt] )
keyword[return] identifier[self] . identifier[_prev_settlement] | def prev_settlement(self):
"""
[float] 昨日结算价(期货专用)
"""
try:
return self._data['prev_settlement'] # depends on [control=['try'], data=[]]
except (ValueError, KeyError):
pass # depends on [control=['except'], data=[]]
if self._prev_settlement is None:
trading_dt = Environment.get_instance().trading_dt
data_proxy = Environment.get_instance().data_proxy
self._prev_settlement = data_proxy.get_prev_settlement(self._instrument.order_book_id, trading_dt) # depends on [control=['if'], data=[]]
return self._prev_settlement |
def find_segment_first(self, *args, **kwargs):
"""Finds the first matching segment.
Same parameters as find_segments(), but only returns the first match, or None if no match is found."""
for m in self.find_segments(*args, **kwargs):
return m
return None | def function[find_segment_first, parameter[self]]:
constant[Finds the first matching segment.
Same parameters as find_segments(), but only returns the first match, or None if no match is found.]
for taget[name[m]] in starred[call[name[self].find_segments, parameter[<ast.Starred object at 0x7da2044c0640>]]] begin[:]
return[name[m]]
return[constant[None]] | keyword[def] identifier[find_segment_first] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[for] identifier[m] keyword[in] identifier[self] . identifier[find_segments] (* identifier[args] ,** identifier[kwargs] ):
keyword[return] identifier[m]
keyword[return] keyword[None] | def find_segment_first(self, *args, **kwargs):
"""Finds the first matching segment.
Same parameters as find_segments(), but only returns the first match, or None if no match is found."""
for m in self.find_segments(*args, **kwargs):
return m # depends on [control=['for'], data=['m']]
return None |
def list_():
'''
Get a list of automatically running programs
CLI Example:
.. code-block:: bash
salt '*' autoruns.list
'''
autoruns = {}
# Find autoruns in registry
keys = ['HKLM\\Software\\Microsoft\\Windows\\CurrentVersion\\Run',
'HKLM\\Software\\Microsoft\\Windows\\CurrentVersion\\Run /reg:64',
'HKCU\\Software\\Microsoft\\Windows\\CurrentVersion\\Run'
]
for key in keys:
autoruns[key] = []
cmd = ['reg', 'query', key]
for line in __salt__['cmd.run'](cmd, python_shell=False).splitlines():
if line and line[0:4] != "HKEY" and line[0:5] != "ERROR": # Remove junk lines
autoruns[key].append(line)
# Find autoruns in user's startup folder
user_dir = 'C:\\Documents and Settings\\'
startup_dir = '\\Start Menu\\Programs\\Startup'
full_dirs = _get_dirs(user_dir, startup_dir)
if not full_dirs:
user_dir = 'C:\\Users\\'
startup_dir = '\\AppData\\Roaming\\Microsoft\\Windows\\Start Menu\\Programs\\Startup'
full_dirs = _get_dirs(user_dir, startup_dir)
for full_dir in full_dirs:
files = os.listdir(full_dir)
autoruns[full_dir] = []
for single_file in files:
autoruns[full_dir].append(single_file)
return autoruns | def function[list_, parameter[]]:
constant[
Get a list of automatically running programs
CLI Example:
.. code-block:: bash
salt '*' autoruns.list
]
variable[autoruns] assign[=] dictionary[[], []]
variable[keys] assign[=] list[[<ast.Constant object at 0x7da18ede7f40>, <ast.Constant object at 0x7da18ede5ba0>, <ast.Constant object at 0x7da18ede44c0>]]
for taget[name[key]] in starred[name[keys]] begin[:]
call[name[autoruns]][name[key]] assign[=] list[[]]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da18ede6f80>, <ast.Constant object at 0x7da18ede5630>, <ast.Name object at 0x7da18ede5330>]]
for taget[name[line]] in starred[call[call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]].splitlines, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da18ede6f20> begin[:]
call[call[name[autoruns]][name[key]].append, parameter[name[line]]]
variable[user_dir] assign[=] constant[C:\Documents and Settings\]
variable[startup_dir] assign[=] constant[\Start Menu\Programs\Startup]
variable[full_dirs] assign[=] call[name[_get_dirs], parameter[name[user_dir], name[startup_dir]]]
if <ast.UnaryOp object at 0x7da18ede61a0> begin[:]
variable[user_dir] assign[=] constant[C:\Users\]
variable[startup_dir] assign[=] constant[\AppData\Roaming\Microsoft\Windows\Start Menu\Programs\Startup]
variable[full_dirs] assign[=] call[name[_get_dirs], parameter[name[user_dir], name[startup_dir]]]
for taget[name[full_dir]] in starred[name[full_dirs]] begin[:]
variable[files] assign[=] call[name[os].listdir, parameter[name[full_dir]]]
call[name[autoruns]][name[full_dir]] assign[=] list[[]]
for taget[name[single_file]] in starred[name[files]] begin[:]
call[call[name[autoruns]][name[full_dir]].append, parameter[name[single_file]]]
return[name[autoruns]] | keyword[def] identifier[list_] ():
literal[string]
identifier[autoruns] ={}
identifier[keys] =[ literal[string] ,
literal[string] ,
literal[string]
]
keyword[for] identifier[key] keyword[in] identifier[keys] :
identifier[autoruns] [ identifier[key] ]=[]
identifier[cmd] =[ literal[string] , literal[string] , identifier[key] ]
keyword[for] identifier[line] keyword[in] identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] ). identifier[splitlines] ():
keyword[if] identifier[line] keyword[and] identifier[line] [ literal[int] : literal[int] ]!= literal[string] keyword[and] identifier[line] [ literal[int] : literal[int] ]!= literal[string] :
identifier[autoruns] [ identifier[key] ]. identifier[append] ( identifier[line] )
identifier[user_dir] = literal[string]
identifier[startup_dir] = literal[string]
identifier[full_dirs] = identifier[_get_dirs] ( identifier[user_dir] , identifier[startup_dir] )
keyword[if] keyword[not] identifier[full_dirs] :
identifier[user_dir] = literal[string]
identifier[startup_dir] = literal[string]
identifier[full_dirs] = identifier[_get_dirs] ( identifier[user_dir] , identifier[startup_dir] )
keyword[for] identifier[full_dir] keyword[in] identifier[full_dirs] :
identifier[files] = identifier[os] . identifier[listdir] ( identifier[full_dir] )
identifier[autoruns] [ identifier[full_dir] ]=[]
keyword[for] identifier[single_file] keyword[in] identifier[files] :
identifier[autoruns] [ identifier[full_dir] ]. identifier[append] ( identifier[single_file] )
keyword[return] identifier[autoruns] | def list_():
"""
Get a list of automatically running programs
CLI Example:
.. code-block:: bash
salt '*' autoruns.list
"""
autoruns = {}
# Find autoruns in registry
keys = ['HKLM\\Software\\Microsoft\\Windows\\CurrentVersion\\Run', 'HKLM\\Software\\Microsoft\\Windows\\CurrentVersion\\Run /reg:64', 'HKCU\\Software\\Microsoft\\Windows\\CurrentVersion\\Run']
for key in keys:
autoruns[key] = []
cmd = ['reg', 'query', key]
for line in __salt__['cmd.run'](cmd, python_shell=False).splitlines():
if line and line[0:4] != 'HKEY' and (line[0:5] != 'ERROR'): # Remove junk lines
autoruns[key].append(line) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['for'], data=['key']]
# Find autoruns in user's startup folder
user_dir = 'C:\\Documents and Settings\\'
startup_dir = '\\Start Menu\\Programs\\Startup'
full_dirs = _get_dirs(user_dir, startup_dir)
if not full_dirs:
user_dir = 'C:\\Users\\'
startup_dir = '\\AppData\\Roaming\\Microsoft\\Windows\\Start Menu\\Programs\\Startup'
full_dirs = _get_dirs(user_dir, startup_dir) # depends on [control=['if'], data=[]]
for full_dir in full_dirs:
files = os.listdir(full_dir)
autoruns[full_dir] = []
for single_file in files:
autoruns[full_dir].append(single_file) # depends on [control=['for'], data=['single_file']] # depends on [control=['for'], data=['full_dir']]
return autoruns |
def cublasZher(handle, uplo, n, alpha, x, incx, A, lda):
"""
Rank-1 operation on Hermitian matrix.
"""
status = _libcublas.cublasZher_v2(handle,
_CUBLAS_FILL_MODE[uplo],
n, alpha, int(x), incx, int(A), lda)
cublasCheckStatus(status) | def function[cublasZher, parameter[handle, uplo, n, alpha, x, incx, A, lda]]:
constant[
Rank-1 operation on Hermitian matrix.
]
variable[status] assign[=] call[name[_libcublas].cublasZher_v2, parameter[name[handle], call[name[_CUBLAS_FILL_MODE]][name[uplo]], name[n], name[alpha], call[name[int], parameter[name[x]]], name[incx], call[name[int], parameter[name[A]]], name[lda]]]
call[name[cublasCheckStatus], parameter[name[status]]] | keyword[def] identifier[cublasZher] ( identifier[handle] , identifier[uplo] , identifier[n] , identifier[alpha] , identifier[x] , identifier[incx] , identifier[A] , identifier[lda] ):
literal[string]
identifier[status] = identifier[_libcublas] . identifier[cublasZher_v2] ( identifier[handle] ,
identifier[_CUBLAS_FILL_MODE] [ identifier[uplo] ],
identifier[n] , identifier[alpha] , identifier[int] ( identifier[x] ), identifier[incx] , identifier[int] ( identifier[A] ), identifier[lda] )
identifier[cublasCheckStatus] ( identifier[status] ) | def cublasZher(handle, uplo, n, alpha, x, incx, A, lda):
"""
Rank-1 operation on Hermitian matrix.
"""
status = _libcublas.cublasZher_v2(handle, _CUBLAS_FILL_MODE[uplo], n, alpha, int(x), incx, int(A), lda)
cublasCheckStatus(status) |
def child(self, child):
"""
Equivalent to Child(self, next) but with some canonicalization
"""
if isinstance(self, This) or isinstance(self, Root):
return child
elif isinstance(child, This):
return self
elif isinstance(child, Root):
return child
else:
return Child(self, child) | def function[child, parameter[self, child]]:
constant[
Equivalent to Child(self, next) but with some canonicalization
]
if <ast.BoolOp object at 0x7da1b0bcaaa0> begin[:]
return[name[child]] | keyword[def] identifier[child] ( identifier[self] , identifier[child] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[self] , identifier[This] ) keyword[or] identifier[isinstance] ( identifier[self] , identifier[Root] ):
keyword[return] identifier[child]
keyword[elif] identifier[isinstance] ( identifier[child] , identifier[This] ):
keyword[return] identifier[self]
keyword[elif] identifier[isinstance] ( identifier[child] , identifier[Root] ):
keyword[return] identifier[child]
keyword[else] :
keyword[return] identifier[Child] ( identifier[self] , identifier[child] ) | def child(self, child):
"""
Equivalent to Child(self, next) but with some canonicalization
"""
if isinstance(self, This) or isinstance(self, Root):
return child # depends on [control=['if'], data=[]]
elif isinstance(child, This):
return self # depends on [control=['if'], data=[]]
elif isinstance(child, Root):
return child # depends on [control=['if'], data=[]]
else:
return Child(self, child) |
def _replace_star(fmt, size):
"""
Replace the `*` placeholder in a format string (fmt), so that
struct.calcsize(fmt) is equal to the given `size` using the format
following the placeholder.
Raises `ValueError` if number of `*` is larger than 1. If no `*`
in `fmt`, returns `fmt` without checking its size!
Examples
--------
>>> _replace_star('ii*fi', 40)
'ii7fi'
"""
n_stars = fmt.count('*')
if n_stars > 1:
raise ValueError("More than one `*` in format (%s)." % fmt)
if n_stars:
i = fmt.find('*')
s = struct.calcsize(fmt.replace(fmt[i:i + 2], ''))
n = old_div((size - s), struct.calcsize(fmt[i + 1]))
fmt = fmt.replace('*', str(n))
return fmt | def function[_replace_star, parameter[fmt, size]]:
constant[
Replace the `*` placeholder in a format string (fmt), so that
struct.calcsize(fmt) is equal to the given `size` using the format
following the placeholder.
Raises `ValueError` if number of `*` is larger than 1. If no `*`
in `fmt`, returns `fmt` without checking its size!
Examples
--------
>>> _replace_star('ii*fi', 40)
'ii7fi'
]
variable[n_stars] assign[=] call[name[fmt].count, parameter[constant[*]]]
if compare[name[n_stars] greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da1b0f51e10>
if name[n_stars] begin[:]
variable[i] assign[=] call[name[fmt].find, parameter[constant[*]]]
variable[s] assign[=] call[name[struct].calcsize, parameter[call[name[fmt].replace, parameter[call[name[fmt]][<ast.Slice object at 0x7da1b0f50220>], constant[]]]]]
variable[n] assign[=] call[name[old_div], parameter[binary_operation[name[size] - name[s]], call[name[struct].calcsize, parameter[call[name[fmt]][binary_operation[name[i] + constant[1]]]]]]]
variable[fmt] assign[=] call[name[fmt].replace, parameter[constant[*], call[name[str], parameter[name[n]]]]]
return[name[fmt]] | keyword[def] identifier[_replace_star] ( identifier[fmt] , identifier[size] ):
literal[string]
identifier[n_stars] = identifier[fmt] . identifier[count] ( literal[string] )
keyword[if] identifier[n_stars] > literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[fmt] )
keyword[if] identifier[n_stars] :
identifier[i] = identifier[fmt] . identifier[find] ( literal[string] )
identifier[s] = identifier[struct] . identifier[calcsize] ( identifier[fmt] . identifier[replace] ( identifier[fmt] [ identifier[i] : identifier[i] + literal[int] ], literal[string] ))
identifier[n] = identifier[old_div] (( identifier[size] - identifier[s] ), identifier[struct] . identifier[calcsize] ( identifier[fmt] [ identifier[i] + literal[int] ]))
identifier[fmt] = identifier[fmt] . identifier[replace] ( literal[string] , identifier[str] ( identifier[n] ))
keyword[return] identifier[fmt] | def _replace_star(fmt, size):
"""
Replace the `*` placeholder in a format string (fmt), so that
struct.calcsize(fmt) is equal to the given `size` using the format
following the placeholder.
Raises `ValueError` if number of `*` is larger than 1. If no `*`
in `fmt`, returns `fmt` without checking its size!
Examples
--------
>>> _replace_star('ii*fi', 40)
'ii7fi'
"""
n_stars = fmt.count('*')
if n_stars > 1:
raise ValueError('More than one `*` in format (%s).' % fmt) # depends on [control=['if'], data=[]]
if n_stars:
i = fmt.find('*')
s = struct.calcsize(fmt.replace(fmt[i:i + 2], ''))
n = old_div(size - s, struct.calcsize(fmt[i + 1]))
fmt = fmt.replace('*', str(n)) # depends on [control=['if'], data=[]]
return fmt |
def online_time_to_string(value, timeFormat, utcOffset=0):
"""Converts AGOL timestamp to formatted string.
Args:
value (float): A UTC timestamp as reported by AGOL (time in ms since Unix epoch * 1000)
timeFormat (str): Date/Time format string as parsed by :py:func:`datetime.strftime`.
utcOffset (int): Hours difference from UTC and desired output. Default is 0 (remain in UTC).
Returns:
str: A string representation of the timestamp.
Examples:
>>> arcresthelper.common.online_time_to_string(1457167261000.0, "%Y-%m-%d %H:%M:%S")
'2016-03-05 00:41:01'
>>> arcresthelper.common.online_time_to_string(731392515000.0, '%m/%d/%Y %H:%M:%S', -8) # PST is UTC-8:00
'03/05/1993 12:35:15'
See Also:
:py:func:`local_time_to_online` for converting a :py:class:`datetime.datetime` object to AGOL timestamp
"""
try:
return datetime.datetime.fromtimestamp(value/1000 + utcOffset*3600).strftime(timeFormat)
except:
line, filename, synerror = trace()
raise ArcRestHelperError({
"function": "online_time_to_string",
"line": line,
"filename": filename,
"synerror": synerror,
}
)
finally:
pass | def function[online_time_to_string, parameter[value, timeFormat, utcOffset]]:
constant[Converts AGOL timestamp to formatted string.
Args:
value (float): A UTC timestamp as reported by AGOL (time in ms since Unix epoch * 1000)
timeFormat (str): Date/Time format string as parsed by :py:func:`datetime.strftime`.
utcOffset (int): Hours difference from UTC and desired output. Default is 0 (remain in UTC).
Returns:
str: A string representation of the timestamp.
Examples:
>>> arcresthelper.common.online_time_to_string(1457167261000.0, "%Y-%m-%d %H:%M:%S")
'2016-03-05 00:41:01'
>>> arcresthelper.common.online_time_to_string(731392515000.0, '%m/%d/%Y %H:%M:%S', -8) # PST is UTC-8:00
'03/05/1993 12:35:15'
See Also:
:py:func:`local_time_to_online` for converting a :py:class:`datetime.datetime` object to AGOL timestamp
]
<ast.Try object at 0x7da2041d9e70> | keyword[def] identifier[online_time_to_string] ( identifier[value] , identifier[timeFormat] , identifier[utcOffset] = literal[int] ):
literal[string]
keyword[try] :
keyword[return] identifier[datetime] . identifier[datetime] . identifier[fromtimestamp] ( identifier[value] / literal[int] + identifier[utcOffset] * literal[int] ). identifier[strftime] ( identifier[timeFormat] )
keyword[except] :
identifier[line] , identifier[filename] , identifier[synerror] = identifier[trace] ()
keyword[raise] identifier[ArcRestHelperError] ({
literal[string] : literal[string] ,
literal[string] : identifier[line] ,
literal[string] : identifier[filename] ,
literal[string] : identifier[synerror] ,
}
)
keyword[finally] :
keyword[pass] | def online_time_to_string(value, timeFormat, utcOffset=0):
"""Converts AGOL timestamp to formatted string.
Args:
value (float): A UTC timestamp as reported by AGOL (time in ms since Unix epoch * 1000)
timeFormat (str): Date/Time format string as parsed by :py:func:`datetime.strftime`.
utcOffset (int): Hours difference from UTC and desired output. Default is 0 (remain in UTC).
Returns:
str: A string representation of the timestamp.
Examples:
>>> arcresthelper.common.online_time_to_string(1457167261000.0, "%Y-%m-%d %H:%M:%S")
'2016-03-05 00:41:01'
>>> arcresthelper.common.online_time_to_string(731392515000.0, '%m/%d/%Y %H:%M:%S', -8) # PST is UTC-8:00
'03/05/1993 12:35:15'
See Also:
:py:func:`local_time_to_online` for converting a :py:class:`datetime.datetime` object to AGOL timestamp
"""
try:
return datetime.datetime.fromtimestamp(value / 1000 + utcOffset * 3600).strftime(timeFormat) # depends on [control=['try'], data=[]]
except:
(line, filename, synerror) = trace()
raise ArcRestHelperError({'function': 'online_time_to_string', 'line': line, 'filename': filename, 'synerror': synerror}) # depends on [control=['except'], data=[]]
finally:
pass |
def _take_values(self, item: Parameterized) -> DictBasicType:
"""Uses super()._take_values() method and removes autoflow cache in-place.
:param item: GPflow parameterized object.
:return: dictionary snapshot of the parameter object."""
values = super()._take_values(item)
values = {k: v for k, v in values.items() if not k.startswith(AutoFlow.__autoflow_prefix__)}
return values | def function[_take_values, parameter[self, item]]:
constant[Uses super()._take_values() method and removes autoflow cache in-place.
:param item: GPflow parameterized object.
:return: dictionary snapshot of the parameter object.]
variable[values] assign[=] call[call[name[super], parameter[]]._take_values, parameter[name[item]]]
variable[values] assign[=] <ast.DictComp object at 0x7da1b1cefa90>
return[name[values]] | keyword[def] identifier[_take_values] ( identifier[self] , identifier[item] : identifier[Parameterized] )-> identifier[DictBasicType] :
literal[string]
identifier[values] = identifier[super] (). identifier[_take_values] ( identifier[item] )
identifier[values] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[values] . identifier[items] () keyword[if] keyword[not] identifier[k] . identifier[startswith] ( identifier[AutoFlow] . identifier[__autoflow_prefix__] )}
keyword[return] identifier[values] | def _take_values(self, item: Parameterized) -> DictBasicType:
"""Uses super()._take_values() method and removes autoflow cache in-place.
:param item: GPflow parameterized object.
:return: dictionary snapshot of the parameter object."""
values = super()._take_values(item)
values = {k: v for (k, v) in values.items() if not k.startswith(AutoFlow.__autoflow_prefix__)}
return values |
def __get_gui_handle(self, root_dir):
""" get the filepath and filehandle to the .env file for the environment """
gui_path = os.path.join(root_dir, '.gui')
fh = open(gui_path, "w+")
return (gui_path, fh) | def function[__get_gui_handle, parameter[self, root_dir]]:
constant[ get the filepath and filehandle to the .env file for the environment ]
variable[gui_path] assign[=] call[name[os].path.join, parameter[name[root_dir], constant[.gui]]]
variable[fh] assign[=] call[name[open], parameter[name[gui_path], constant[w+]]]
return[tuple[[<ast.Name object at 0x7da204621510>, <ast.Name object at 0x7da204623e20>]]] | keyword[def] identifier[__get_gui_handle] ( identifier[self] , identifier[root_dir] ):
literal[string]
identifier[gui_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[root_dir] , literal[string] )
identifier[fh] = identifier[open] ( identifier[gui_path] , literal[string] )
keyword[return] ( identifier[gui_path] , identifier[fh] ) | def __get_gui_handle(self, root_dir):
""" get the filepath and filehandle to the .env file for the environment """
gui_path = os.path.join(root_dir, '.gui')
fh = open(gui_path, 'w+')
return (gui_path, fh) |
def raw(self, command, arguments, queue=None, max_time=None, stream=False, tags=None, id=None):
"""
Implements the low level command call, this needs to build the command structure
and push it on the correct queue.
:param command: Command name to execute supported by the node (ex: core.system, info.cpu, etc...)
check documentation for list of built in commands
:param arguments: A dict of required command arguments depends on the command name.
:param queue: command queue (commands on the same queue are executed sequentially)
:param max_time: kill job server side if it exceeded this amount of seconds
:param stream: If True, process stdout and stderr are pushed to a special queue (stream:<id>) so
client can stream output
:param tags: job tags
:param id: job id. Generated if not supplied
:return: Response object
"""
args = {
'container': self._container,
'command': {
'command': command,
'arguments': arguments,
'queue': queue,
'max_time': max_time,
'stream': stream,
'tags': tags,
'id': id,
},
}
# check input
self._raw_chk.check(args)
response = self._client.raw('corex.dispatch', args)
result = response.get()
if result.state != 'SUCCESS':
raise RuntimeError('failed to dispatch command to container: %s' % result.data)
cmd_id = json.loads(result.data)
return self._client.response_for(cmd_id) | def function[raw, parameter[self, command, arguments, queue, max_time, stream, tags, id]]:
constant[
Implements the low level command call, this needs to build the command structure
and push it on the correct queue.
:param command: Command name to execute supported by the node (ex: core.system, info.cpu, etc...)
check documentation for list of built in commands
:param arguments: A dict of required command arguments depends on the command name.
:param queue: command queue (commands on the same queue are executed sequentially)
:param max_time: kill job server side if it exceeded this amount of seconds
:param stream: If True, process stdout and stderr are pushed to a special queue (stream:<id>) so
client can stream output
:param tags: job tags
:param id: job id. Generated if not supplied
:return: Response object
]
variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da1b05bc940>, <ast.Constant object at 0x7da1b05bc280>], [<ast.Attribute object at 0x7da1b05bfd30>, <ast.Dict object at 0x7da1b05bd180>]]
call[name[self]._raw_chk.check, parameter[name[args]]]
variable[response] assign[=] call[name[self]._client.raw, parameter[constant[corex.dispatch], name[args]]]
variable[result] assign[=] call[name[response].get, parameter[]]
if compare[name[result].state not_equal[!=] constant[SUCCESS]] begin[:]
<ast.Raise object at 0x7da1b0477a00>
variable[cmd_id] assign[=] call[name[json].loads, parameter[name[result].data]]
return[call[name[self]._client.response_for, parameter[name[cmd_id]]]] | keyword[def] identifier[raw] ( identifier[self] , identifier[command] , identifier[arguments] , identifier[queue] = keyword[None] , identifier[max_time] = keyword[None] , identifier[stream] = keyword[False] , identifier[tags] = keyword[None] , identifier[id] = keyword[None] ):
literal[string]
identifier[args] ={
literal[string] : identifier[self] . identifier[_container] ,
literal[string] :{
literal[string] : identifier[command] ,
literal[string] : identifier[arguments] ,
literal[string] : identifier[queue] ,
literal[string] : identifier[max_time] ,
literal[string] : identifier[stream] ,
literal[string] : identifier[tags] ,
literal[string] : identifier[id] ,
},
}
identifier[self] . identifier[_raw_chk] . identifier[check] ( identifier[args] )
identifier[response] = identifier[self] . identifier[_client] . identifier[raw] ( literal[string] , identifier[args] )
identifier[result] = identifier[response] . identifier[get] ()
keyword[if] identifier[result] . identifier[state] != literal[string] :
keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[result] . identifier[data] )
identifier[cmd_id] = identifier[json] . identifier[loads] ( identifier[result] . identifier[data] )
keyword[return] identifier[self] . identifier[_client] . identifier[response_for] ( identifier[cmd_id] ) | def raw(self, command, arguments, queue=None, max_time=None, stream=False, tags=None, id=None):
"""
Implements the low level command call, this needs to build the command structure
and push it on the correct queue.
:param command: Command name to execute supported by the node (ex: core.system, info.cpu, etc...)
check documentation for list of built in commands
:param arguments: A dict of required command arguments depends on the command name.
:param queue: command queue (commands on the same queue are executed sequentially)
:param max_time: kill job server side if it exceeded this amount of seconds
:param stream: If True, process stdout and stderr are pushed to a special queue (stream:<id>) so
client can stream output
:param tags: job tags
:param id: job id. Generated if not supplied
:return: Response object
"""
args = {'container': self._container, 'command': {'command': command, 'arguments': arguments, 'queue': queue, 'max_time': max_time, 'stream': stream, 'tags': tags, 'id': id}}
# check input
self._raw_chk.check(args)
response = self._client.raw('corex.dispatch', args)
result = response.get()
if result.state != 'SUCCESS':
raise RuntimeError('failed to dispatch command to container: %s' % result.data) # depends on [control=['if'], data=[]]
cmd_id = json.loads(result.data)
return self._client.response_for(cmd_id) |
def utc_book_close_time(self):
"""
The book close time in utc.
"""
tz = pytz.timezone(self.timezone)
close_time = datetime.datetime.strptime(self.close_time, '%H:%M:%S').time()
close_time = tz.localize(datetime.datetime.combine(datetime.datetime.now(tz), close_time))
return close_time.astimezone(pytz.utc).time() | def function[utc_book_close_time, parameter[self]]:
constant[
The book close time in utc.
]
variable[tz] assign[=] call[name[pytz].timezone, parameter[name[self].timezone]]
variable[close_time] assign[=] call[call[name[datetime].datetime.strptime, parameter[name[self].close_time, constant[%H:%M:%S]]].time, parameter[]]
variable[close_time] assign[=] call[name[tz].localize, parameter[call[name[datetime].datetime.combine, parameter[call[name[datetime].datetime.now, parameter[name[tz]]], name[close_time]]]]]
return[call[call[name[close_time].astimezone, parameter[name[pytz].utc]].time, parameter[]]] | keyword[def] identifier[utc_book_close_time] ( identifier[self] ):
literal[string]
identifier[tz] = identifier[pytz] . identifier[timezone] ( identifier[self] . identifier[timezone] )
identifier[close_time] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[self] . identifier[close_time] , literal[string] ). identifier[time] ()
identifier[close_time] = identifier[tz] . identifier[localize] ( identifier[datetime] . identifier[datetime] . identifier[combine] ( identifier[datetime] . identifier[datetime] . identifier[now] ( identifier[tz] ), identifier[close_time] ))
keyword[return] identifier[close_time] . identifier[astimezone] ( identifier[pytz] . identifier[utc] ). identifier[time] () | def utc_book_close_time(self):
"""
The book close time in utc.
"""
tz = pytz.timezone(self.timezone)
close_time = datetime.datetime.strptime(self.close_time, '%H:%M:%S').time()
close_time = tz.localize(datetime.datetime.combine(datetime.datetime.now(tz), close_time))
return close_time.astimezone(pytz.utc).time() |
def check_power(self):
"""Returns the power state of the smart power strip."""
state = self.check_power_raw()
data = {}
data['s1'] = bool(state & 0x01)
data['s2'] = bool(state & 0x02)
data['s3'] = bool(state & 0x04)
data['s4'] = bool(state & 0x08)
return data | def function[check_power, parameter[self]]:
constant[Returns the power state of the smart power strip.]
variable[state] assign[=] call[name[self].check_power_raw, parameter[]]
variable[data] assign[=] dictionary[[], []]
call[name[data]][constant[s1]] assign[=] call[name[bool], parameter[binary_operation[name[state] <ast.BitAnd object at 0x7da2590d6b60> constant[1]]]]
call[name[data]][constant[s2]] assign[=] call[name[bool], parameter[binary_operation[name[state] <ast.BitAnd object at 0x7da2590d6b60> constant[2]]]]
call[name[data]][constant[s3]] assign[=] call[name[bool], parameter[binary_operation[name[state] <ast.BitAnd object at 0x7da2590d6b60> constant[4]]]]
call[name[data]][constant[s4]] assign[=] call[name[bool], parameter[binary_operation[name[state] <ast.BitAnd object at 0x7da2590d6b60> constant[8]]]]
return[name[data]] | keyword[def] identifier[check_power] ( identifier[self] ):
literal[string]
identifier[state] = identifier[self] . identifier[check_power_raw] ()
identifier[data] ={}
identifier[data] [ literal[string] ]= identifier[bool] ( identifier[state] & literal[int] )
identifier[data] [ literal[string] ]= identifier[bool] ( identifier[state] & literal[int] )
identifier[data] [ literal[string] ]= identifier[bool] ( identifier[state] & literal[int] )
identifier[data] [ literal[string] ]= identifier[bool] ( identifier[state] & literal[int] )
keyword[return] identifier[data] | def check_power(self):
"""Returns the power state of the smart power strip."""
state = self.check_power_raw()
data = {}
data['s1'] = bool(state & 1)
data['s2'] = bool(state & 2)
data['s3'] = bool(state & 4)
data['s4'] = bool(state & 8)
return data |
def command_line(argv):
"""Instantiate an editor and process arguments.
Optional argument:
- processed_paths: paths processed are appended to the list.
"""
arguments = parse_command_line(argv)
if arguments.generate:
generate_fixer_file(arguments.generate)
paths = edit_files(arguments.patterns,
expressions=arguments.expressions,
functions=arguments.functions,
executables=arguments.executables,
start_dirs=arguments.start_dirs,
max_depth=arguments.max_depth,
dry_run=arguments.dry_run,
output=arguments.output,
encoding=arguments.encoding,
newline=arguments.newline)
# If the output is not sys.stdout, we need to close it because
# argparse.FileType does not do it for us.
is_sys = arguments.output in [sys.stdout, sys.stderr]
if not is_sys and isinstance(arguments.output, io.IOBase):
arguments.output.close()
return paths | def function[command_line, parameter[argv]]:
constant[Instantiate an editor and process arguments.
Optional argument:
- processed_paths: paths processed are appended to the list.
]
variable[arguments] assign[=] call[name[parse_command_line], parameter[name[argv]]]
if name[arguments].generate begin[:]
call[name[generate_fixer_file], parameter[name[arguments].generate]]
variable[paths] assign[=] call[name[edit_files], parameter[name[arguments].patterns]]
variable[is_sys] assign[=] compare[name[arguments].output in list[[<ast.Attribute object at 0x7da1b0da3b20>, <ast.Attribute object at 0x7da1b0da2a40>]]]
if <ast.BoolOp object at 0x7da1b0da3940> begin[:]
call[name[arguments].output.close, parameter[]]
return[name[paths]] | keyword[def] identifier[command_line] ( identifier[argv] ):
literal[string]
identifier[arguments] = identifier[parse_command_line] ( identifier[argv] )
keyword[if] identifier[arguments] . identifier[generate] :
identifier[generate_fixer_file] ( identifier[arguments] . identifier[generate] )
identifier[paths] = identifier[edit_files] ( identifier[arguments] . identifier[patterns] ,
identifier[expressions] = identifier[arguments] . identifier[expressions] ,
identifier[functions] = identifier[arguments] . identifier[functions] ,
identifier[executables] = identifier[arguments] . identifier[executables] ,
identifier[start_dirs] = identifier[arguments] . identifier[start_dirs] ,
identifier[max_depth] = identifier[arguments] . identifier[max_depth] ,
identifier[dry_run] = identifier[arguments] . identifier[dry_run] ,
identifier[output] = identifier[arguments] . identifier[output] ,
identifier[encoding] = identifier[arguments] . identifier[encoding] ,
identifier[newline] = identifier[arguments] . identifier[newline] )
identifier[is_sys] = identifier[arguments] . identifier[output] keyword[in] [ identifier[sys] . identifier[stdout] , identifier[sys] . identifier[stderr] ]
keyword[if] keyword[not] identifier[is_sys] keyword[and] identifier[isinstance] ( identifier[arguments] . identifier[output] , identifier[io] . identifier[IOBase] ):
identifier[arguments] . identifier[output] . identifier[close] ()
keyword[return] identifier[paths] | def command_line(argv):
"""Instantiate an editor and process arguments.
Optional argument:
- processed_paths: paths processed are appended to the list.
"""
arguments = parse_command_line(argv)
if arguments.generate:
generate_fixer_file(arguments.generate) # depends on [control=['if'], data=[]]
paths = edit_files(arguments.patterns, expressions=arguments.expressions, functions=arguments.functions, executables=arguments.executables, start_dirs=arguments.start_dirs, max_depth=arguments.max_depth, dry_run=arguments.dry_run, output=arguments.output, encoding=arguments.encoding, newline=arguments.newline)
# If the output is not sys.stdout, we need to close it because
# argparse.FileType does not do it for us.
is_sys = arguments.output in [sys.stdout, sys.stderr]
if not is_sys and isinstance(arguments.output, io.IOBase):
arguments.output.close() # depends on [control=['if'], data=[]]
return paths |
def function_scoping(self, node, frame, children=None,
find_special=True):
"""In Jinja a few statements require the help of anonymous
functions. Those are currently macros and call blocks and in
the future also recursive loops. As there is currently
technical limitation that doesn't allow reading and writing a
variable in a scope where the initial value is coming from an
outer scope, this function tries to fall back with a common
error message. Additionally the frame passed is modified so
that the argumetns are collected and callers are looked up.
This will return the modified frame.
"""
# we have to iterate twice over it, make sure that works
if children is None:
children = node.iter_child_nodes()
children = list(children)
func_frame = frame.inner()
func_frame.inspect(children, hard_scope=True)
# variables that are undeclared (accessed before declaration) and
# declared locally *and* part of an outside scope raise a template
# assertion error. Reason: we can't generate reasonable code from
# it without aliasing all the variables.
# this could be fixed in Python 3 where we have the nonlocal
# keyword or if we switch to bytecode generation
overriden_closure_vars = (
func_frame.identifiers.undeclared &
func_frame.identifiers.declared &
(func_frame.identifiers.declared_locally |
func_frame.identifiers.declared_parameter)
)
if overriden_closure_vars:
self.fail('It\'s not possible to set and access variables '
'derived from an outer scope! (affects: %s)' %
', '.join(sorted(overriden_closure_vars)), node.lineno)
# remove variables from a closure from the frame's undeclared
# identifiers.
func_frame.identifiers.undeclared -= (
func_frame.identifiers.undeclared &
func_frame.identifiers.declared
)
# no special variables for this scope, abort early
if not find_special:
return func_frame
func_frame.accesses_kwargs = False
func_frame.accesses_varargs = False
func_frame.accesses_caller = False
func_frame.arguments = args = ['l_' + x.name for x in node.args]
undeclared = find_undeclared(children, ('caller', 'kwargs', 'varargs'))
if 'caller' in undeclared:
func_frame.accesses_caller = True
func_frame.identifiers.add_special('caller')
args.append('l_caller')
if 'kwargs' in undeclared:
func_frame.accesses_kwargs = True
func_frame.identifiers.add_special('kwargs')
args.append('l_kwargs')
if 'varargs' in undeclared:
func_frame.accesses_varargs = True
func_frame.identifiers.add_special('varargs')
args.append('l_varargs')
return func_frame | def function[function_scoping, parameter[self, node, frame, children, find_special]]:
constant[In Jinja a few statements require the help of anonymous
functions. Those are currently macros and call blocks and in
the future also recursive loops. As there is currently
technical limitation that doesn't allow reading and writing a
variable in a scope where the initial value is coming from an
outer scope, this function tries to fall back with a common
error message. Additionally the frame passed is modified so
that the argumetns are collected and callers are looked up.
This will return the modified frame.
]
if compare[name[children] is constant[None]] begin[:]
variable[children] assign[=] call[name[node].iter_child_nodes, parameter[]]
variable[children] assign[=] call[name[list], parameter[name[children]]]
variable[func_frame] assign[=] call[name[frame].inner, parameter[]]
call[name[func_frame].inspect, parameter[name[children]]]
variable[overriden_closure_vars] assign[=] binary_operation[binary_operation[name[func_frame].identifiers.undeclared <ast.BitAnd object at 0x7da2590d6b60> name[func_frame].identifiers.declared] <ast.BitAnd object at 0x7da2590d6b60> binary_operation[name[func_frame].identifiers.declared_locally <ast.BitOr object at 0x7da2590d6aa0> name[func_frame].identifiers.declared_parameter]]
if name[overriden_closure_vars] begin[:]
call[name[self].fail, parameter[binary_operation[constant[It's not possible to set and access variables derived from an outer scope! (affects: %s)] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[call[name[sorted], parameter[name[overriden_closure_vars]]]]]], name[node].lineno]]
<ast.AugAssign object at 0x7da1b1f0bc70>
if <ast.UnaryOp object at 0x7da1b1f0ab30> begin[:]
return[name[func_frame]]
name[func_frame].accesses_kwargs assign[=] constant[False]
name[func_frame].accesses_varargs assign[=] constant[False]
name[func_frame].accesses_caller assign[=] constant[False]
name[func_frame].arguments assign[=] <ast.ListComp object at 0x7da1b1f0bdc0>
variable[undeclared] assign[=] call[name[find_undeclared], parameter[name[children], tuple[[<ast.Constant object at 0x7da1b1f0b100>, <ast.Constant object at 0x7da1b1f0aad0>, <ast.Constant object at 0x7da1b1f0b040>]]]]
if compare[constant[caller] in name[undeclared]] begin[:]
name[func_frame].accesses_caller assign[=] constant[True]
call[name[func_frame].identifiers.add_special, parameter[constant[caller]]]
call[name[args].append, parameter[constant[l_caller]]]
if compare[constant[kwargs] in name[undeclared]] begin[:]
name[func_frame].accesses_kwargs assign[=] constant[True]
call[name[func_frame].identifiers.add_special, parameter[constant[kwargs]]]
call[name[args].append, parameter[constant[l_kwargs]]]
if compare[constant[varargs] in name[undeclared]] begin[:]
name[func_frame].accesses_varargs assign[=] constant[True]
call[name[func_frame].identifiers.add_special, parameter[constant[varargs]]]
call[name[args].append, parameter[constant[l_varargs]]]
return[name[func_frame]] | keyword[def] identifier[function_scoping] ( identifier[self] , identifier[node] , identifier[frame] , identifier[children] = keyword[None] ,
identifier[find_special] = keyword[True] ):
literal[string]
keyword[if] identifier[children] keyword[is] keyword[None] :
identifier[children] = identifier[node] . identifier[iter_child_nodes] ()
identifier[children] = identifier[list] ( identifier[children] )
identifier[func_frame] = identifier[frame] . identifier[inner] ()
identifier[func_frame] . identifier[inspect] ( identifier[children] , identifier[hard_scope] = keyword[True] )
identifier[overriden_closure_vars] =(
identifier[func_frame] . identifier[identifiers] . identifier[undeclared] &
identifier[func_frame] . identifier[identifiers] . identifier[declared] &
( identifier[func_frame] . identifier[identifiers] . identifier[declared_locally] |
identifier[func_frame] . identifier[identifiers] . identifier[declared_parameter] )
)
keyword[if] identifier[overriden_closure_vars] :
identifier[self] . identifier[fail] ( literal[string]
literal[string] %
literal[string] . identifier[join] ( identifier[sorted] ( identifier[overriden_closure_vars] )), identifier[node] . identifier[lineno] )
identifier[func_frame] . identifier[identifiers] . identifier[undeclared] -=(
identifier[func_frame] . identifier[identifiers] . identifier[undeclared] &
identifier[func_frame] . identifier[identifiers] . identifier[declared]
)
keyword[if] keyword[not] identifier[find_special] :
keyword[return] identifier[func_frame]
identifier[func_frame] . identifier[accesses_kwargs] = keyword[False]
identifier[func_frame] . identifier[accesses_varargs] = keyword[False]
identifier[func_frame] . identifier[accesses_caller] = keyword[False]
identifier[func_frame] . identifier[arguments] = identifier[args] =[ literal[string] + identifier[x] . identifier[name] keyword[for] identifier[x] keyword[in] identifier[node] . identifier[args] ]
identifier[undeclared] = identifier[find_undeclared] ( identifier[children] ,( literal[string] , literal[string] , literal[string] ))
keyword[if] literal[string] keyword[in] identifier[undeclared] :
identifier[func_frame] . identifier[accesses_caller] = keyword[True]
identifier[func_frame] . identifier[identifiers] . identifier[add_special] ( literal[string] )
identifier[args] . identifier[append] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[undeclared] :
identifier[func_frame] . identifier[accesses_kwargs] = keyword[True]
identifier[func_frame] . identifier[identifiers] . identifier[add_special] ( literal[string] )
identifier[args] . identifier[append] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[undeclared] :
identifier[func_frame] . identifier[accesses_varargs] = keyword[True]
identifier[func_frame] . identifier[identifiers] . identifier[add_special] ( literal[string] )
identifier[args] . identifier[append] ( literal[string] )
keyword[return] identifier[func_frame] | def function_scoping(self, node, frame, children=None, find_special=True):
"""In Jinja a few statements require the help of anonymous
functions. Those are currently macros and call blocks and in
the future also recursive loops. As there is currently
technical limitation that doesn't allow reading and writing a
variable in a scope where the initial value is coming from an
outer scope, this function tries to fall back with a common
error message. Additionally the frame passed is modified so
that the argumetns are collected and callers are looked up.
This will return the modified frame.
"""
# we have to iterate twice over it, make sure that works
if children is None:
children = node.iter_child_nodes() # depends on [control=['if'], data=['children']]
children = list(children)
func_frame = frame.inner()
func_frame.inspect(children, hard_scope=True)
# variables that are undeclared (accessed before declaration) and
# declared locally *and* part of an outside scope raise a template
# assertion error. Reason: we can't generate reasonable code from
# it without aliasing all the variables.
# this could be fixed in Python 3 where we have the nonlocal
# keyword or if we switch to bytecode generation
overriden_closure_vars = func_frame.identifiers.undeclared & func_frame.identifiers.declared & (func_frame.identifiers.declared_locally | func_frame.identifiers.declared_parameter)
if overriden_closure_vars:
self.fail("It's not possible to set and access variables derived from an outer scope! (affects: %s)" % ', '.join(sorted(overriden_closure_vars)), node.lineno) # depends on [control=['if'], data=[]]
# remove variables from a closure from the frame's undeclared
# identifiers.
func_frame.identifiers.undeclared -= func_frame.identifiers.undeclared & func_frame.identifiers.declared
# no special variables for this scope, abort early
if not find_special:
return func_frame # depends on [control=['if'], data=[]]
func_frame.accesses_kwargs = False
func_frame.accesses_varargs = False
func_frame.accesses_caller = False
func_frame.arguments = args = ['l_' + x.name for x in node.args]
undeclared = find_undeclared(children, ('caller', 'kwargs', 'varargs'))
if 'caller' in undeclared:
func_frame.accesses_caller = True
func_frame.identifiers.add_special('caller')
args.append('l_caller') # depends on [control=['if'], data=[]]
if 'kwargs' in undeclared:
func_frame.accesses_kwargs = True
func_frame.identifiers.add_special('kwargs')
args.append('l_kwargs') # depends on [control=['if'], data=[]]
if 'varargs' in undeclared:
func_frame.accesses_varargs = True
func_frame.identifiers.add_special('varargs')
args.append('l_varargs') # depends on [control=['if'], data=[]]
return func_frame |
def enable_caching(self):
"Enable the cache of this object."
self.caching_enabled = True
for c in self.values():
c.enable_cacher() | def function[enable_caching, parameter[self]]:
constant[Enable the cache of this object.]
name[self].caching_enabled assign[=] constant[True]
for taget[name[c]] in starred[call[name[self].values, parameter[]]] begin[:]
call[name[c].enable_cacher, parameter[]] | keyword[def] identifier[enable_caching] ( identifier[self] ):
literal[string]
identifier[self] . identifier[caching_enabled] = keyword[True]
keyword[for] identifier[c] keyword[in] identifier[self] . identifier[values] ():
identifier[c] . identifier[enable_cacher] () | def enable_caching(self):
"""Enable the cache of this object."""
self.caching_enabled = True
for c in self.values():
c.enable_cacher() # depends on [control=['for'], data=['c']] |
def xunit(self):
"""Unit of x-axis index
:type: `~astropy.units.Unit`
"""
try:
return self._dx.unit
except AttributeError:
try:
return self._x0.unit
except AttributeError:
return self._default_xunit | def function[xunit, parameter[self]]:
constant[Unit of x-axis index
:type: `~astropy.units.Unit`
]
<ast.Try object at 0x7da18f09ca60> | keyword[def] identifier[xunit] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[_dx] . identifier[unit]
keyword[except] identifier[AttributeError] :
keyword[try] :
keyword[return] identifier[self] . identifier[_x0] . identifier[unit]
keyword[except] identifier[AttributeError] :
keyword[return] identifier[self] . identifier[_default_xunit] | def xunit(self):
"""Unit of x-axis index
:type: `~astropy.units.Unit`
"""
try:
return self._dx.unit # depends on [control=['try'], data=[]]
except AttributeError:
try:
return self._x0.unit # depends on [control=['try'], data=[]]
except AttributeError:
return self._default_xunit # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] |
def fromtama(file, coltype = lal.LIGOTimeGPS):
"""
Read a segmentlist from the file object file containing TAMA
locked-segments data. Parsing stops on the first line that cannot
be parsed (which is consumed). The segmentlist will be created
with segments whose boundaries are of type coltype, which should
raise ValueError if it cannot convert its string argument.
NOTE: TAMA locked-segments files contain non-integer start and end
times, so the default column type is set to LIGOTimeGPS.
NOTE: the output is a segmentlist as described by the file; if
the segments in the input file are not coalesced or out of order,
then thusly shall be the output of this function. It is
recommended that this function's output be coalesced before use.
"""
segmentpat = re.compile(r"\A\s*\S+\s+\S+\s+\S+\s+([\d.+-eE]+)\s+([\d.+-eE]+)")
l = segments.segmentlist()
for line in file:
try:
[tokens] = segmentpat.findall(line)
l.append(segments.segment(map(coltype, tokens[0:2])))
except ValueError:
break
return l | def function[fromtama, parameter[file, coltype]]:
constant[
Read a segmentlist from the file object file containing TAMA
locked-segments data. Parsing stops on the first line that cannot
be parsed (which is consumed). The segmentlist will be created
with segments whose boundaries are of type coltype, which should
raise ValueError if it cannot convert its string argument.
NOTE: TAMA locked-segments files contain non-integer start and end
times, so the default column type is set to LIGOTimeGPS.
NOTE: the output is a segmentlist as described by the file; if
the segments in the input file are not coalesced or out of order,
then thusly shall be the output of this function. It is
recommended that this function's output be coalesced before use.
]
variable[segmentpat] assign[=] call[name[re].compile, parameter[constant[\A\s*\S+\s+\S+\s+\S+\s+([\d.+-eE]+)\s+([\d.+-eE]+)]]]
variable[l] assign[=] call[name[segments].segmentlist, parameter[]]
for taget[name[line]] in starred[name[file]] begin[:]
<ast.Try object at 0x7da18dc06950>
return[name[l]] | keyword[def] identifier[fromtama] ( identifier[file] , identifier[coltype] = identifier[lal] . identifier[LIGOTimeGPS] ):
literal[string]
identifier[segmentpat] = identifier[re] . identifier[compile] ( literal[string] )
identifier[l] = identifier[segments] . identifier[segmentlist] ()
keyword[for] identifier[line] keyword[in] identifier[file] :
keyword[try] :
[ identifier[tokens] ]= identifier[segmentpat] . identifier[findall] ( identifier[line] )
identifier[l] . identifier[append] ( identifier[segments] . identifier[segment] ( identifier[map] ( identifier[coltype] , identifier[tokens] [ literal[int] : literal[int] ])))
keyword[except] identifier[ValueError] :
keyword[break]
keyword[return] identifier[l] | def fromtama(file, coltype=lal.LIGOTimeGPS):
"""
Read a segmentlist from the file object file containing TAMA
locked-segments data. Parsing stops on the first line that cannot
be parsed (which is consumed). The segmentlist will be created
with segments whose boundaries are of type coltype, which should
raise ValueError if it cannot convert its string argument.
NOTE: TAMA locked-segments files contain non-integer start and end
times, so the default column type is set to LIGOTimeGPS.
NOTE: the output is a segmentlist as described by the file; if
the segments in the input file are not coalesced or out of order,
then thusly shall be the output of this function. It is
recommended that this function's output be coalesced before use.
"""
segmentpat = re.compile('\\A\\s*\\S+\\s+\\S+\\s+\\S+\\s+([\\d.+-eE]+)\\s+([\\d.+-eE]+)')
l = segments.segmentlist()
for line in file:
try:
[tokens] = segmentpat.findall(line)
l.append(segments.segment(map(coltype, tokens[0:2]))) # depends on [control=['try'], data=[]]
except ValueError:
break # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['line']]
return l |
def get_ntp_servers(self):
"""Implementation of get_ntp_servers for IOS.
Returns the NTP servers configuration as dictionary.
The keys of the dictionary represent the IP Addresses of the servers.
Inner dictionaries do not have yet any available keys.
Example::
{
'192.168.0.1': {},
'17.72.148.53': {},
'37.187.56.220': {},
'162.158.20.18': {}
}
"""
ntp_servers = {}
command = "show run | include ntp server"
output = self._send_command(command)
for line in output.splitlines():
split_line = line.split()
if "vrf" == split_line[2]:
ntp_servers[split_line[4]] = {}
else:
ntp_servers[split_line[2]] = {}
return ntp_servers | def function[get_ntp_servers, parameter[self]]:
constant[Implementation of get_ntp_servers for IOS.
Returns the NTP servers configuration as dictionary.
The keys of the dictionary represent the IP Addresses of the servers.
Inner dictionaries do not have yet any available keys.
Example::
{
'192.168.0.1': {},
'17.72.148.53': {},
'37.187.56.220': {},
'162.158.20.18': {}
}
]
variable[ntp_servers] assign[=] dictionary[[], []]
variable[command] assign[=] constant[show run | include ntp server]
variable[output] assign[=] call[name[self]._send_command, parameter[name[command]]]
for taget[name[line]] in starred[call[name[output].splitlines, parameter[]]] begin[:]
variable[split_line] assign[=] call[name[line].split, parameter[]]
if compare[constant[vrf] equal[==] call[name[split_line]][constant[2]]] begin[:]
call[name[ntp_servers]][call[name[split_line]][constant[4]]] assign[=] dictionary[[], []]
return[name[ntp_servers]] | keyword[def] identifier[get_ntp_servers] ( identifier[self] ):
literal[string]
identifier[ntp_servers] ={}
identifier[command] = literal[string]
identifier[output] = identifier[self] . identifier[_send_command] ( identifier[command] )
keyword[for] identifier[line] keyword[in] identifier[output] . identifier[splitlines] ():
identifier[split_line] = identifier[line] . identifier[split] ()
keyword[if] literal[string] == identifier[split_line] [ literal[int] ]:
identifier[ntp_servers] [ identifier[split_line] [ literal[int] ]]={}
keyword[else] :
identifier[ntp_servers] [ identifier[split_line] [ literal[int] ]]={}
keyword[return] identifier[ntp_servers] | def get_ntp_servers(self):
"""Implementation of get_ntp_servers for IOS.
Returns the NTP servers configuration as dictionary.
The keys of the dictionary represent the IP Addresses of the servers.
Inner dictionaries do not have yet any available keys.
Example::
{
'192.168.0.1': {},
'17.72.148.53': {},
'37.187.56.220': {},
'162.158.20.18': {}
}
"""
ntp_servers = {}
command = 'show run | include ntp server'
output = self._send_command(command)
for line in output.splitlines():
split_line = line.split()
if 'vrf' == split_line[2]:
ntp_servers[split_line[4]] = {} # depends on [control=['if'], data=[]]
else:
ntp_servers[split_line[2]] = {} # depends on [control=['for'], data=['line']]
return ntp_servers |
def _complete_task(self, task_name, **kwargs):
""" Marks this task as completed. Kwargs are stored in the run log. """
logger.debug('Job {0} marking task {1} as completed'.format(self.name, task_name))
self.run_log['tasks'][task_name] = kwargs
for node in self.downstream(task_name, self.snapshot):
self._start_if_ready(node)
try:
self.backend.acquire_lock()
self._commit_run_log()
except:
logger.exception("Error in handling events.")
finally:
self.backend.release_lock()
if kwargs.get('success', None) == False:
task = self.tasks[task_name]
try:
self.backend.acquire_lock()
if self.event_handler:
self.event_handler.emit('task_failed',
task._serialize(include_run_logs=True))
except:
logger.exception("Error in handling events.")
finally:
self.backend.release_lock()
self._on_completion() | def function[_complete_task, parameter[self, task_name]]:
constant[ Marks this task as completed. Kwargs are stored in the run log. ]
call[name[logger].debug, parameter[call[constant[Job {0} marking task {1} as completed].format, parameter[name[self].name, name[task_name]]]]]
call[call[name[self].run_log][constant[tasks]]][name[task_name]] assign[=] name[kwargs]
for taget[name[node]] in starred[call[name[self].downstream, parameter[name[task_name], name[self].snapshot]]] begin[:]
call[name[self]._start_if_ready, parameter[name[node]]]
<ast.Try object at 0x7da1b0be0250>
if compare[call[name[kwargs].get, parameter[constant[success], constant[None]]] equal[==] constant[False]] begin[:]
variable[task] assign[=] call[name[self].tasks][name[task_name]]
<ast.Try object at 0x7da1b0be1c60>
call[name[self]._on_completion, parameter[]] | keyword[def] identifier[_complete_task] ( identifier[self] , identifier[task_name] ,** identifier[kwargs] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[name] , identifier[task_name] ))
identifier[self] . identifier[run_log] [ literal[string] ][ identifier[task_name] ]= identifier[kwargs]
keyword[for] identifier[node] keyword[in] identifier[self] . identifier[downstream] ( identifier[task_name] , identifier[self] . identifier[snapshot] ):
identifier[self] . identifier[_start_if_ready] ( identifier[node] )
keyword[try] :
identifier[self] . identifier[backend] . identifier[acquire_lock] ()
identifier[self] . identifier[_commit_run_log] ()
keyword[except] :
identifier[logger] . identifier[exception] ( literal[string] )
keyword[finally] :
identifier[self] . identifier[backend] . identifier[release_lock] ()
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )== keyword[False] :
identifier[task] = identifier[self] . identifier[tasks] [ identifier[task_name] ]
keyword[try] :
identifier[self] . identifier[backend] . identifier[acquire_lock] ()
keyword[if] identifier[self] . identifier[event_handler] :
identifier[self] . identifier[event_handler] . identifier[emit] ( literal[string] ,
identifier[task] . identifier[_serialize] ( identifier[include_run_logs] = keyword[True] ))
keyword[except] :
identifier[logger] . identifier[exception] ( literal[string] )
keyword[finally] :
identifier[self] . identifier[backend] . identifier[release_lock] ()
identifier[self] . identifier[_on_completion] () | def _complete_task(self, task_name, **kwargs):
""" Marks this task as completed. Kwargs are stored in the run log. """
logger.debug('Job {0} marking task {1} as completed'.format(self.name, task_name))
self.run_log['tasks'][task_name] = kwargs
for node in self.downstream(task_name, self.snapshot):
self._start_if_ready(node) # depends on [control=['for'], data=['node']]
try:
self.backend.acquire_lock()
self._commit_run_log() # depends on [control=['try'], data=[]]
except:
logger.exception('Error in handling events.') # depends on [control=['except'], data=[]]
finally:
self.backend.release_lock()
if kwargs.get('success', None) == False:
task = self.tasks[task_name]
try:
self.backend.acquire_lock()
if self.event_handler:
self.event_handler.emit('task_failed', task._serialize(include_run_logs=True)) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except:
logger.exception('Error in handling events.') # depends on [control=['except'], data=[]]
finally:
self.backend.release_lock() # depends on [control=['if'], data=[]]
self._on_completion() |
def deterministic_solve(
model,
shocks=None,
s1=None,
T=100,
ignore_constraints=False,
maxit=100,
initial_guess=None,
verbose=True,
solver='ncpsolve',
tol=1e-6):
"""
Computes a perfect foresight simulation using a stacked-time algorithm.
The initial state is specified either by providing a series of exogenous
shocks and assuming the model is initially in equilibrium with the first
value of the shock, or by specifying an initial value for the states.
Parameters
----------
model : Model
Model to be solved
shocks : array-like, dict, or pandas.DataFrame
A specification of the shocks to the model. Can be any of the
following (note by "declaration order" below we mean the order
of `model.symbols["shocks"]`):
- A 1d numpy array-like specifying a time series for a single
shock, or all shocks stacked into a single array.
- A 2d numpy array where each column specifies the time series
for one of the shocks in declaration order. This must be an
`N` by number of shocks 2d array.
- A dict where keys are strings found in
`model.symbols["shocks"]` and values are a time series of
values for that shock. For model shocks that do not appear in
this dict, the shock is set to the calibrated value. Note
that this interface is the most flexible as it allows the user
to pass values for only a subset of the model shocks and it
allows the passed time series to be of different lengths.
- A DataFrame where columns map shock names into time series.
The same assumptions and behavior that are used in the dict
case apply here
If nothing is given here, `shocks` is set equal to the
calibrated values found in `model.calibration["shocks"]` for
all periods.
If the length of any time-series in shocks is less than `T`
(see below) it is assumed that that particular shock will
remain at the final given value for the duration of the
simulaiton.
s1 : ndarray or dict
a vector with the value of initial states
T : int
horizon for the perfect foresight simulation
maxit : int
maximum number of iteration for the nonlinear solver
verbose : boolean
if True, the solver displays iterations
tol : float
stopping criterium for the nonlinear solver
ignore_constraints : bool
if True, complementarity constraints are ignored.
Returns
-------
pandas dataframe
a dataframe with T+1 observations of the model variables along the
simulation (states, controls, auxiliaries). The first observation is
the steady-state corresponding to the first value of the shocks. The
simulation should return to a steady-state corresponding to the last
value of the exogenous shocks.
"""
# definitions
n_s = len(model.calibration['states'])
n_x = len(model.calibration['controls'])
p = model.calibration['parameters']
epsilons = _shocks_to_epsilons(model, shocks, T)
m0 = epsilons[0, :]
# get initial steady-state
from dolo.algos.steady_state import find_steady_state
# TODO: use initial_guess for steady_state
# TODO:
if s1 is None:
start_state = find_steady_state(model, m=m0)
s0 = start_state['states']
x0 = start_state['controls']
m1 = epsilons[1,:]
s1 = model.functions['transition'](m0, s0, x0, m1, p)
else:
s0 = model.calibration['states']*np.nan
x0 = model.calibration['controls']*np.nan
s1 = np.array(s1)
x1_g = model.calibration['controls'] # we can do better here
sT_g = model.calibration['states'] # we can do better here
xT_g = model.calibration['controls'] # we can do better here
if initial_guess is None:
start = np.concatenate([s1, x1_g])
final = np.concatenate([sT_g, xT_g])
initial_guess = np.row_stack(
[start * (1 - l) + final * l for l in linspace(0.0, 1.0, T)])
else:
if isinstance(initial_guess, pd.DataFrame):
initial_guess = np.array(initial_guess[model.symbols['states']+model.symbols['controls']])
initial_guess = initial_guess[1:,:]
initial_guess = initial_guess[:, :n_s + n_x]
sh = initial_guess.shape
if model.x_bounds and not ignore_constraints:
initial_states = initial_guess[:, :n_s]
[lb, ub] = [u(epsilons[1:,:], initial_states, p) for u in model.x_bounds]
lower_bound = initial_guess * 0 - np.inf
lower_bound[:, n_s:] = lb
upper_bound = initial_guess * 0 + np.inf
upper_bound[:, n_s:] = ub
test1 = max(lb.max(axis=0) - lb.min(axis=0))
test2 = max(ub.max(axis=0) - ub.min(axis=0))
if test1 > 0.00000001 or test2 > 0.00000001:
msg = "Not implemented: perfect foresight solution requires that "
msg += "controls have constant bounds."
raise Exception(msg)
else:
ignore_constraints = True
lower_bound = None
upper_bound = None
if not ignore_constraints:
def ff(vec):
return det_residual(
model, vec.reshape(sh), s1, xT_g, epsilons[1:, :], jactype='sparse')
v0 = initial_guess.ravel()
if solver=='ncpsolve':
sol, nit = ncpsolve(
ff,
lower_bound.ravel(),
upper_bound.ravel(),
initial_guess.ravel(),
verbose=verbose,
maxit=maxit,
tol=tol,
jactype='sparse')
else:
from dolo.numeric.extern.lmmcp import lmmcp
sol = lmmcp(
lambda u: ff(u)[0],
lambda u: ff(u)[1].todense(),
lower_bound.ravel(),
upper_bound.ravel(),
initial_guess.ravel(),
verbose=verbose)
nit = -1
sol = sol.reshape(sh)
else:
def ff(vec):
ll = det_residual(
model, vec.reshape(sh), s1, xT_g, epsilons[1:, :],
diff=True)
return(ll)
v0 = initial_guess.ravel()
# from scipy.optimize import root
# sol = root(ff, v0, jac=True)
# sol = sol.x.reshape(sh)
from dolo.numeric.optimize.newton import newton
sol, nit = newton(ff, v0, jactype='sparse')
sol = sol.reshape(sh)
sx = np.concatenate([s0, x0])
# sol = sol[:-1, :]
sol = np.concatenate([sx[None, :], sol], axis=0)
# epsilons = np.concatenate([epsilons[:1,:], epsilons], axis=0)
if 'auxiliary' in model.functions:
colnames = (model.symbols['states'] + model.symbols['controls'] +
model.symbols['auxiliaries'])
# compute auxiliaries
y = model.functions['auxiliary'](epsilons, sol[:, :n_s], sol[:, n_s:],
p)
sol = np.column_stack([sol, y])
else:
colnames = model.symbols['states'] + model.symbols['controls']
sol = np.column_stack([sol, epsilons])
colnames = colnames + model.symbols['exogenous']
ts = pd.DataFrame(sol, columns=colnames)
return ts | def function[deterministic_solve, parameter[model, shocks, s1, T, ignore_constraints, maxit, initial_guess, verbose, solver, tol]]:
constant[
Computes a perfect foresight simulation using a stacked-time algorithm.
The initial state is specified either by providing a series of exogenous
shocks and assuming the model is initially in equilibrium with the first
value of the shock, or by specifying an initial value for the states.
Parameters
----------
model : Model
Model to be solved
shocks : array-like, dict, or pandas.DataFrame
A specification of the shocks to the model. Can be any of the
following (note by "declaration order" below we mean the order
of `model.symbols["shocks"]`):
- A 1d numpy array-like specifying a time series for a single
shock, or all shocks stacked into a single array.
- A 2d numpy array where each column specifies the time series
for one of the shocks in declaration order. This must be an
`N` by number of shocks 2d array.
- A dict where keys are strings found in
`model.symbols["shocks"]` and values are a time series of
values for that shock. For model shocks that do not appear in
this dict, the shock is set to the calibrated value. Note
that this interface is the most flexible as it allows the user
to pass values for only a subset of the model shocks and it
allows the passed time series to be of different lengths.
- A DataFrame where columns map shock names into time series.
The same assumptions and behavior that are used in the dict
case apply here
If nothing is given here, `shocks` is set equal to the
calibrated values found in `model.calibration["shocks"]` for
all periods.
If the length of any time-series in shocks is less than `T`
(see below) it is assumed that that particular shock will
remain at the final given value for the duration of the
simulaiton.
s1 : ndarray or dict
a vector with the value of initial states
T : int
horizon for the perfect foresight simulation
maxit : int
maximum number of iteration for the nonlinear solver
verbose : boolean
if True, the solver displays iterations
tol : float
stopping criterium for the nonlinear solver
ignore_constraints : bool
if True, complementarity constraints are ignored.
Returns
-------
pandas dataframe
a dataframe with T+1 observations of the model variables along the
simulation (states, controls, auxiliaries). The first observation is
the steady-state corresponding to the first value of the shocks. The
simulation should return to a steady-state corresponding to the last
value of the exogenous shocks.
]
variable[n_s] assign[=] call[name[len], parameter[call[name[model].calibration][constant[states]]]]
variable[n_x] assign[=] call[name[len], parameter[call[name[model].calibration][constant[controls]]]]
variable[p] assign[=] call[name[model].calibration][constant[parameters]]
variable[epsilons] assign[=] call[name[_shocks_to_epsilons], parameter[name[model], name[shocks], name[T]]]
variable[m0] assign[=] call[name[epsilons]][tuple[[<ast.Constant object at 0x7da20cabdb10>, <ast.Slice object at 0x7da20cabf790>]]]
from relative_module[dolo.algos.steady_state] import module[find_steady_state]
if compare[name[s1] is constant[None]] begin[:]
variable[start_state] assign[=] call[name[find_steady_state], parameter[name[model]]]
variable[s0] assign[=] call[name[start_state]][constant[states]]
variable[x0] assign[=] call[name[start_state]][constant[controls]]
variable[m1] assign[=] call[name[epsilons]][tuple[[<ast.Constant object at 0x7da20cabfcd0>, <ast.Slice object at 0x7da20cabd090>]]]
variable[s1] assign[=] call[call[name[model].functions][constant[transition]], parameter[name[m0], name[s0], name[x0], name[m1], name[p]]]
variable[x1_g] assign[=] call[name[model].calibration][constant[controls]]
variable[sT_g] assign[=] call[name[model].calibration][constant[states]]
variable[xT_g] assign[=] call[name[model].calibration][constant[controls]]
if compare[name[initial_guess] is constant[None]] begin[:]
variable[start] assign[=] call[name[np].concatenate, parameter[list[[<ast.Name object at 0x7da18eb557e0>, <ast.Name object at 0x7da18eb554b0>]]]]
variable[final] assign[=] call[name[np].concatenate, parameter[list[[<ast.Name object at 0x7da18eb54b50>, <ast.Name object at 0x7da18eb56c80>]]]]
variable[initial_guess] assign[=] call[name[np].row_stack, parameter[<ast.ListComp object at 0x7da18eb54700>]]
variable[sh] assign[=] name[initial_guess].shape
if <ast.BoolOp object at 0x7da18eb55d50> begin[:]
variable[initial_states] assign[=] call[name[initial_guess]][tuple[[<ast.Slice object at 0x7da18eb54ee0>, <ast.Slice object at 0x7da18eb540a0>]]]
<ast.List object at 0x7da18eb56b60> assign[=] <ast.ListComp object at 0x7da18eb561d0>
variable[lower_bound] assign[=] binary_operation[binary_operation[name[initial_guess] * constant[0]] - name[np].inf]
call[name[lower_bound]][tuple[[<ast.Slice object at 0x7da18eb578b0>, <ast.Slice object at 0x7da18eb55a20>]]] assign[=] name[lb]
variable[upper_bound] assign[=] binary_operation[binary_operation[name[initial_guess] * constant[0]] + name[np].inf]
call[name[upper_bound]][tuple[[<ast.Slice object at 0x7da18eb571f0>, <ast.Slice object at 0x7da18eb568c0>]]] assign[=] name[ub]
variable[test1] assign[=] call[name[max], parameter[binary_operation[call[name[lb].max, parameter[]] - call[name[lb].min, parameter[]]]]]
variable[test2] assign[=] call[name[max], parameter[binary_operation[call[name[ub].max, parameter[]] - call[name[ub].min, parameter[]]]]]
if <ast.BoolOp object at 0x7da18eb55a80> begin[:]
variable[msg] assign[=] constant[Not implemented: perfect foresight solution requires that ]
<ast.AugAssign object at 0x7da18eb562c0>
<ast.Raise object at 0x7da18eb551b0>
if <ast.UnaryOp object at 0x7da18eb54190> begin[:]
def function[ff, parameter[vec]]:
return[call[name[det_residual], parameter[name[model], call[name[vec].reshape, parameter[name[sh]]], name[s1], name[xT_g], call[name[epsilons]][tuple[[<ast.Slice object at 0x7da18eb56c50>, <ast.Slice object at 0x7da18eb548e0>]]]]]]
variable[v0] assign[=] call[name[initial_guess].ravel, parameter[]]
if compare[name[solver] equal[==] constant[ncpsolve]] begin[:]
<ast.Tuple object at 0x7da18eb542e0> assign[=] call[name[ncpsolve], parameter[name[ff], call[name[lower_bound].ravel, parameter[]], call[name[upper_bound].ravel, parameter[]], call[name[initial_guess].ravel, parameter[]]]]
variable[sol] assign[=] call[name[sol].reshape, parameter[name[sh]]]
variable[sx] assign[=] call[name[np].concatenate, parameter[list[[<ast.Name object at 0x7da2054a7580>, <ast.Name object at 0x7da2054a76d0>]]]]
variable[sol] assign[=] call[name[np].concatenate, parameter[list[[<ast.Subscript object at 0x7da2049600d0>, <ast.Name object at 0x7da204961870>]]]]
if compare[constant[auxiliary] in name[model].functions] begin[:]
variable[colnames] assign[=] binary_operation[binary_operation[call[name[model].symbols][constant[states]] + call[name[model].symbols][constant[controls]]] + call[name[model].symbols][constant[auxiliaries]]]
variable[y] assign[=] call[call[name[model].functions][constant[auxiliary]], parameter[name[epsilons], call[name[sol]][tuple[[<ast.Slice object at 0x7da204961030>, <ast.Slice object at 0x7da2049634c0>]]], call[name[sol]][tuple[[<ast.Slice object at 0x7da204963d30>, <ast.Slice object at 0x7da204962e60>]]], name[p]]]
variable[sol] assign[=] call[name[np].column_stack, parameter[list[[<ast.Name object at 0x7da2049628f0>, <ast.Name object at 0x7da2049621d0>]]]]
variable[sol] assign[=] call[name[np].column_stack, parameter[list[[<ast.Name object at 0x7da204961a80>, <ast.Name object at 0x7da2049623e0>]]]]
variable[colnames] assign[=] binary_operation[name[colnames] + call[name[model].symbols][constant[exogenous]]]
variable[ts] assign[=] call[name[pd].DataFrame, parameter[name[sol]]]
return[name[ts]] | keyword[def] identifier[deterministic_solve] (
identifier[model] ,
identifier[shocks] = keyword[None] ,
identifier[s1] = keyword[None] ,
identifier[T] = literal[int] ,
identifier[ignore_constraints] = keyword[False] ,
identifier[maxit] = literal[int] ,
identifier[initial_guess] = keyword[None] ,
identifier[verbose] = keyword[True] ,
identifier[solver] = literal[string] ,
identifier[tol] = literal[int] ):
literal[string]
identifier[n_s] = identifier[len] ( identifier[model] . identifier[calibration] [ literal[string] ])
identifier[n_x] = identifier[len] ( identifier[model] . identifier[calibration] [ literal[string] ])
identifier[p] = identifier[model] . identifier[calibration] [ literal[string] ]
identifier[epsilons] = identifier[_shocks_to_epsilons] ( identifier[model] , identifier[shocks] , identifier[T] )
identifier[m0] = identifier[epsilons] [ literal[int] ,:]
keyword[from] identifier[dolo] . identifier[algos] . identifier[steady_state] keyword[import] identifier[find_steady_state]
keyword[if] identifier[s1] keyword[is] keyword[None] :
identifier[start_state] = identifier[find_steady_state] ( identifier[model] , identifier[m] = identifier[m0] )
identifier[s0] = identifier[start_state] [ literal[string] ]
identifier[x0] = identifier[start_state] [ literal[string] ]
identifier[m1] = identifier[epsilons] [ literal[int] ,:]
identifier[s1] = identifier[model] . identifier[functions] [ literal[string] ]( identifier[m0] , identifier[s0] , identifier[x0] , identifier[m1] , identifier[p] )
keyword[else] :
identifier[s0] = identifier[model] . identifier[calibration] [ literal[string] ]* identifier[np] . identifier[nan]
identifier[x0] = identifier[model] . identifier[calibration] [ literal[string] ]* identifier[np] . identifier[nan]
identifier[s1] = identifier[np] . identifier[array] ( identifier[s1] )
identifier[x1_g] = identifier[model] . identifier[calibration] [ literal[string] ]
identifier[sT_g] = identifier[model] . identifier[calibration] [ literal[string] ]
identifier[xT_g] = identifier[model] . identifier[calibration] [ literal[string] ]
keyword[if] identifier[initial_guess] keyword[is] keyword[None] :
identifier[start] = identifier[np] . identifier[concatenate] ([ identifier[s1] , identifier[x1_g] ])
identifier[final] = identifier[np] . identifier[concatenate] ([ identifier[sT_g] , identifier[xT_g] ])
identifier[initial_guess] = identifier[np] . identifier[row_stack] (
[ identifier[start] *( literal[int] - identifier[l] )+ identifier[final] * identifier[l] keyword[for] identifier[l] keyword[in] identifier[linspace] ( literal[int] , literal[int] , identifier[T] )])
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[initial_guess] , identifier[pd] . identifier[DataFrame] ):
identifier[initial_guess] = identifier[np] . identifier[array] ( identifier[initial_guess] [ identifier[model] . identifier[symbols] [ literal[string] ]+ identifier[model] . identifier[symbols] [ literal[string] ]])
identifier[initial_guess] = identifier[initial_guess] [ literal[int] :,:]
identifier[initial_guess] = identifier[initial_guess] [:,: identifier[n_s] + identifier[n_x] ]
identifier[sh] = identifier[initial_guess] . identifier[shape]
keyword[if] identifier[model] . identifier[x_bounds] keyword[and] keyword[not] identifier[ignore_constraints] :
identifier[initial_states] = identifier[initial_guess] [:,: identifier[n_s] ]
[ identifier[lb] , identifier[ub] ]=[ identifier[u] ( identifier[epsilons] [ literal[int] :,:], identifier[initial_states] , identifier[p] ) keyword[for] identifier[u] keyword[in] identifier[model] . identifier[x_bounds] ]
identifier[lower_bound] = identifier[initial_guess] * literal[int] - identifier[np] . identifier[inf]
identifier[lower_bound] [:, identifier[n_s] :]= identifier[lb]
identifier[upper_bound] = identifier[initial_guess] * literal[int] + identifier[np] . identifier[inf]
identifier[upper_bound] [:, identifier[n_s] :]= identifier[ub]
identifier[test1] = identifier[max] ( identifier[lb] . identifier[max] ( identifier[axis] = literal[int] )- identifier[lb] . identifier[min] ( identifier[axis] = literal[int] ))
identifier[test2] = identifier[max] ( identifier[ub] . identifier[max] ( identifier[axis] = literal[int] )- identifier[ub] . identifier[min] ( identifier[axis] = literal[int] ))
keyword[if] identifier[test1] > literal[int] keyword[or] identifier[test2] > literal[int] :
identifier[msg] = literal[string]
identifier[msg] += literal[string]
keyword[raise] identifier[Exception] ( identifier[msg] )
keyword[else] :
identifier[ignore_constraints] = keyword[True]
identifier[lower_bound] = keyword[None]
identifier[upper_bound] = keyword[None]
keyword[if] keyword[not] identifier[ignore_constraints] :
keyword[def] identifier[ff] ( identifier[vec] ):
keyword[return] identifier[det_residual] (
identifier[model] , identifier[vec] . identifier[reshape] ( identifier[sh] ), identifier[s1] , identifier[xT_g] , identifier[epsilons] [ literal[int] :,:], identifier[jactype] = literal[string] )
identifier[v0] = identifier[initial_guess] . identifier[ravel] ()
keyword[if] identifier[solver] == literal[string] :
identifier[sol] , identifier[nit] = identifier[ncpsolve] (
identifier[ff] ,
identifier[lower_bound] . identifier[ravel] (),
identifier[upper_bound] . identifier[ravel] (),
identifier[initial_guess] . identifier[ravel] (),
identifier[verbose] = identifier[verbose] ,
identifier[maxit] = identifier[maxit] ,
identifier[tol] = identifier[tol] ,
identifier[jactype] = literal[string] )
keyword[else] :
keyword[from] identifier[dolo] . identifier[numeric] . identifier[extern] . identifier[lmmcp] keyword[import] identifier[lmmcp]
identifier[sol] = identifier[lmmcp] (
keyword[lambda] identifier[u] : identifier[ff] ( identifier[u] )[ literal[int] ],
keyword[lambda] identifier[u] : identifier[ff] ( identifier[u] )[ literal[int] ]. identifier[todense] (),
identifier[lower_bound] . identifier[ravel] (),
identifier[upper_bound] . identifier[ravel] (),
identifier[initial_guess] . identifier[ravel] (),
identifier[verbose] = identifier[verbose] )
identifier[nit] =- literal[int]
identifier[sol] = identifier[sol] . identifier[reshape] ( identifier[sh] )
keyword[else] :
keyword[def] identifier[ff] ( identifier[vec] ):
identifier[ll] = identifier[det_residual] (
identifier[model] , identifier[vec] . identifier[reshape] ( identifier[sh] ), identifier[s1] , identifier[xT_g] , identifier[epsilons] [ literal[int] :,:],
identifier[diff] = keyword[True] )
keyword[return] ( identifier[ll] )
identifier[v0] = identifier[initial_guess] . identifier[ravel] ()
keyword[from] identifier[dolo] . identifier[numeric] . identifier[optimize] . identifier[newton] keyword[import] identifier[newton]
identifier[sol] , identifier[nit] = identifier[newton] ( identifier[ff] , identifier[v0] , identifier[jactype] = literal[string] )
identifier[sol] = identifier[sol] . identifier[reshape] ( identifier[sh] )
identifier[sx] = identifier[np] . identifier[concatenate] ([ identifier[s0] , identifier[x0] ])
identifier[sol] = identifier[np] . identifier[concatenate] ([ identifier[sx] [ keyword[None] ,:], identifier[sol] ], identifier[axis] = literal[int] )
keyword[if] literal[string] keyword[in] identifier[model] . identifier[functions] :
identifier[colnames] =( identifier[model] . identifier[symbols] [ literal[string] ]+ identifier[model] . identifier[symbols] [ literal[string] ]+
identifier[model] . identifier[symbols] [ literal[string] ])
identifier[y] = identifier[model] . identifier[functions] [ literal[string] ]( identifier[epsilons] , identifier[sol] [:,: identifier[n_s] ], identifier[sol] [:, identifier[n_s] :],
identifier[p] )
identifier[sol] = identifier[np] . identifier[column_stack] ([ identifier[sol] , identifier[y] ])
keyword[else] :
identifier[colnames] = identifier[model] . identifier[symbols] [ literal[string] ]+ identifier[model] . identifier[symbols] [ literal[string] ]
identifier[sol] = identifier[np] . identifier[column_stack] ([ identifier[sol] , identifier[epsilons] ])
identifier[colnames] = identifier[colnames] + identifier[model] . identifier[symbols] [ literal[string] ]
identifier[ts] = identifier[pd] . identifier[DataFrame] ( identifier[sol] , identifier[columns] = identifier[colnames] )
keyword[return] identifier[ts] | def deterministic_solve(model, shocks=None, s1=None, T=100, ignore_constraints=False, maxit=100, initial_guess=None, verbose=True, solver='ncpsolve', tol=1e-06):
"""
Computes a perfect foresight simulation using a stacked-time algorithm.
The initial state is specified either by providing a series of exogenous
shocks and assuming the model is initially in equilibrium with the first
value of the shock, or by specifying an initial value for the states.
Parameters
----------
model : Model
Model to be solved
shocks : array-like, dict, or pandas.DataFrame
A specification of the shocks to the model. Can be any of the
following (note by "declaration order" below we mean the order
of `model.symbols["shocks"]`):
- A 1d numpy array-like specifying a time series for a single
shock, or all shocks stacked into a single array.
- A 2d numpy array where each column specifies the time series
for one of the shocks in declaration order. This must be an
`N` by number of shocks 2d array.
- A dict where keys are strings found in
`model.symbols["shocks"]` and values are a time series of
values for that shock. For model shocks that do not appear in
this dict, the shock is set to the calibrated value. Note
that this interface is the most flexible as it allows the user
to pass values for only a subset of the model shocks and it
allows the passed time series to be of different lengths.
- A DataFrame where columns map shock names into time series.
The same assumptions and behavior that are used in the dict
case apply here
If nothing is given here, `shocks` is set equal to the
calibrated values found in `model.calibration["shocks"]` for
all periods.
If the length of any time-series in shocks is less than `T`
(see below) it is assumed that that particular shock will
remain at the final given value for the duration of the
simulaiton.
s1 : ndarray or dict
a vector with the value of initial states
T : int
horizon for the perfect foresight simulation
maxit : int
maximum number of iteration for the nonlinear solver
verbose : boolean
if True, the solver displays iterations
tol : float
stopping criterium for the nonlinear solver
ignore_constraints : bool
if True, complementarity constraints are ignored.
Returns
-------
pandas dataframe
a dataframe with T+1 observations of the model variables along the
simulation (states, controls, auxiliaries). The first observation is
the steady-state corresponding to the first value of the shocks. The
simulation should return to a steady-state corresponding to the last
value of the exogenous shocks.
"""
# definitions
n_s = len(model.calibration['states'])
n_x = len(model.calibration['controls'])
p = model.calibration['parameters']
epsilons = _shocks_to_epsilons(model, shocks, T)
m0 = epsilons[0, :]
# get initial steady-state
from dolo.algos.steady_state import find_steady_state
# TODO: use initial_guess for steady_state
# TODO:
if s1 is None:
start_state = find_steady_state(model, m=m0)
s0 = start_state['states']
x0 = start_state['controls']
m1 = epsilons[1, :]
s1 = model.functions['transition'](m0, s0, x0, m1, p) # depends on [control=['if'], data=['s1']]
else:
s0 = model.calibration['states'] * np.nan
x0 = model.calibration['controls'] * np.nan
s1 = np.array(s1)
x1_g = model.calibration['controls'] # we can do better here
sT_g = model.calibration['states'] # we can do better here
xT_g = model.calibration['controls'] # we can do better here
if initial_guess is None:
start = np.concatenate([s1, x1_g])
final = np.concatenate([sT_g, xT_g])
initial_guess = np.row_stack([start * (1 - l) + final * l for l in linspace(0.0, 1.0, T)]) # depends on [control=['if'], data=['initial_guess']]
else:
if isinstance(initial_guess, pd.DataFrame):
initial_guess = np.array(initial_guess[model.symbols['states'] + model.symbols['controls']]) # depends on [control=['if'], data=[]]
initial_guess = initial_guess[1:, :]
initial_guess = initial_guess[:, :n_s + n_x]
sh = initial_guess.shape
if model.x_bounds and (not ignore_constraints):
initial_states = initial_guess[:, :n_s]
[lb, ub] = [u(epsilons[1:, :], initial_states, p) for u in model.x_bounds]
lower_bound = initial_guess * 0 - np.inf
lower_bound[:, n_s:] = lb
upper_bound = initial_guess * 0 + np.inf
upper_bound[:, n_s:] = ub
test1 = max(lb.max(axis=0) - lb.min(axis=0))
test2 = max(ub.max(axis=0) - ub.min(axis=0))
if test1 > 1e-08 or test2 > 1e-08:
msg = 'Not implemented: perfect foresight solution requires that '
msg += 'controls have constant bounds.'
raise Exception(msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
ignore_constraints = True
lower_bound = None
upper_bound = None
if not ignore_constraints:
def ff(vec):
return det_residual(model, vec.reshape(sh), s1, xT_g, epsilons[1:, :], jactype='sparse')
v0 = initial_guess.ravel()
if solver == 'ncpsolve':
(sol, nit) = ncpsolve(ff, lower_bound.ravel(), upper_bound.ravel(), initial_guess.ravel(), verbose=verbose, maxit=maxit, tol=tol, jactype='sparse') # depends on [control=['if'], data=[]]
else:
from dolo.numeric.extern.lmmcp import lmmcp
sol = lmmcp(lambda u: ff(u)[0], lambda u: ff(u)[1].todense(), lower_bound.ravel(), upper_bound.ravel(), initial_guess.ravel(), verbose=verbose)
nit = -1
sol = sol.reshape(sh) # depends on [control=['if'], data=[]]
else:
def ff(vec):
ll = det_residual(model, vec.reshape(sh), s1, xT_g, epsilons[1:, :], diff=True)
return ll
v0 = initial_guess.ravel()
# from scipy.optimize import root
# sol = root(ff, v0, jac=True)
# sol = sol.x.reshape(sh)
from dolo.numeric.optimize.newton import newton
(sol, nit) = newton(ff, v0, jactype='sparse')
sol = sol.reshape(sh)
sx = np.concatenate([s0, x0])
# sol = sol[:-1, :]
sol = np.concatenate([sx[None, :], sol], axis=0)
# epsilons = np.concatenate([epsilons[:1,:], epsilons], axis=0)
if 'auxiliary' in model.functions:
colnames = model.symbols['states'] + model.symbols['controls'] + model.symbols['auxiliaries']
# compute auxiliaries
y = model.functions['auxiliary'](epsilons, sol[:, :n_s], sol[:, n_s:], p)
sol = np.column_stack([sol, y]) # depends on [control=['if'], data=[]]
else:
colnames = model.symbols['states'] + model.symbols['controls']
sol = np.column_stack([sol, epsilons])
colnames = colnames + model.symbols['exogenous']
ts = pd.DataFrame(sol, columns=colnames)
return ts |
def calculate_nf(sample_frame, ref_targets, ref_sample):
"""Calculates a normalization factor from the geometric mean of the
expression of all ref_targets, normalized to a reference sample.
:param DataFrame sample_frame: A sample data frame.
:param iterable ref_targets: A list or Series of target names.
:param string ref_sample: The name of the sample to normalize against.
:return: a Series indexed by sample name containing normalization factors
for each sample.
"""
grouped = sample_frame.groupby(['Target', 'Sample'])['Cq'].aggregate(average_cq)
samples = sample_frame['Sample'].unique()
nfs = gmean([pow(2, -grouped.ix[zip(repeat(ref_gene), samples)] + grouped.ix[ref_gene, ref_sample]) for ref_gene in ref_targets])
return pd.Series(nfs, index=samples) | def function[calculate_nf, parameter[sample_frame, ref_targets, ref_sample]]:
constant[Calculates a normalization factor from the geometric mean of the
expression of all ref_targets, normalized to a reference sample.
:param DataFrame sample_frame: A sample data frame.
:param iterable ref_targets: A list or Series of target names.
:param string ref_sample: The name of the sample to normalize against.
:return: a Series indexed by sample name containing normalization factors
for each sample.
]
variable[grouped] assign[=] call[call[call[name[sample_frame].groupby, parameter[list[[<ast.Constant object at 0x7da18f09c6a0>, <ast.Constant object at 0x7da18f09f0a0>]]]]][constant[Cq]].aggregate, parameter[name[average_cq]]]
variable[samples] assign[=] call[call[name[sample_frame]][constant[Sample]].unique, parameter[]]
variable[nfs] assign[=] call[name[gmean], parameter[<ast.ListComp object at 0x7da18f09eda0>]]
return[call[name[pd].Series, parameter[name[nfs]]]] | keyword[def] identifier[calculate_nf] ( identifier[sample_frame] , identifier[ref_targets] , identifier[ref_sample] ):
literal[string]
identifier[grouped] = identifier[sample_frame] . identifier[groupby] ([ literal[string] , literal[string] ])[ literal[string] ]. identifier[aggregate] ( identifier[average_cq] )
identifier[samples] = identifier[sample_frame] [ literal[string] ]. identifier[unique] ()
identifier[nfs] = identifier[gmean] ([ identifier[pow] ( literal[int] ,- identifier[grouped] . identifier[ix] [ identifier[zip] ( identifier[repeat] ( identifier[ref_gene] ), identifier[samples] )]+ identifier[grouped] . identifier[ix] [ identifier[ref_gene] , identifier[ref_sample] ]) keyword[for] identifier[ref_gene] keyword[in] identifier[ref_targets] ])
keyword[return] identifier[pd] . identifier[Series] ( identifier[nfs] , identifier[index] = identifier[samples] ) | def calculate_nf(sample_frame, ref_targets, ref_sample):
"""Calculates a normalization factor from the geometric mean of the
expression of all ref_targets, normalized to a reference sample.
:param DataFrame sample_frame: A sample data frame.
:param iterable ref_targets: A list or Series of target names.
:param string ref_sample: The name of the sample to normalize against.
:return: a Series indexed by sample name containing normalization factors
for each sample.
"""
grouped = sample_frame.groupby(['Target', 'Sample'])['Cq'].aggregate(average_cq)
samples = sample_frame['Sample'].unique()
nfs = gmean([pow(2, -grouped.ix[zip(repeat(ref_gene), samples)] + grouped.ix[ref_gene, ref_sample]) for ref_gene in ref_targets])
return pd.Series(nfs, index=samples) |
def back_slash_to_front_converter(string):
"""
Replacing all \ in the str to /
:param string: single string to modify
:type string: str
"""
try:
if not string or not isinstance(string, str):
return string
return string.replace('\\', '/')
except Exception:
return string | def function[back_slash_to_front_converter, parameter[string]]:
constant[
Replacing all \ in the str to /
:param string: single string to modify
:type string: str
]
<ast.Try object at 0x7da18dc9b790> | keyword[def] identifier[back_slash_to_front_converter] ( identifier[string] ):
literal[string]
keyword[try] :
keyword[if] keyword[not] identifier[string] keyword[or] keyword[not] identifier[isinstance] ( identifier[string] , identifier[str] ):
keyword[return] identifier[string]
keyword[return] identifier[string] . identifier[replace] ( literal[string] , literal[string] )
keyword[except] identifier[Exception] :
keyword[return] identifier[string] | def back_slash_to_front_converter(string):
"""
Replacing all \\ in the str to /
:param string: single string to modify
:type string: str
"""
try:
if not string or not isinstance(string, str):
return string # depends on [control=['if'], data=[]]
return string.replace('\\', '/') # depends on [control=['try'], data=[]]
except Exception:
return string # depends on [control=['except'], data=[]] |
def color_prompt(self):
''' Construct psiTurk shell prompt '''
prompt = '[' + colorize('psiTurk', 'bold')
server_string = ''
server_status = self.server.is_server_running()
if server_status == 'yes':
server_string = colorize('on', 'green')
elif server_status == 'no':
server_string = colorize('off', 'red')
elif server_status == 'maybe':
server_string = colorize('unknown', 'yellow')
elif server_status == 'blocked':
server_string = colorize('blocked', 'red')
prompt += ' server:' + server_string
prompt += ' mode:' + colorize('cabin', 'bold')
prompt += ']$ '
self.prompt = prompt | def function[color_prompt, parameter[self]]:
constant[ Construct psiTurk shell prompt ]
variable[prompt] assign[=] binary_operation[constant[[] + call[name[colorize], parameter[constant[psiTurk], constant[bold]]]]
variable[server_string] assign[=] constant[]
variable[server_status] assign[=] call[name[self].server.is_server_running, parameter[]]
if compare[name[server_status] equal[==] constant[yes]] begin[:]
variable[server_string] assign[=] call[name[colorize], parameter[constant[on], constant[green]]]
<ast.AugAssign object at 0x7da1b07465f0>
<ast.AugAssign object at 0x7da1b0744fd0>
<ast.AugAssign object at 0x7da1b0745270>
name[self].prompt assign[=] name[prompt] | keyword[def] identifier[color_prompt] ( identifier[self] ):
literal[string]
identifier[prompt] = literal[string] + identifier[colorize] ( literal[string] , literal[string] )
identifier[server_string] = literal[string]
identifier[server_status] = identifier[self] . identifier[server] . identifier[is_server_running] ()
keyword[if] identifier[server_status] == literal[string] :
identifier[server_string] = identifier[colorize] ( literal[string] , literal[string] )
keyword[elif] identifier[server_status] == literal[string] :
identifier[server_string] = identifier[colorize] ( literal[string] , literal[string] )
keyword[elif] identifier[server_status] == literal[string] :
identifier[server_string] = identifier[colorize] ( literal[string] , literal[string] )
keyword[elif] identifier[server_status] == literal[string] :
identifier[server_string] = identifier[colorize] ( literal[string] , literal[string] )
identifier[prompt] += literal[string] + identifier[server_string]
identifier[prompt] += literal[string] + identifier[colorize] ( literal[string] , literal[string] )
identifier[prompt] += literal[string]
identifier[self] . identifier[prompt] = identifier[prompt] | def color_prompt(self):
""" Construct psiTurk shell prompt """
prompt = '[' + colorize('psiTurk', 'bold')
server_string = ''
server_status = self.server.is_server_running()
if server_status == 'yes':
server_string = colorize('on', 'green') # depends on [control=['if'], data=[]]
elif server_status == 'no':
server_string = colorize('off', 'red') # depends on [control=['if'], data=[]]
elif server_status == 'maybe':
server_string = colorize('unknown', 'yellow') # depends on [control=['if'], data=[]]
elif server_status == 'blocked':
server_string = colorize('blocked', 'red') # depends on [control=['if'], data=[]]
prompt += ' server:' + server_string
prompt += ' mode:' + colorize('cabin', 'bold')
prompt += ']$ '
self.prompt = prompt |
def name(self, name):
"""
Get the ``mets:name`` element value.
"""
if name is not None:
el_name = self._el.find('mets:name', NS)
if el_name is None:
el_name = ET.SubElement(self._el, TAG_METS_NAME)
el_name.text = name | def function[name, parameter[self, name]]:
constant[
Get the ``mets:name`` element value.
]
if compare[name[name] is_not constant[None]] begin[:]
variable[el_name] assign[=] call[name[self]._el.find, parameter[constant[mets:name], name[NS]]]
if compare[name[el_name] is constant[None]] begin[:]
variable[el_name] assign[=] call[name[ET].SubElement, parameter[name[self]._el, name[TAG_METS_NAME]]]
name[el_name].text assign[=] name[name] | keyword[def] identifier[name] ( identifier[self] , identifier[name] ):
literal[string]
keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] :
identifier[el_name] = identifier[self] . identifier[_el] . identifier[find] ( literal[string] , identifier[NS] )
keyword[if] identifier[el_name] keyword[is] keyword[None] :
identifier[el_name] = identifier[ET] . identifier[SubElement] ( identifier[self] . identifier[_el] , identifier[TAG_METS_NAME] )
identifier[el_name] . identifier[text] = identifier[name] | def name(self, name):
"""
Get the ``mets:name`` element value.
"""
if name is not None:
el_name = self._el.find('mets:name', NS)
if el_name is None:
el_name = ET.SubElement(self._el, TAG_METS_NAME) # depends on [control=['if'], data=['el_name']]
el_name.text = name # depends on [control=['if'], data=['name']] |
def variable(
self,
name=None,
function=None,
decl_type=None,
header_dir=None,
header_file=None,
recursive=None):
"""returns reference to variable declaration, that is matched defined
criteria"""
return (
self._find_single(
self._impl_matchers[
scopedef_t.variable],
name=name,
function=function,
decl_type=decl_type,
header_dir=header_dir,
header_file=header_file,
recursive=recursive)
) | def function[variable, parameter[self, name, function, decl_type, header_dir, header_file, recursive]]:
constant[returns reference to variable declaration, that is matched defined
criteria]
return[call[name[self]._find_single, parameter[call[name[self]._impl_matchers][name[scopedef_t].variable]]]] | keyword[def] identifier[variable] (
identifier[self] ,
identifier[name] = keyword[None] ,
identifier[function] = keyword[None] ,
identifier[decl_type] = keyword[None] ,
identifier[header_dir] = keyword[None] ,
identifier[header_file] = keyword[None] ,
identifier[recursive] = keyword[None] ):
literal[string]
keyword[return] (
identifier[self] . identifier[_find_single] (
identifier[self] . identifier[_impl_matchers] [
identifier[scopedef_t] . identifier[variable] ],
identifier[name] = identifier[name] ,
identifier[function] = identifier[function] ,
identifier[decl_type] = identifier[decl_type] ,
identifier[header_dir] = identifier[header_dir] ,
identifier[header_file] = identifier[header_file] ,
identifier[recursive] = identifier[recursive] )
) | def variable(self, name=None, function=None, decl_type=None, header_dir=None, header_file=None, recursive=None):
"""returns reference to variable declaration, that is matched defined
criteria"""
return self._find_single(self._impl_matchers[scopedef_t.variable], name=name, function=function, decl_type=decl_type, header_dir=header_dir, header_file=header_file, recursive=recursive) |
def relabel(self, label=None, group=None, depth=1):
"""Clone object and apply new group and/or label.
Applies relabeling to children up to the supplied depth.
Args:
label (str, optional): New label to apply to returned object
group (str, optional): New group to apply to returned object
depth (int, optional): Depth to which relabel will be applied
If applied to container allows applying relabeling to
contained objects up to the specified depth
Returns:
Returns relabelled object
"""
return super(HoloMap, self).relabel(label=label, group=group, depth=depth) | def function[relabel, parameter[self, label, group, depth]]:
constant[Clone object and apply new group and/or label.
Applies relabeling to children up to the supplied depth.
Args:
label (str, optional): New label to apply to returned object
group (str, optional): New group to apply to returned object
depth (int, optional): Depth to which relabel will be applied
If applied to container allows applying relabeling to
contained objects up to the specified depth
Returns:
Returns relabelled object
]
return[call[call[name[super], parameter[name[HoloMap], name[self]]].relabel, parameter[]]] | keyword[def] identifier[relabel] ( identifier[self] , identifier[label] = keyword[None] , identifier[group] = keyword[None] , identifier[depth] = literal[int] ):
literal[string]
keyword[return] identifier[super] ( identifier[HoloMap] , identifier[self] ). identifier[relabel] ( identifier[label] = identifier[label] , identifier[group] = identifier[group] , identifier[depth] = identifier[depth] ) | def relabel(self, label=None, group=None, depth=1):
"""Clone object and apply new group and/or label.
Applies relabeling to children up to the supplied depth.
Args:
label (str, optional): New label to apply to returned object
group (str, optional): New group to apply to returned object
depth (int, optional): Depth to which relabel will be applied
If applied to container allows applying relabeling to
contained objects up to the specified depth
Returns:
Returns relabelled object
"""
return super(HoloMap, self).relabel(label=label, group=group, depth=depth) |
def extract(pcmiter, samplerate, channels, duration = -1):
"""Given a PCM data stream, extract fingerprint data from the
audio. Returns a byte string of fingerprint data. Raises an
ExtractionError if fingerprinting fails.
"""
extractor = _fplib.Extractor(samplerate, channels, duration)
# Get first block.
try:
next_block = next(pcmiter)
except StopIteration:
raise ExtractionError()
# Get and process subsequent blocks.
while True:
# Shift over blocks.
cur_block = next_block
try:
next_block = next(pcmiter)
except StopIteration:
next_block = None
done = next_block is None
# Process the block.
try:
if extractor.process(cur_block, done):
# Success!
break
except RuntimeError as exc:
# Exception from fplib. Most likely the file is too short.
raise ExtractionError(exc.args[0])
# End of file but processor never became ready?
if done:
raise ExtractionError()
# Get resulting fingerprint data.
out = extractor.result()
if out is None:
raise ExtractionError()
# Free extractor memory.
extractor.free()
return out | def function[extract, parameter[pcmiter, samplerate, channels, duration]]:
constant[Given a PCM data stream, extract fingerprint data from the
audio. Returns a byte string of fingerprint data. Raises an
ExtractionError if fingerprinting fails.
]
variable[extractor] assign[=] call[name[_fplib].Extractor, parameter[name[samplerate], name[channels], name[duration]]]
<ast.Try object at 0x7da1b0b44400>
while constant[True] begin[:]
variable[cur_block] assign[=] name[next_block]
<ast.Try object at 0x7da1b0bcf520>
variable[done] assign[=] compare[name[next_block] is constant[None]]
<ast.Try object at 0x7da1b0bcfc70>
if name[done] begin[:]
<ast.Raise object at 0x7da1b0bced70>
variable[out] assign[=] call[name[extractor].result, parameter[]]
if compare[name[out] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0bcfb80>
call[name[extractor].free, parameter[]]
return[name[out]] | keyword[def] identifier[extract] ( identifier[pcmiter] , identifier[samplerate] , identifier[channels] , identifier[duration] =- literal[int] ):
literal[string]
identifier[extractor] = identifier[_fplib] . identifier[Extractor] ( identifier[samplerate] , identifier[channels] , identifier[duration] )
keyword[try] :
identifier[next_block] = identifier[next] ( identifier[pcmiter] )
keyword[except] identifier[StopIteration] :
keyword[raise] identifier[ExtractionError] ()
keyword[while] keyword[True] :
identifier[cur_block] = identifier[next_block]
keyword[try] :
identifier[next_block] = identifier[next] ( identifier[pcmiter] )
keyword[except] identifier[StopIteration] :
identifier[next_block] = keyword[None]
identifier[done] = identifier[next_block] keyword[is] keyword[None]
keyword[try] :
keyword[if] identifier[extractor] . identifier[process] ( identifier[cur_block] , identifier[done] ):
keyword[break]
keyword[except] identifier[RuntimeError] keyword[as] identifier[exc] :
keyword[raise] identifier[ExtractionError] ( identifier[exc] . identifier[args] [ literal[int] ])
keyword[if] identifier[done] :
keyword[raise] identifier[ExtractionError] ()
identifier[out] = identifier[extractor] . identifier[result] ()
keyword[if] identifier[out] keyword[is] keyword[None] :
keyword[raise] identifier[ExtractionError] ()
identifier[extractor] . identifier[free] ()
keyword[return] identifier[out] | def extract(pcmiter, samplerate, channels, duration=-1):
"""Given a PCM data stream, extract fingerprint data from the
audio. Returns a byte string of fingerprint data. Raises an
ExtractionError if fingerprinting fails.
"""
extractor = _fplib.Extractor(samplerate, channels, duration)
# Get first block.
try:
next_block = next(pcmiter) # depends on [control=['try'], data=[]]
except StopIteration:
raise ExtractionError() # depends on [control=['except'], data=[]]
# Get and process subsequent blocks.
while True:
# Shift over blocks.
cur_block = next_block
try:
next_block = next(pcmiter) # depends on [control=['try'], data=[]]
except StopIteration:
next_block = None # depends on [control=['except'], data=[]]
done = next_block is None
# Process the block.
try:
if extractor.process(cur_block, done):
# Success!
break # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except RuntimeError as exc:
# Exception from fplib. Most likely the file is too short.
raise ExtractionError(exc.args[0]) # depends on [control=['except'], data=['exc']]
# End of file but processor never became ready?
if done:
raise ExtractionError() # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
# Get resulting fingerprint data.
out = extractor.result()
if out is None:
raise ExtractionError() # depends on [control=['if'], data=[]]
# Free extractor memory.
extractor.free()
return out |
def index_particles(particles):
"""Indexes :class:`Particle` objects. It returns a regex pattern which
matches to any particle morphs and a dictionary indexes the given particles
by regex groups.
"""
patterns, indices = [], {}
for x, p in enumerate(particles):
group = u'_%d' % x
indices[group] = x
patterns.append(u'(?P<%s>%s)' % (group, p.regex_pattern()))
pattern = re.compile(u'|'.join(patterns))
return pattern, indices | def function[index_particles, parameter[particles]]:
constant[Indexes :class:`Particle` objects. It returns a regex pattern which
matches to any particle morphs and a dictionary indexes the given particles
by regex groups.
]
<ast.Tuple object at 0x7da1aff1c7c0> assign[=] tuple[[<ast.List object at 0x7da1aff1de10>, <ast.Dict object at 0x7da1aff1ff40>]]
for taget[tuple[[<ast.Name object at 0x7da1aff1c760>, <ast.Name object at 0x7da1aff1cf10>]]] in starred[call[name[enumerate], parameter[name[particles]]]] begin[:]
variable[group] assign[=] binary_operation[constant[_%d] <ast.Mod object at 0x7da2590d6920> name[x]]
call[name[indices]][name[group]] assign[=] name[x]
call[name[patterns].append, parameter[binary_operation[constant[(?P<%s>%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1aff1d870>, <ast.Call object at 0x7da1aff1c370>]]]]]
variable[pattern] assign[=] call[name[re].compile, parameter[call[constant[|].join, parameter[name[patterns]]]]]
return[tuple[[<ast.Name object at 0x7da1aff1cd60>, <ast.Name object at 0x7da1aff1cb80>]]] | keyword[def] identifier[index_particles] ( identifier[particles] ):
literal[string]
identifier[patterns] , identifier[indices] =[],{}
keyword[for] identifier[x] , identifier[p] keyword[in] identifier[enumerate] ( identifier[particles] ):
identifier[group] = literal[string] % identifier[x]
identifier[indices] [ identifier[group] ]= identifier[x]
identifier[patterns] . identifier[append] ( literal[string] %( identifier[group] , identifier[p] . identifier[regex_pattern] ()))
identifier[pattern] = identifier[re] . identifier[compile] ( literal[string] . identifier[join] ( identifier[patterns] ))
keyword[return] identifier[pattern] , identifier[indices] | def index_particles(particles):
"""Indexes :class:`Particle` objects. It returns a regex pattern which
matches to any particle morphs and a dictionary indexes the given particles
by regex groups.
"""
(patterns, indices) = ([], {})
for (x, p) in enumerate(particles):
group = u'_%d' % x
indices[group] = x
patterns.append(u'(?P<%s>%s)' % (group, p.regex_pattern())) # depends on [control=['for'], data=[]]
pattern = re.compile(u'|'.join(patterns))
return (pattern, indices) |
def get_eager_datasource(cls, session, datasource_type, datasource_id):
"""Returns datasource with columns and metrics."""
datasource_class = ConnectorRegistry.sources[datasource_type]
return (
session.query(datasource_class)
.options(
subqueryload(datasource_class.columns),
subqueryload(datasource_class.metrics),
)
.filter_by(id=datasource_id)
.one()
) | def function[get_eager_datasource, parameter[cls, session, datasource_type, datasource_id]]:
constant[Returns datasource with columns and metrics.]
variable[datasource_class] assign[=] call[name[ConnectorRegistry].sources][name[datasource_type]]
return[call[call[call[call[name[session].query, parameter[name[datasource_class]]].options, parameter[call[name[subqueryload], parameter[name[datasource_class].columns]], call[name[subqueryload], parameter[name[datasource_class].metrics]]]].filter_by, parameter[]].one, parameter[]]] | keyword[def] identifier[get_eager_datasource] ( identifier[cls] , identifier[session] , identifier[datasource_type] , identifier[datasource_id] ):
literal[string]
identifier[datasource_class] = identifier[ConnectorRegistry] . identifier[sources] [ identifier[datasource_type] ]
keyword[return] (
identifier[session] . identifier[query] ( identifier[datasource_class] )
. identifier[options] (
identifier[subqueryload] ( identifier[datasource_class] . identifier[columns] ),
identifier[subqueryload] ( identifier[datasource_class] . identifier[metrics] ),
)
. identifier[filter_by] ( identifier[id] = identifier[datasource_id] )
. identifier[one] ()
) | def get_eager_datasource(cls, session, datasource_type, datasource_id):
"""Returns datasource with columns and metrics."""
datasource_class = ConnectorRegistry.sources[datasource_type]
return session.query(datasource_class).options(subqueryload(datasource_class.columns), subqueryload(datasource_class.metrics)).filter_by(id=datasource_id).one() |
def command_for_func(func):
"""Create a command that calls the given function."""
class FuncCommand(BaseCommand):
def run(self):
func()
update_package_data(self.distribution)
return FuncCommand | def function[command_for_func, parameter[func]]:
constant[Create a command that calls the given function.]
class class[FuncCommand, parameter[]] begin[:]
def function[run, parameter[self]]:
call[name[func], parameter[]]
call[name[update_package_data], parameter[name[self].distribution]]
return[name[FuncCommand]] | keyword[def] identifier[command_for_func] ( identifier[func] ):
literal[string]
keyword[class] identifier[FuncCommand] ( identifier[BaseCommand] ):
keyword[def] identifier[run] ( identifier[self] ):
identifier[func] ()
identifier[update_package_data] ( identifier[self] . identifier[distribution] )
keyword[return] identifier[FuncCommand] | def command_for_func(func):
"""Create a command that calls the given function."""
class FuncCommand(BaseCommand):
def run(self):
func()
update_package_data(self.distribution)
return FuncCommand |
def _set_autocommit(connection):
"""Make sure a connection is in autocommit mode."""
if hasattr(connection.connection, "autocommit"):
if callable(connection.connection.autocommit):
connection.connection.autocommit(True)
else:
connection.connection.autocommit = True
elif hasattr(connection.connection, "set_isolation_level"):
connection.connection.set_isolation_level(0) | def function[_set_autocommit, parameter[connection]]:
constant[Make sure a connection is in autocommit mode.]
if call[name[hasattr], parameter[name[connection].connection, constant[autocommit]]] begin[:]
if call[name[callable], parameter[name[connection].connection.autocommit]] begin[:]
call[name[connection].connection.autocommit, parameter[constant[True]]] | keyword[def] identifier[_set_autocommit] ( identifier[connection] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[connection] . identifier[connection] , literal[string] ):
keyword[if] identifier[callable] ( identifier[connection] . identifier[connection] . identifier[autocommit] ):
identifier[connection] . identifier[connection] . identifier[autocommit] ( keyword[True] )
keyword[else] :
identifier[connection] . identifier[connection] . identifier[autocommit] = keyword[True]
keyword[elif] identifier[hasattr] ( identifier[connection] . identifier[connection] , literal[string] ):
identifier[connection] . identifier[connection] . identifier[set_isolation_level] ( literal[int] ) | def _set_autocommit(connection):
"""Make sure a connection is in autocommit mode."""
if hasattr(connection.connection, 'autocommit'):
if callable(connection.connection.autocommit):
connection.connection.autocommit(True) # depends on [control=['if'], data=[]]
else:
connection.connection.autocommit = True # depends on [control=['if'], data=[]]
elif hasattr(connection.connection, 'set_isolation_level'):
connection.connection.set_isolation_level(0) # depends on [control=['if'], data=[]] |
def load_module(self, path, changed_time, parser=None):
"""Attempts to load the specified module from a serialized, cached
version. If that fails, the method returns none."""
if settings.use_filesystem_cache == False:
return None
try:
pickle_changed_time = self._index[path]
except KeyError:
return None
if (changed_time is not None and
pickle_changed_time < changed_time):
# the pickle file is outdated
return None
target_path = self._get_hashed_path(path)
with open(target_path, 'rb') as f:
try:
gc.disable()
cache_module = pickle.load(f)
if parser is not None:
for mod in cache_module:
mod.unpickle(parser)
finally:
gc.enable()
debug.dbg('pickle loaded: %s', path)
return cache_module | def function[load_module, parameter[self, path, changed_time, parser]]:
constant[Attempts to load the specified module from a serialized, cached
version. If that fails, the method returns none.]
if compare[name[settings].use_filesystem_cache equal[==] constant[False]] begin[:]
return[constant[None]]
<ast.Try object at 0x7da18fe91a50>
if <ast.BoolOp object at 0x7da18fe91d80> begin[:]
return[constant[None]]
variable[target_path] assign[=] call[name[self]._get_hashed_path, parameter[name[path]]]
with call[name[open], parameter[name[target_path], constant[rb]]] begin[:]
<ast.Try object at 0x7da204620b50>
call[name[debug].dbg, parameter[constant[pickle loaded: %s], name[path]]]
return[name[cache_module]] | keyword[def] identifier[load_module] ( identifier[self] , identifier[path] , identifier[changed_time] , identifier[parser] = keyword[None] ):
literal[string]
keyword[if] identifier[settings] . identifier[use_filesystem_cache] == keyword[False] :
keyword[return] keyword[None]
keyword[try] :
identifier[pickle_changed_time] = identifier[self] . identifier[_index] [ identifier[path] ]
keyword[except] identifier[KeyError] :
keyword[return] keyword[None]
keyword[if] ( identifier[changed_time] keyword[is] keyword[not] keyword[None] keyword[and]
identifier[pickle_changed_time] < identifier[changed_time] ):
keyword[return] keyword[None]
identifier[target_path] = identifier[self] . identifier[_get_hashed_path] ( identifier[path] )
keyword[with] identifier[open] ( identifier[target_path] , literal[string] ) keyword[as] identifier[f] :
keyword[try] :
identifier[gc] . identifier[disable] ()
identifier[cache_module] = identifier[pickle] . identifier[load] ( identifier[f] )
keyword[if] identifier[parser] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[mod] keyword[in] identifier[cache_module] :
identifier[mod] . identifier[unpickle] ( identifier[parser] )
keyword[finally] :
identifier[gc] . identifier[enable] ()
identifier[debug] . identifier[dbg] ( literal[string] , identifier[path] )
keyword[return] identifier[cache_module] | def load_module(self, path, changed_time, parser=None):
"""Attempts to load the specified module from a serialized, cached
version. If that fails, the method returns none."""
if settings.use_filesystem_cache == False:
return None # depends on [control=['if'], data=[]]
try:
pickle_changed_time = self._index[path] # depends on [control=['try'], data=[]]
except KeyError:
return None # depends on [control=['except'], data=[]]
if changed_time is not None and pickle_changed_time < changed_time:
# the pickle file is outdated
return None # depends on [control=['if'], data=[]]
target_path = self._get_hashed_path(path)
with open(target_path, 'rb') as f:
try:
gc.disable()
cache_module = pickle.load(f)
if parser is not None:
for mod in cache_module:
mod.unpickle(parser) # depends on [control=['for'], data=['mod']] # depends on [control=['if'], data=['parser']] # depends on [control=['try'], data=[]]
finally:
gc.enable() # depends on [control=['with'], data=['f']]
debug.dbg('pickle loaded: %s', path)
return cache_module |
def save(self):
"""Save a new Entity into repository.
Performs unique validations before creating the entity.
"""
logger.debug(
f'Saving `{self.__class__.__name__}` object')
# Fetch Model class and connected repository from Repository Factory
model_cls = repo_factory.get_model(self.__class__)
repository = repo_factory.get_repository(self.__class__)
try:
# Do unique checks, update the record and return the Entity
self._validate_unique(create=False)
# Perform Pre-Save Actions
self.pre_save()
# Build the model object and create it
model_obj = repository.create(model_cls.from_entity(self))
# Update the auto fields of the entity
for field_name, field_obj in self.meta_.declared_fields.items():
if isinstance(field_obj, Auto):
if isinstance(model_obj, dict):
field_val = model_obj[field_name]
else:
field_val = getattr(model_obj, field_name)
setattr(self, field_name, field_val)
# Set Entity status to saved
self.state_.mark_saved()
# Perform Post-Save Actions
self.post_save()
return self
except Exception:
# FIXME Log Exception
raise | def function[save, parameter[self]]:
constant[Save a new Entity into repository.
Performs unique validations before creating the entity.
]
call[name[logger].debug, parameter[<ast.JoinedStr object at 0x7da18dc9bfa0>]]
variable[model_cls] assign[=] call[name[repo_factory].get_model, parameter[name[self].__class__]]
variable[repository] assign[=] call[name[repo_factory].get_repository, parameter[name[self].__class__]]
<ast.Try object at 0x7da1b1905ea0> | keyword[def] identifier[save] ( identifier[self] ):
literal[string]
identifier[logger] . identifier[debug] (
literal[string] )
identifier[model_cls] = identifier[repo_factory] . identifier[get_model] ( identifier[self] . identifier[__class__] )
identifier[repository] = identifier[repo_factory] . identifier[get_repository] ( identifier[self] . identifier[__class__] )
keyword[try] :
identifier[self] . identifier[_validate_unique] ( identifier[create] = keyword[False] )
identifier[self] . identifier[pre_save] ()
identifier[model_obj] = identifier[repository] . identifier[create] ( identifier[model_cls] . identifier[from_entity] ( identifier[self] ))
keyword[for] identifier[field_name] , identifier[field_obj] keyword[in] identifier[self] . identifier[meta_] . identifier[declared_fields] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[field_obj] , identifier[Auto] ):
keyword[if] identifier[isinstance] ( identifier[model_obj] , identifier[dict] ):
identifier[field_val] = identifier[model_obj] [ identifier[field_name] ]
keyword[else] :
identifier[field_val] = identifier[getattr] ( identifier[model_obj] , identifier[field_name] )
identifier[setattr] ( identifier[self] , identifier[field_name] , identifier[field_val] )
identifier[self] . identifier[state_] . identifier[mark_saved] ()
identifier[self] . identifier[post_save] ()
keyword[return] identifier[self]
keyword[except] identifier[Exception] :
keyword[raise] | def save(self):
"""Save a new Entity into repository.
Performs unique validations before creating the entity.
"""
logger.debug(f'Saving `{self.__class__.__name__}` object')
# Fetch Model class and connected repository from Repository Factory
model_cls = repo_factory.get_model(self.__class__)
repository = repo_factory.get_repository(self.__class__)
try:
# Do unique checks, update the record and return the Entity
self._validate_unique(create=False)
# Perform Pre-Save Actions
self.pre_save()
# Build the model object and create it
model_obj = repository.create(model_cls.from_entity(self))
# Update the auto fields of the entity
for (field_name, field_obj) in self.meta_.declared_fields.items():
if isinstance(field_obj, Auto):
if isinstance(model_obj, dict):
field_val = model_obj[field_name] # depends on [control=['if'], data=[]]
else:
field_val = getattr(model_obj, field_name)
setattr(self, field_name, field_val) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# Set Entity status to saved
self.state_.mark_saved()
# Perform Post-Save Actions
self.post_save()
return self # depends on [control=['try'], data=[]]
except Exception:
# FIXME Log Exception
raise # depends on [control=['except'], data=[]] |
def primary_key(self, hkey, rkey=None):
"""
Construct a primary key dictionary
You can either pass in a (hash_key[, range_key]) as the arguments, or
you may pass in an Item itself
"""
if isinstance(hkey, dict):
def decode(val):
""" Convert Decimals back to primitives """
if isinstance(val, Decimal):
return float(val)
return val
pkey = {self.hash_key.name: decode(hkey[self.hash_key.name])}
if self.range_key is not None:
pkey[self.range_key.name] = decode(hkey[self.range_key.name])
return pkey
else:
pkey = {self.hash_key.name: hkey}
if self.range_key is not None:
if rkey is None:
raise ValueError("Range key is missing!")
pkey[self.range_key.name] = rkey
return pkey | def function[primary_key, parameter[self, hkey, rkey]]:
constant[
Construct a primary key dictionary
You can either pass in a (hash_key[, range_key]) as the arguments, or
you may pass in an Item itself
]
if call[name[isinstance], parameter[name[hkey], name[dict]]] begin[:]
def function[decode, parameter[val]]:
constant[ Convert Decimals back to primitives ]
if call[name[isinstance], parameter[name[val], name[Decimal]]] begin[:]
return[call[name[float], parameter[name[val]]]]
return[name[val]]
variable[pkey] assign[=] dictionary[[<ast.Attribute object at 0x7da1b0cb1c60>], [<ast.Call object at 0x7da1b0cb0490>]]
if compare[name[self].range_key is_not constant[None]] begin[:]
call[name[pkey]][name[self].range_key.name] assign[=] call[name[decode], parameter[call[name[hkey]][name[self].range_key.name]]]
return[name[pkey]] | keyword[def] identifier[primary_key] ( identifier[self] , identifier[hkey] , identifier[rkey] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[hkey] , identifier[dict] ):
keyword[def] identifier[decode] ( identifier[val] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[val] , identifier[Decimal] ):
keyword[return] identifier[float] ( identifier[val] )
keyword[return] identifier[val]
identifier[pkey] ={ identifier[self] . identifier[hash_key] . identifier[name] : identifier[decode] ( identifier[hkey] [ identifier[self] . identifier[hash_key] . identifier[name] ])}
keyword[if] identifier[self] . identifier[range_key] keyword[is] keyword[not] keyword[None] :
identifier[pkey] [ identifier[self] . identifier[range_key] . identifier[name] ]= identifier[decode] ( identifier[hkey] [ identifier[self] . identifier[range_key] . identifier[name] ])
keyword[return] identifier[pkey]
keyword[else] :
identifier[pkey] ={ identifier[self] . identifier[hash_key] . identifier[name] : identifier[hkey] }
keyword[if] identifier[self] . identifier[range_key] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[rkey] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[pkey] [ identifier[self] . identifier[range_key] . identifier[name] ]= identifier[rkey]
keyword[return] identifier[pkey] | def primary_key(self, hkey, rkey=None):
"""
Construct a primary key dictionary
You can either pass in a (hash_key[, range_key]) as the arguments, or
you may pass in an Item itself
"""
if isinstance(hkey, dict):
def decode(val):
""" Convert Decimals back to primitives """
if isinstance(val, Decimal):
return float(val) # depends on [control=['if'], data=[]]
return val
pkey = {self.hash_key.name: decode(hkey[self.hash_key.name])}
if self.range_key is not None:
pkey[self.range_key.name] = decode(hkey[self.range_key.name]) # depends on [control=['if'], data=[]]
return pkey # depends on [control=['if'], data=[]]
else:
pkey = {self.hash_key.name: hkey}
if self.range_key is not None:
if rkey is None:
raise ValueError('Range key is missing!') # depends on [control=['if'], data=[]]
pkey[self.range_key.name] = rkey # depends on [control=['if'], data=[]]
return pkey |
def load(self, filename):
"""Load file information from a filename."""
self.filename = filename
fileobj = open(filename, "rb")
try:
try:
self.info = self._Info(fileobj)
self.tags = self._Tags(fileobj, self.info)
self.info._post_tags(fileobj)
except error as e:
reraise(self._Error, e, sys.exc_info()[2])
except EOFError:
raise self._Error("no appropriate stream found")
finally:
fileobj.close() | def function[load, parameter[self, filename]]:
constant[Load file information from a filename.]
name[self].filename assign[=] name[filename]
variable[fileobj] assign[=] call[name[open], parameter[name[filename], constant[rb]]]
<ast.Try object at 0x7da20cabdc30> | keyword[def] identifier[load] ( identifier[self] , identifier[filename] ):
literal[string]
identifier[self] . identifier[filename] = identifier[filename]
identifier[fileobj] = identifier[open] ( identifier[filename] , literal[string] )
keyword[try] :
keyword[try] :
identifier[self] . identifier[info] = identifier[self] . identifier[_Info] ( identifier[fileobj] )
identifier[self] . identifier[tags] = identifier[self] . identifier[_Tags] ( identifier[fileobj] , identifier[self] . identifier[info] )
identifier[self] . identifier[info] . identifier[_post_tags] ( identifier[fileobj] )
keyword[except] identifier[error] keyword[as] identifier[e] :
identifier[reraise] ( identifier[self] . identifier[_Error] , identifier[e] , identifier[sys] . identifier[exc_info] ()[ literal[int] ])
keyword[except] identifier[EOFError] :
keyword[raise] identifier[self] . identifier[_Error] ( literal[string] )
keyword[finally] :
identifier[fileobj] . identifier[close] () | def load(self, filename):
"""Load file information from a filename."""
self.filename = filename
fileobj = open(filename, 'rb')
try:
try:
self.info = self._Info(fileobj)
self.tags = self._Tags(fileobj, self.info)
self.info._post_tags(fileobj) # depends on [control=['try'], data=[]]
except error as e:
reraise(self._Error, e, sys.exc_info()[2]) # depends on [control=['except'], data=['e']]
except EOFError:
raise self._Error('no appropriate stream found') # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]]
finally:
fileobj.close() |
def mkdir(dir_path,
user=None,
group=None,
mode=None):
'''
Ensure that a directory is available.
CLI Example:
.. code-block:: bash
salt '*' file.mkdir /opt/jetty/context
'''
dir_path = os.path.expanduser(dir_path)
directory = os.path.normpath(dir_path)
if not os.path.isdir(directory):
# If a caller such as managed() is invoked with makedirs=True, make
# sure that any created dirs are created with the same user and group
# to follow the principal of least surprise method.
makedirs_perms(directory, user, group, mode)
return True | def function[mkdir, parameter[dir_path, user, group, mode]]:
constant[
Ensure that a directory is available.
CLI Example:
.. code-block:: bash
salt '*' file.mkdir /opt/jetty/context
]
variable[dir_path] assign[=] call[name[os].path.expanduser, parameter[name[dir_path]]]
variable[directory] assign[=] call[name[os].path.normpath, parameter[name[dir_path]]]
if <ast.UnaryOp object at 0x7da18f09eb90> begin[:]
call[name[makedirs_perms], parameter[name[directory], name[user], name[group], name[mode]]]
return[constant[True]] | keyword[def] identifier[mkdir] ( identifier[dir_path] ,
identifier[user] = keyword[None] ,
identifier[group] = keyword[None] ,
identifier[mode] = keyword[None] ):
literal[string]
identifier[dir_path] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[dir_path] )
identifier[directory] = identifier[os] . identifier[path] . identifier[normpath] ( identifier[dir_path] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[directory] ):
identifier[makedirs_perms] ( identifier[directory] , identifier[user] , identifier[group] , identifier[mode] )
keyword[return] keyword[True] | def mkdir(dir_path, user=None, group=None, mode=None):
"""
Ensure that a directory is available.
CLI Example:
.. code-block:: bash
salt '*' file.mkdir /opt/jetty/context
"""
dir_path = os.path.expanduser(dir_path)
directory = os.path.normpath(dir_path)
if not os.path.isdir(directory):
# If a caller such as managed() is invoked with makedirs=True, make
# sure that any created dirs are created with the same user and group
# to follow the principal of least surprise method.
makedirs_perms(directory, user, group, mode) # depends on [control=['if'], data=[]]
return True |
def branch(self, root, parts):
"""
Traverse the path until a leaf is reached.
@param parts: A list of path parts.
@type parts: [str,..]
@param root: The root.
@type root: L{xsd.sxbase.SchemaObject}
@return: The end of the branch.
@rtype: L{xsd.sxbase.SchemaObject}
"""
result = root
for part in parts[1:-1]:
name = splitPrefix(part)[1]
log.debug('searching parent (%s) for (%s)', Repr(result), name)
result, ancestry = result.get_child(name)
if result is None:
log.error('(%s) not-found', name)
raise PathResolver.BadPath(name)
result = result.resolve(nobuiltin=True)
log.debug('found (%s) as (%s)', name, Repr(result))
return result | def function[branch, parameter[self, root, parts]]:
constant[
Traverse the path until a leaf is reached.
@param parts: A list of path parts.
@type parts: [str,..]
@param root: The root.
@type root: L{xsd.sxbase.SchemaObject}
@return: The end of the branch.
@rtype: L{xsd.sxbase.SchemaObject}
]
variable[result] assign[=] name[root]
for taget[name[part]] in starred[call[name[parts]][<ast.Slice object at 0x7da18ede5600>]] begin[:]
variable[name] assign[=] call[call[name[splitPrefix], parameter[name[part]]]][constant[1]]
call[name[log].debug, parameter[constant[searching parent (%s) for (%s)], call[name[Repr], parameter[name[result]]], name[name]]]
<ast.Tuple object at 0x7da18ede6c50> assign[=] call[name[result].get_child, parameter[name[name]]]
if compare[name[result] is constant[None]] begin[:]
call[name[log].error, parameter[constant[(%s) not-found], name[name]]]
<ast.Raise object at 0x7da18f723970>
variable[result] assign[=] call[name[result].resolve, parameter[]]
call[name[log].debug, parameter[constant[found (%s) as (%s)], name[name], call[name[Repr], parameter[name[result]]]]]
return[name[result]] | keyword[def] identifier[branch] ( identifier[self] , identifier[root] , identifier[parts] ):
literal[string]
identifier[result] = identifier[root]
keyword[for] identifier[part] keyword[in] identifier[parts] [ literal[int] :- literal[int] ]:
identifier[name] = identifier[splitPrefix] ( identifier[part] )[ literal[int] ]
identifier[log] . identifier[debug] ( literal[string] , identifier[Repr] ( identifier[result] ), identifier[name] )
identifier[result] , identifier[ancestry] = identifier[result] . identifier[get_child] ( identifier[name] )
keyword[if] identifier[result] keyword[is] keyword[None] :
identifier[log] . identifier[error] ( literal[string] , identifier[name] )
keyword[raise] identifier[PathResolver] . identifier[BadPath] ( identifier[name] )
identifier[result] = identifier[result] . identifier[resolve] ( identifier[nobuiltin] = keyword[True] )
identifier[log] . identifier[debug] ( literal[string] , identifier[name] , identifier[Repr] ( identifier[result] ))
keyword[return] identifier[result] | def branch(self, root, parts):
"""
Traverse the path until a leaf is reached.
@param parts: A list of path parts.
@type parts: [str,..]
@param root: The root.
@type root: L{xsd.sxbase.SchemaObject}
@return: The end of the branch.
@rtype: L{xsd.sxbase.SchemaObject}
"""
result = root
for part in parts[1:-1]:
name = splitPrefix(part)[1]
log.debug('searching parent (%s) for (%s)', Repr(result), name)
(result, ancestry) = result.get_child(name)
if result is None:
log.error('(%s) not-found', name)
raise PathResolver.BadPath(name) # depends on [control=['if'], data=[]]
result = result.resolve(nobuiltin=True)
log.debug('found (%s) as (%s)', name, Repr(result)) # depends on [control=['for'], data=['part']]
return result |
def deprecated(func):
'''This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emitted
when the function is used.'''
import warnings
@functools.wraps(func)
def new_func(*args, **kwargs):
if is_python_3:
code = func.__code__
else:
code = func.func_code
warnings.warn_explicit(
"Call to deprecated function {}.".format(func.__name__),
category=Warning,
filename=code.co_filename,
lineno=code.co_firstlineno + 1
)
return func(*args, **kwargs)
return new_func | def function[deprecated, parameter[func]]:
constant[This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emitted
when the function is used.]
import module[warnings]
def function[new_func, parameter[]]:
if name[is_python_3] begin[:]
variable[code] assign[=] name[func].__code__
call[name[warnings].warn_explicit, parameter[call[constant[Call to deprecated function {}.].format, parameter[name[func].__name__]]]]
return[call[name[func], parameter[<ast.Starred object at 0x7da2047ea350>]]]
return[name[new_func]] | keyword[def] identifier[deprecated] ( identifier[func] ):
literal[string]
keyword[import] identifier[warnings]
@ identifier[functools] . identifier[wraps] ( identifier[func] )
keyword[def] identifier[new_func] (* identifier[args] ,** identifier[kwargs] ):
keyword[if] identifier[is_python_3] :
identifier[code] = identifier[func] . identifier[__code__]
keyword[else] :
identifier[code] = identifier[func] . identifier[func_code]
identifier[warnings] . identifier[warn_explicit] (
literal[string] . identifier[format] ( identifier[func] . identifier[__name__] ),
identifier[category] = identifier[Warning] ,
identifier[filename] = identifier[code] . identifier[co_filename] ,
identifier[lineno] = identifier[code] . identifier[co_firstlineno] + literal[int]
)
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[new_func] | def deprecated(func):
"""This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emitted
when the function is used."""
import warnings
@functools.wraps(func)
def new_func(*args, **kwargs):
if is_python_3:
code = func.__code__ # depends on [control=['if'], data=[]]
else:
code = func.func_code
warnings.warn_explicit('Call to deprecated function {}.'.format(func.__name__), category=Warning, filename=code.co_filename, lineno=code.co_firstlineno + 1)
return func(*args, **kwargs)
return new_func |
def __retrieve_data(self):
"""Read more data from the file."""
if self.__eof is True:
return b''
logging.debug("Reading another block.")
block = self.read(self.__block_size)
if block == b'':
self.__log.debug("We've encountered the EOF.")
self.__eof = True
return block | def function[__retrieve_data, parameter[self]]:
constant[Read more data from the file.]
if compare[name[self].__eof is constant[True]] begin[:]
return[constant[b'']]
call[name[logging].debug, parameter[constant[Reading another block.]]]
variable[block] assign[=] call[name[self].read, parameter[name[self].__block_size]]
if compare[name[block] equal[==] constant[b'']] begin[:]
call[name[self].__log.debug, parameter[constant[We've encountered the EOF.]]]
name[self].__eof assign[=] constant[True]
return[name[block]] | keyword[def] identifier[__retrieve_data] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[__eof] keyword[is] keyword[True] :
keyword[return] literal[string]
identifier[logging] . identifier[debug] ( literal[string] )
identifier[block] = identifier[self] . identifier[read] ( identifier[self] . identifier[__block_size] )
keyword[if] identifier[block] == literal[string] :
identifier[self] . identifier[__log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[__eof] = keyword[True]
keyword[return] identifier[block] | def __retrieve_data(self):
"""Read more data from the file."""
if self.__eof is True:
return b'' # depends on [control=['if'], data=[]]
logging.debug('Reading another block.')
block = self.read(self.__block_size)
if block == b'':
self.__log.debug("We've encountered the EOF.")
self.__eof = True # depends on [control=['if'], data=[]]
return block |
def _ssweek_num_weeks(ssweek_year):
"Get the number of Sundaystarting-weeks in this year"
year_start = _ssweek_year_start(ssweek_year)
next_year_start = _ssweek_year_start(ssweek_year+1)
year_num_weeks = ((next_year_start - year_start).days) // 7
return year_num_weeks | def function[_ssweek_num_weeks, parameter[ssweek_year]]:
constant[Get the number of Sundaystarting-weeks in this year]
variable[year_start] assign[=] call[name[_ssweek_year_start], parameter[name[ssweek_year]]]
variable[next_year_start] assign[=] call[name[_ssweek_year_start], parameter[binary_operation[name[ssweek_year] + constant[1]]]]
variable[year_num_weeks] assign[=] binary_operation[binary_operation[name[next_year_start] - name[year_start]].days <ast.FloorDiv object at 0x7da2590d6bc0> constant[7]]
return[name[year_num_weeks]] | keyword[def] identifier[_ssweek_num_weeks] ( identifier[ssweek_year] ):
literal[string]
identifier[year_start] = identifier[_ssweek_year_start] ( identifier[ssweek_year] )
identifier[next_year_start] = identifier[_ssweek_year_start] ( identifier[ssweek_year] + literal[int] )
identifier[year_num_weeks] =(( identifier[next_year_start] - identifier[year_start] ). identifier[days] )// literal[int]
keyword[return] identifier[year_num_weeks] | def _ssweek_num_weeks(ssweek_year):
"""Get the number of Sundaystarting-weeks in this year"""
year_start = _ssweek_year_start(ssweek_year)
next_year_start = _ssweek_year_start(ssweek_year + 1)
year_num_weeks = (next_year_start - year_start).days // 7
return year_num_weeks |
def x_is_greater_than(self, test_ordinate):
""" Comparison for x coordinate"""
self._is_coordinate(test_ordinate)
if self.x > test_ordinate.x:
return True
else:
return False | def function[x_is_greater_than, parameter[self, test_ordinate]]:
constant[ Comparison for x coordinate]
call[name[self]._is_coordinate, parameter[name[test_ordinate]]]
if compare[name[self].x greater[>] name[test_ordinate].x] begin[:]
return[constant[True]] | keyword[def] identifier[x_is_greater_than] ( identifier[self] , identifier[test_ordinate] ):
literal[string]
identifier[self] . identifier[_is_coordinate] ( identifier[test_ordinate] )
keyword[if] identifier[self] . identifier[x] > identifier[test_ordinate] . identifier[x] :
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False] | def x_is_greater_than(self, test_ordinate):
""" Comparison for x coordinate"""
self._is_coordinate(test_ordinate)
if self.x > test_ordinate.x:
return True # depends on [control=['if'], data=[]]
else:
return False |
def profiler_set_config(mode='symbolic', filename='profile.json'):
"""Set up the configure of profiler (Deprecated).
Parameters
----------
mode : string, optional
Indicates whether to enable the profiler, can
be 'symbolic', or 'all'. Defaults to `symbolic`.
filename : string, optional
The name of output trace file. Defaults to 'profile.json'.
"""
warnings.warn('profiler.profiler_set_config() is deprecated. '
'Please use profiler.set_config() instead')
keys = c_str_array([key for key in ["profile_" + mode, "filename"]])
values = c_str_array([str(val) for val in [True, filename]])
assert len(keys) == len(values)
check_call(_LIB.MXSetProcessProfilerConfig(len(keys), keys, values, profiler_kvstore_handle)) | def function[profiler_set_config, parameter[mode, filename]]:
constant[Set up the configure of profiler (Deprecated).
Parameters
----------
mode : string, optional
Indicates whether to enable the profiler, can
be 'symbolic', or 'all'. Defaults to `symbolic`.
filename : string, optional
The name of output trace file. Defaults to 'profile.json'.
]
call[name[warnings].warn, parameter[constant[profiler.profiler_set_config() is deprecated. Please use profiler.set_config() instead]]]
variable[keys] assign[=] call[name[c_str_array], parameter[<ast.ListComp object at 0x7da1b204f250>]]
variable[values] assign[=] call[name[c_str_array], parameter[<ast.ListComp object at 0x7da1b204c610>]]
assert[compare[call[name[len], parameter[name[keys]]] equal[==] call[name[len], parameter[name[values]]]]]
call[name[check_call], parameter[call[name[_LIB].MXSetProcessProfilerConfig, parameter[call[name[len], parameter[name[keys]]], name[keys], name[values], name[profiler_kvstore_handle]]]]] | keyword[def] identifier[profiler_set_config] ( identifier[mode] = literal[string] , identifier[filename] = literal[string] ):
literal[string]
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] )
identifier[keys] = identifier[c_str_array] ([ identifier[key] keyword[for] identifier[key] keyword[in] [ literal[string] + identifier[mode] , literal[string] ]])
identifier[values] = identifier[c_str_array] ([ identifier[str] ( identifier[val] ) keyword[for] identifier[val] keyword[in] [ keyword[True] , identifier[filename] ]])
keyword[assert] identifier[len] ( identifier[keys] )== identifier[len] ( identifier[values] )
identifier[check_call] ( identifier[_LIB] . identifier[MXSetProcessProfilerConfig] ( identifier[len] ( identifier[keys] ), identifier[keys] , identifier[values] , identifier[profiler_kvstore_handle] )) | def profiler_set_config(mode='symbolic', filename='profile.json'):
"""Set up the configure of profiler (Deprecated).
Parameters
----------
mode : string, optional
Indicates whether to enable the profiler, can
be 'symbolic', or 'all'. Defaults to `symbolic`.
filename : string, optional
The name of output trace file. Defaults to 'profile.json'.
"""
warnings.warn('profiler.profiler_set_config() is deprecated. Please use profiler.set_config() instead')
keys = c_str_array([key for key in ['profile_' + mode, 'filename']])
values = c_str_array([str(val) for val in [True, filename]])
assert len(keys) == len(values)
check_call(_LIB.MXSetProcessProfilerConfig(len(keys), keys, values, profiler_kvstore_handle)) |
def grain_funcs(opts, proxy=None):
'''
Returns the grain functions
.. code-block:: python
import salt.config
import salt.loader
__opts__ = salt.config.minion_config('/etc/salt/minion')
grainfuncs = salt.loader.grain_funcs(__opts__)
'''
ret = LazyLoader(
_module_dirs(
opts,
'grains',
'grain',
ext_type_dirs='grains_dirs',
),
opts,
tag='grains',
)
ret.pack['__utils__'] = utils(opts, proxy=proxy)
return ret | def function[grain_funcs, parameter[opts, proxy]]:
constant[
Returns the grain functions
.. code-block:: python
import salt.config
import salt.loader
__opts__ = salt.config.minion_config('/etc/salt/minion')
grainfuncs = salt.loader.grain_funcs(__opts__)
]
variable[ret] assign[=] call[name[LazyLoader], parameter[call[name[_module_dirs], parameter[name[opts], constant[grains], constant[grain]]], name[opts]]]
call[name[ret].pack][constant[__utils__]] assign[=] call[name[utils], parameter[name[opts]]]
return[name[ret]] | keyword[def] identifier[grain_funcs] ( identifier[opts] , identifier[proxy] = keyword[None] ):
literal[string]
identifier[ret] = identifier[LazyLoader] (
identifier[_module_dirs] (
identifier[opts] ,
literal[string] ,
literal[string] ,
identifier[ext_type_dirs] = literal[string] ,
),
identifier[opts] ,
identifier[tag] = literal[string] ,
)
identifier[ret] . identifier[pack] [ literal[string] ]= identifier[utils] ( identifier[opts] , identifier[proxy] = identifier[proxy] )
keyword[return] identifier[ret] | def grain_funcs(opts, proxy=None):
"""
Returns the grain functions
.. code-block:: python
import salt.config
import salt.loader
__opts__ = salt.config.minion_config('/etc/salt/minion')
grainfuncs = salt.loader.grain_funcs(__opts__)
"""
ret = LazyLoader(_module_dirs(opts, 'grains', 'grain', ext_type_dirs='grains_dirs'), opts, tag='grains')
ret.pack['__utils__'] = utils(opts, proxy=proxy)
return ret |
def plotActivation(self, position=None, time=None, velocity=None):
"""
Plot the activation of the current cell populations. Assumes that
two axes have already been created, ax1 and ax2. If done in a Jupyter
notebook, this plotting will overwrite the old plot.
:param position: The current location of the animal
:param time: The current time in the simulation
:param velocity: The current velocity of the animal
"""
self.ax1.clear()
y = self.activations["n"] + self.activations["s"] + self.activations["e"] + \
self.activations["w"]
self.ax1.matshow(y.reshape(self.dimensions))
self.ax2.clear()
self.ax2.matshow(self.activationsI.reshape(self.dimensions))
self.ax3.clear()
self.ax3.matshow(self.activationHistoryI.reshape(self.dimensions))
titleString = ""
if time is not None:
titleString += "Time = {}".format(str(time))
if velocity is not None:
titleString += " Velocity = {}".format(str(velocity)[:4])
if position is not None:
titleString += " Position = {}".format(str(position)[:4])
plt.suptitle(titleString)
self.ax1.set_xlabel("Excitatory activity")
self.ax2.set_xlabel("Inhibitory activity")
self.ax3.set_xlabel("Boosting activity")
plt.tight_layout()
self.fig.canvas.draw() | def function[plotActivation, parameter[self, position, time, velocity]]:
constant[
Plot the activation of the current cell populations. Assumes that
two axes have already been created, ax1 and ax2. If done in a Jupyter
notebook, this plotting will overwrite the old plot.
:param position: The current location of the animal
:param time: The current time in the simulation
:param velocity: The current velocity of the animal
]
call[name[self].ax1.clear, parameter[]]
variable[y] assign[=] binary_operation[binary_operation[binary_operation[call[name[self].activations][constant[n]] + call[name[self].activations][constant[s]]] + call[name[self].activations][constant[e]]] + call[name[self].activations][constant[w]]]
call[name[self].ax1.matshow, parameter[call[name[y].reshape, parameter[name[self].dimensions]]]]
call[name[self].ax2.clear, parameter[]]
call[name[self].ax2.matshow, parameter[call[name[self].activationsI.reshape, parameter[name[self].dimensions]]]]
call[name[self].ax3.clear, parameter[]]
call[name[self].ax3.matshow, parameter[call[name[self].activationHistoryI.reshape, parameter[name[self].dimensions]]]]
variable[titleString] assign[=] constant[]
if compare[name[time] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b08bef50>
if compare[name[velocity] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b08bda50>
if compare[name[position] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b08bc4c0>
call[name[plt].suptitle, parameter[name[titleString]]]
call[name[self].ax1.set_xlabel, parameter[constant[Excitatory activity]]]
call[name[self].ax2.set_xlabel, parameter[constant[Inhibitory activity]]]
call[name[self].ax3.set_xlabel, parameter[constant[Boosting activity]]]
call[name[plt].tight_layout, parameter[]]
call[name[self].fig.canvas.draw, parameter[]] | keyword[def] identifier[plotActivation] ( identifier[self] , identifier[position] = keyword[None] , identifier[time] = keyword[None] , identifier[velocity] = keyword[None] ):
literal[string]
identifier[self] . identifier[ax1] . identifier[clear] ()
identifier[y] = identifier[self] . identifier[activations] [ literal[string] ]+ identifier[self] . identifier[activations] [ literal[string] ]+ identifier[self] . identifier[activations] [ literal[string] ]+ identifier[self] . identifier[activations] [ literal[string] ]
identifier[self] . identifier[ax1] . identifier[matshow] ( identifier[y] . identifier[reshape] ( identifier[self] . identifier[dimensions] ))
identifier[self] . identifier[ax2] . identifier[clear] ()
identifier[self] . identifier[ax2] . identifier[matshow] ( identifier[self] . identifier[activationsI] . identifier[reshape] ( identifier[self] . identifier[dimensions] ))
identifier[self] . identifier[ax3] . identifier[clear] ()
identifier[self] . identifier[ax3] . identifier[matshow] ( identifier[self] . identifier[activationHistoryI] . identifier[reshape] ( identifier[self] . identifier[dimensions] ))
identifier[titleString] = literal[string]
keyword[if] identifier[time] keyword[is] keyword[not] keyword[None] :
identifier[titleString] += literal[string] . identifier[format] ( identifier[str] ( identifier[time] ))
keyword[if] identifier[velocity] keyword[is] keyword[not] keyword[None] :
identifier[titleString] += literal[string] . identifier[format] ( identifier[str] ( identifier[velocity] )[: literal[int] ])
keyword[if] identifier[position] keyword[is] keyword[not] keyword[None] :
identifier[titleString] += literal[string] . identifier[format] ( identifier[str] ( identifier[position] )[: literal[int] ])
identifier[plt] . identifier[suptitle] ( identifier[titleString] )
identifier[self] . identifier[ax1] . identifier[set_xlabel] ( literal[string] )
identifier[self] . identifier[ax2] . identifier[set_xlabel] ( literal[string] )
identifier[self] . identifier[ax3] . identifier[set_xlabel] ( literal[string] )
identifier[plt] . identifier[tight_layout] ()
identifier[self] . identifier[fig] . identifier[canvas] . identifier[draw] () | def plotActivation(self, position=None, time=None, velocity=None):
"""
Plot the activation of the current cell populations. Assumes that
two axes have already been created, ax1 and ax2. If done in a Jupyter
notebook, this plotting will overwrite the old plot.
:param position: The current location of the animal
:param time: The current time in the simulation
:param velocity: The current velocity of the animal
"""
self.ax1.clear()
y = self.activations['n'] + self.activations['s'] + self.activations['e'] + self.activations['w']
self.ax1.matshow(y.reshape(self.dimensions))
self.ax2.clear()
self.ax2.matshow(self.activationsI.reshape(self.dimensions))
self.ax3.clear()
self.ax3.matshow(self.activationHistoryI.reshape(self.dimensions))
titleString = ''
if time is not None:
titleString += 'Time = {}'.format(str(time)) # depends on [control=['if'], data=['time']]
if velocity is not None:
titleString += ' Velocity = {}'.format(str(velocity)[:4]) # depends on [control=['if'], data=['velocity']]
if position is not None:
titleString += ' Position = {}'.format(str(position)[:4]) # depends on [control=['if'], data=['position']]
plt.suptitle(titleString)
self.ax1.set_xlabel('Excitatory activity')
self.ax2.set_xlabel('Inhibitory activity')
self.ax3.set_xlabel('Boosting activity')
plt.tight_layout()
self.fig.canvas.draw() |
def start(self):
""" Restart the listener
"""
if not event.contains(self.field, 'set', self.__validate):
self.__create_event() | def function[start, parameter[self]]:
constant[ Restart the listener
]
if <ast.UnaryOp object at 0x7da18bc739a0> begin[:]
call[name[self].__create_event, parameter[]] | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[event] . identifier[contains] ( identifier[self] . identifier[field] , literal[string] , identifier[self] . identifier[__validate] ):
identifier[self] . identifier[__create_event] () | def start(self):
""" Restart the listener
"""
if not event.contains(self.field, 'set', self.__validate):
self.__create_event() # depends on [control=['if'], data=[]] |
def html(self, data=None, template=None):
"""
Send html document to user.
Args:
- data: Dict to render template, or string with rendered HTML.
- template: Name of template to render HTML document with passed data.
"""
if data is None:
data = {}
if template:
return render(self.request, template, data)
return HttpResponse(data) | def function[html, parameter[self, data, template]]:
constant[
Send html document to user.
Args:
- data: Dict to render template, or string with rendered HTML.
- template: Name of template to render HTML document with passed data.
]
if compare[name[data] is constant[None]] begin[:]
variable[data] assign[=] dictionary[[], []]
if name[template] begin[:]
return[call[name[render], parameter[name[self].request, name[template], name[data]]]]
return[call[name[HttpResponse], parameter[name[data]]]] | keyword[def] identifier[html] ( identifier[self] , identifier[data] = keyword[None] , identifier[template] = keyword[None] ):
literal[string]
keyword[if] identifier[data] keyword[is] keyword[None] :
identifier[data] ={}
keyword[if] identifier[template] :
keyword[return] identifier[render] ( identifier[self] . identifier[request] , identifier[template] , identifier[data] )
keyword[return] identifier[HttpResponse] ( identifier[data] ) | def html(self, data=None, template=None):
"""
Send html document to user.
Args:
- data: Dict to render template, or string with rendered HTML.
- template: Name of template to render HTML document with passed data.
"""
if data is None:
data = {} # depends on [control=['if'], data=['data']]
if template:
return render(self.request, template, data) # depends on [control=['if'], data=[]]
return HttpResponse(data) |
def hangup_call(self, call_params):
"""REST Hangup Live Call Helper
"""
path = '/' + self.api_version + '/HangupCall/'
method = 'POST'
return self.request(path, method, call_params) | def function[hangup_call, parameter[self, call_params]]:
constant[REST Hangup Live Call Helper
]
variable[path] assign[=] binary_operation[binary_operation[constant[/] + name[self].api_version] + constant[/HangupCall/]]
variable[method] assign[=] constant[POST]
return[call[name[self].request, parameter[name[path], name[method], name[call_params]]]] | keyword[def] identifier[hangup_call] ( identifier[self] , identifier[call_params] ):
literal[string]
identifier[path] = literal[string] + identifier[self] . identifier[api_version] + literal[string]
identifier[method] = literal[string]
keyword[return] identifier[self] . identifier[request] ( identifier[path] , identifier[method] , identifier[call_params] ) | def hangup_call(self, call_params):
"""REST Hangup Live Call Helper
"""
path = '/' + self.api_version + '/HangupCall/'
method = 'POST'
return self.request(path, method, call_params) |
def del_application(self, application, sync=True):
"""
delete application from this team
:param application: the application to be deleted from this team
:param sync: If sync=True(default) synchronize with Ariane server. If sync=False,
add the application object on list to be removed on next save().
:return:
"""
LOGGER.debug("Team.del_application")
if not sync:
self.app_2_rm.append(application)
else:
if application.id is None:
application.sync()
if self.id is not None and application.id is not None:
params = {
'id': self.id,
'applicationID': application.id
}
args = {'http_operation': 'GET', 'operation_path': 'update/applications/delete', 'parameters': params}
response = TeamService.requester.call(args)
if response.rc != 0:
LOGGER.warning(
'Team.del_application - Problem while updating team ' + self.name +
'. Reason: ' + str(response.response_content) + '-' + str(response.error_message) +
" (" + str(response.rc) + ")"
)
else:
self.app_ids.remove(application.id)
application.sync()
else:
LOGGER.warning(
'Team.del_application - Problem while updating team ' + self.name + '. Reason: application ' +
application.name + ' id is None or self.id is None'
) | def function[del_application, parameter[self, application, sync]]:
constant[
delete application from this team
:param application: the application to be deleted from this team
:param sync: If sync=True(default) synchronize with Ariane server. If sync=False,
add the application object on list to be removed on next save().
:return:
]
call[name[LOGGER].debug, parameter[constant[Team.del_application]]]
if <ast.UnaryOp object at 0x7da1b134b970> begin[:]
call[name[self].app_2_rm.append, parameter[name[application]]] | keyword[def] identifier[del_application] ( identifier[self] , identifier[application] , identifier[sync] = keyword[True] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[if] keyword[not] identifier[sync] :
identifier[self] . identifier[app_2_rm] . identifier[append] ( identifier[application] )
keyword[else] :
keyword[if] identifier[application] . identifier[id] keyword[is] keyword[None] :
identifier[application] . identifier[sync] ()
keyword[if] identifier[self] . identifier[id] keyword[is] keyword[not] keyword[None] keyword[and] identifier[application] . identifier[id] keyword[is] keyword[not] keyword[None] :
identifier[params] ={
literal[string] : identifier[self] . identifier[id] ,
literal[string] : identifier[application] . identifier[id]
}
identifier[args] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[params] }
identifier[response] = identifier[TeamService] . identifier[requester] . identifier[call] ( identifier[args] )
keyword[if] identifier[response] . identifier[rc] != literal[int] :
identifier[LOGGER] . identifier[warning] (
literal[string] + identifier[self] . identifier[name] +
literal[string] + identifier[str] ( identifier[response] . identifier[response_content] )+ literal[string] + identifier[str] ( identifier[response] . identifier[error_message] )+
literal[string] + identifier[str] ( identifier[response] . identifier[rc] )+ literal[string]
)
keyword[else] :
identifier[self] . identifier[app_ids] . identifier[remove] ( identifier[application] . identifier[id] )
identifier[application] . identifier[sync] ()
keyword[else] :
identifier[LOGGER] . identifier[warning] (
literal[string] + identifier[self] . identifier[name] + literal[string] +
identifier[application] . identifier[name] + literal[string]
) | def del_application(self, application, sync=True):
"""
delete application from this team
:param application: the application to be deleted from this team
:param sync: If sync=True(default) synchronize with Ariane server. If sync=False,
add the application object on list to be removed on next save().
:return:
"""
LOGGER.debug('Team.del_application')
if not sync:
self.app_2_rm.append(application) # depends on [control=['if'], data=[]]
else:
if application.id is None:
application.sync() # depends on [control=['if'], data=[]]
if self.id is not None and application.id is not None:
params = {'id': self.id, 'applicationID': application.id}
args = {'http_operation': 'GET', 'operation_path': 'update/applications/delete', 'parameters': params}
response = TeamService.requester.call(args)
if response.rc != 0:
LOGGER.warning('Team.del_application - Problem while updating team ' + self.name + '. Reason: ' + str(response.response_content) + '-' + str(response.error_message) + ' (' + str(response.rc) + ')') # depends on [control=['if'], data=[]]
else:
self.app_ids.remove(application.id)
application.sync() # depends on [control=['if'], data=[]]
else:
LOGGER.warning('Team.del_application - Problem while updating team ' + self.name + '. Reason: application ' + application.name + ' id is None or self.id is None') |
def _region_from_key_id(key_id, default_region=None):
"""Determine the target region from a key ID, falling back to a default region if provided.
:param str key_id: AWS KMS key ID
:param str default_region: Region to use if no region found in key_id
:returns: region name
:rtype: str
:raises UnknownRegionError: if no region found in key_id and no default_region provided
"""
try:
region_name = key_id.split(":", 4)[3]
except IndexError:
if default_region is None:
raise UnknownRegionError(
"No default region found and no region determinable from key id: {}".format(key_id)
)
region_name = default_region
return region_name | def function[_region_from_key_id, parameter[key_id, default_region]]:
constant[Determine the target region from a key ID, falling back to a default region if provided.
:param str key_id: AWS KMS key ID
:param str default_region: Region to use if no region found in key_id
:returns: region name
:rtype: str
:raises UnknownRegionError: if no region found in key_id and no default_region provided
]
<ast.Try object at 0x7da18fe91780>
return[name[region_name]] | keyword[def] identifier[_region_from_key_id] ( identifier[key_id] , identifier[default_region] = keyword[None] ):
literal[string]
keyword[try] :
identifier[region_name] = identifier[key_id] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]
keyword[except] identifier[IndexError] :
keyword[if] identifier[default_region] keyword[is] keyword[None] :
keyword[raise] identifier[UnknownRegionError] (
literal[string] . identifier[format] ( identifier[key_id] )
)
identifier[region_name] = identifier[default_region]
keyword[return] identifier[region_name] | def _region_from_key_id(key_id, default_region=None):
"""Determine the target region from a key ID, falling back to a default region if provided.
:param str key_id: AWS KMS key ID
:param str default_region: Region to use if no region found in key_id
:returns: region name
:rtype: str
:raises UnknownRegionError: if no region found in key_id and no default_region provided
"""
try:
region_name = key_id.split(':', 4)[3] # depends on [control=['try'], data=[]]
except IndexError:
if default_region is None:
raise UnknownRegionError('No default region found and no region determinable from key id: {}'.format(key_id)) # depends on [control=['if'], data=[]]
region_name = default_region # depends on [control=['except'], data=[]]
return region_name |
def fromJSON(value):
"""loads the GP object from a JSON string """
j = json.loads(value)
v = GPFeatureRecordSetLayer()
if "defaultValue" in j:
v.value = j['defaultValue']
else:
v.value = j['value']
if 'paramName' in j:
v.paramName = j['paramName']
elif 'name' in j:
v.paramName = j['name']
return v | def function[fromJSON, parameter[value]]:
constant[loads the GP object from a JSON string ]
variable[j] assign[=] call[name[json].loads, parameter[name[value]]]
variable[v] assign[=] call[name[GPFeatureRecordSetLayer], parameter[]]
if compare[constant[defaultValue] in name[j]] begin[:]
name[v].value assign[=] call[name[j]][constant[defaultValue]]
if compare[constant[paramName] in name[j]] begin[:]
name[v].paramName assign[=] call[name[j]][constant[paramName]]
return[name[v]] | keyword[def] identifier[fromJSON] ( identifier[value] ):
literal[string]
identifier[j] = identifier[json] . identifier[loads] ( identifier[value] )
identifier[v] = identifier[GPFeatureRecordSetLayer] ()
keyword[if] literal[string] keyword[in] identifier[j] :
identifier[v] . identifier[value] = identifier[j] [ literal[string] ]
keyword[else] :
identifier[v] . identifier[value] = identifier[j] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[j] :
identifier[v] . identifier[paramName] = identifier[j] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[j] :
identifier[v] . identifier[paramName] = identifier[j] [ literal[string] ]
keyword[return] identifier[v] | def fromJSON(value):
"""loads the GP object from a JSON string """
j = json.loads(value)
v = GPFeatureRecordSetLayer()
if 'defaultValue' in j:
v.value = j['defaultValue'] # depends on [control=['if'], data=['j']]
else:
v.value = j['value']
if 'paramName' in j:
v.paramName = j['paramName'] # depends on [control=['if'], data=['j']]
elif 'name' in j:
v.paramName = j['name'] # depends on [control=['if'], data=['j']]
return v |
def snmp_server_mib_community_map_community(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
snmp_server = ET.SubElement(config, "snmp-server", xmlns="urn:brocade.com:mgmt:brocade-snmp")
mib = ET.SubElement(snmp_server, "mib")
community_map = ET.SubElement(mib, "community-map")
community = ET.SubElement(community_map, "community")
community.text = kwargs.pop('community')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[snmp_server_mib_community_map_community, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[snmp_server] assign[=] call[name[ET].SubElement, parameter[name[config], constant[snmp-server]]]
variable[mib] assign[=] call[name[ET].SubElement, parameter[name[snmp_server], constant[mib]]]
variable[community_map] assign[=] call[name[ET].SubElement, parameter[name[mib], constant[community-map]]]
variable[community] assign[=] call[name[ET].SubElement, parameter[name[community_map], constant[community]]]
name[community].text assign[=] call[name[kwargs].pop, parameter[constant[community]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[snmp_server_mib_community_map_community] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[snmp_server] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[mib] = identifier[ET] . identifier[SubElement] ( identifier[snmp_server] , literal[string] )
identifier[community_map] = identifier[ET] . identifier[SubElement] ( identifier[mib] , literal[string] )
identifier[community] = identifier[ET] . identifier[SubElement] ( identifier[community_map] , literal[string] )
identifier[community] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def snmp_server_mib_community_map_community(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
snmp_server = ET.SubElement(config, 'snmp-server', xmlns='urn:brocade.com:mgmt:brocade-snmp')
mib = ET.SubElement(snmp_server, 'mib')
community_map = ET.SubElement(mib, 'community-map')
community = ET.SubElement(community_map, 'community')
community.text = kwargs.pop('community')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def save(self, path, binary=False):
"""Save a set of constructs into the CLIPS data base.
If binary is True, the constructs will be saved in binary format.
The Python equivalent of the CLIPS load command.
"""
if binary:
ret = lib.EnvBsave(self._env, path.encode())
else:
ret = lib.EnvSave(self._env, path.encode())
if ret == 0:
raise CLIPSError(self._env) | def function[save, parameter[self, path, binary]]:
constant[Save a set of constructs into the CLIPS data base.
If binary is True, the constructs will be saved in binary format.
The Python equivalent of the CLIPS load command.
]
if name[binary] begin[:]
variable[ret] assign[=] call[name[lib].EnvBsave, parameter[name[self]._env, call[name[path].encode, parameter[]]]]
if compare[name[ret] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da18dc07f40> | keyword[def] identifier[save] ( identifier[self] , identifier[path] , identifier[binary] = keyword[False] ):
literal[string]
keyword[if] identifier[binary] :
identifier[ret] = identifier[lib] . identifier[EnvBsave] ( identifier[self] . identifier[_env] , identifier[path] . identifier[encode] ())
keyword[else] :
identifier[ret] = identifier[lib] . identifier[EnvSave] ( identifier[self] . identifier[_env] , identifier[path] . identifier[encode] ())
keyword[if] identifier[ret] == literal[int] :
keyword[raise] identifier[CLIPSError] ( identifier[self] . identifier[_env] ) | def save(self, path, binary=False):
"""Save a set of constructs into the CLIPS data base.
If binary is True, the constructs will be saved in binary format.
The Python equivalent of the CLIPS load command.
"""
if binary:
ret = lib.EnvBsave(self._env, path.encode()) # depends on [control=['if'], data=[]]
else:
ret = lib.EnvSave(self._env, path.encode())
if ret == 0:
raise CLIPSError(self._env) # depends on [control=['if'], data=[]] |
def serve(application, host='127.0.0.1', port=8080, **options):
"""Tornado's HTTPServer.
This is a high quality asynchronous server with many options. For details, please visit:
http://www.tornadoweb.org/en/stable/httpserver.html#http-server
"""
# Wrap our our WSGI application (potentially stack) in a Tornado adapter.
container = tornado.wsgi.WSGIContainer(application)
# Spin up a Tornado HTTP server using this container.
http_server = tornado.httpserver.HTTPServer(container, **options)
http_server.listen(int(port), host)
# Start and block on the Tornado IO loop.
tornado.ioloop.IOLoop.instance().start() | def function[serve, parameter[application, host, port]]:
constant[Tornado's HTTPServer.
This is a high quality asynchronous server with many options. For details, please visit:
http://www.tornadoweb.org/en/stable/httpserver.html#http-server
]
variable[container] assign[=] call[name[tornado].wsgi.WSGIContainer, parameter[name[application]]]
variable[http_server] assign[=] call[name[tornado].httpserver.HTTPServer, parameter[name[container]]]
call[name[http_server].listen, parameter[call[name[int], parameter[name[port]]], name[host]]]
call[call[name[tornado].ioloop.IOLoop.instance, parameter[]].start, parameter[]] | keyword[def] identifier[serve] ( identifier[application] , identifier[host] = literal[string] , identifier[port] = literal[int] ,** identifier[options] ):
literal[string]
identifier[container] = identifier[tornado] . identifier[wsgi] . identifier[WSGIContainer] ( identifier[application] )
identifier[http_server] = identifier[tornado] . identifier[httpserver] . identifier[HTTPServer] ( identifier[container] ,** identifier[options] )
identifier[http_server] . identifier[listen] ( identifier[int] ( identifier[port] ), identifier[host] )
identifier[tornado] . identifier[ioloop] . identifier[IOLoop] . identifier[instance] (). identifier[start] () | def serve(application, host='127.0.0.1', port=8080, **options):
"""Tornado's HTTPServer.
This is a high quality asynchronous server with many options. For details, please visit:
http://www.tornadoweb.org/en/stable/httpserver.html#http-server
""" # Wrap our our WSGI application (potentially stack) in a Tornado adapter.
container = tornado.wsgi.WSGIContainer(application) # Spin up a Tornado HTTP server using this container.
http_server = tornado.httpserver.HTTPServer(container, **options)
http_server.listen(int(port), host) # Start and block on the Tornado IO loop.
tornado.ioloop.IOLoop.instance().start() |
def take_nd(arr, indexer, axis=0, out=None, fill_value=np.nan, mask_info=None,
allow_fill=True):
"""
Specialized Cython take which sets NaN values in one pass
This dispatches to ``take`` defined on ExtensionArrays. It does not
currently dispatch to ``SparseArray.take`` for sparse ``arr``.
Parameters
----------
arr : array-like
Input array.
indexer : ndarray
1-D array of indices to take, subarrays corresponding to -1 value
indices are filed with fill_value
axis : int, default 0
Axis to take from
out : ndarray or None, default None
Optional output array, must be appropriate type to hold input and
fill_value together, if indexer has any -1 value entries; call
_maybe_promote to determine this type for any fill_value
fill_value : any, default np.nan
Fill value to replace -1 values with
mask_info : tuple of (ndarray, boolean)
If provided, value should correspond to:
(indexer != -1, (indexer != -1).any())
If not provided, it will be computed internally if necessary
allow_fill : boolean, default True
If False, indexer is assumed to contain no -1 values so no filling
will be done. This short-circuits computation of a mask. Result is
undefined if allow_fill == False and -1 is present in indexer.
Returns
-------
subarray : array-like
May be the same type as the input, or cast to an ndarray.
"""
# TODO(EA): Remove these if / elifs as datetimeTZ, interval, become EAs
# dispatch to internal type takes
if is_extension_array_dtype(arr):
return arr.take(indexer, fill_value=fill_value, allow_fill=allow_fill)
elif is_datetime64tz_dtype(arr):
return arr.take(indexer, fill_value=fill_value, allow_fill=allow_fill)
elif is_interval_dtype(arr):
return arr.take(indexer, fill_value=fill_value, allow_fill=allow_fill)
if is_sparse(arr):
arr = arr.get_values()
elif isinstance(arr, (ABCIndexClass, ABCSeries)):
arr = arr.values
arr = np.asarray(arr)
if indexer is None:
indexer = np.arange(arr.shape[axis], dtype=np.int64)
dtype, fill_value = arr.dtype, arr.dtype.type()
else:
indexer = ensure_int64(indexer, copy=False)
if not allow_fill:
dtype, fill_value = arr.dtype, arr.dtype.type()
mask_info = None, False
else:
# check for promotion based on types only (do this first because
# it's faster than computing a mask)
dtype, fill_value = maybe_promote(arr.dtype, fill_value)
if dtype != arr.dtype and (out is None or out.dtype != dtype):
# check if promotion is actually required based on indexer
if mask_info is not None:
mask, needs_masking = mask_info
else:
mask = indexer == -1
needs_masking = mask.any()
mask_info = mask, needs_masking
if needs_masking:
if out is not None and out.dtype != dtype:
raise TypeError('Incompatible type for fill_value')
else:
# if not, then depromote, set fill_value to dummy
# (it won't be used but we don't want the cython code
# to crash when trying to cast it to dtype)
dtype, fill_value = arr.dtype, arr.dtype.type()
flip_order = False
if arr.ndim == 2:
if arr.flags.f_contiguous:
flip_order = True
if flip_order:
arr = arr.T
axis = arr.ndim - axis - 1
if out is not None:
out = out.T
# at this point, it's guaranteed that dtype can hold both the arr values
# and the fill_value
if out is None:
out_shape = list(arr.shape)
out_shape[axis] = len(indexer)
out_shape = tuple(out_shape)
if arr.flags.f_contiguous and axis == arr.ndim - 1:
# minor tweak that can make an order-of-magnitude difference
# for dataframes initialized directly from 2-d ndarrays
# (s.t. df.values is c-contiguous and df._data.blocks[0] is its
# f-contiguous transpose)
out = np.empty(out_shape, dtype=dtype, order='F')
else:
out = np.empty(out_shape, dtype=dtype)
func = _get_take_nd_function(arr.ndim, arr.dtype, out.dtype, axis=axis,
mask_info=mask_info)
func(arr, indexer, out, fill_value)
if flip_order:
out = out.T
return out | def function[take_nd, parameter[arr, indexer, axis, out, fill_value, mask_info, allow_fill]]:
constant[
Specialized Cython take which sets NaN values in one pass
This dispatches to ``take`` defined on ExtensionArrays. It does not
currently dispatch to ``SparseArray.take`` for sparse ``arr``.
Parameters
----------
arr : array-like
Input array.
indexer : ndarray
1-D array of indices to take, subarrays corresponding to -1 value
indices are filed with fill_value
axis : int, default 0
Axis to take from
out : ndarray or None, default None
Optional output array, must be appropriate type to hold input and
fill_value together, if indexer has any -1 value entries; call
_maybe_promote to determine this type for any fill_value
fill_value : any, default np.nan
Fill value to replace -1 values with
mask_info : tuple of (ndarray, boolean)
If provided, value should correspond to:
(indexer != -1, (indexer != -1).any())
If not provided, it will be computed internally if necessary
allow_fill : boolean, default True
If False, indexer is assumed to contain no -1 values so no filling
will be done. This short-circuits computation of a mask. Result is
undefined if allow_fill == False and -1 is present in indexer.
Returns
-------
subarray : array-like
May be the same type as the input, or cast to an ndarray.
]
if call[name[is_extension_array_dtype], parameter[name[arr]]] begin[:]
return[call[name[arr].take, parameter[name[indexer]]]]
if call[name[is_sparse], parameter[name[arr]]] begin[:]
variable[arr] assign[=] call[name[arr].get_values, parameter[]]
variable[arr] assign[=] call[name[np].asarray, parameter[name[arr]]]
if compare[name[indexer] is constant[None]] begin[:]
variable[indexer] assign[=] call[name[np].arange, parameter[call[name[arr].shape][name[axis]]]]
<ast.Tuple object at 0x7da18f722c50> assign[=] tuple[[<ast.Attribute object at 0x7da18f721360>, <ast.Call object at 0x7da18f722080>]]
variable[flip_order] assign[=] constant[False]
if compare[name[arr].ndim equal[==] constant[2]] begin[:]
if name[arr].flags.f_contiguous begin[:]
variable[flip_order] assign[=] constant[True]
if name[flip_order] begin[:]
variable[arr] assign[=] name[arr].T
variable[axis] assign[=] binary_operation[binary_operation[name[arr].ndim - name[axis]] - constant[1]]
if compare[name[out] is_not constant[None]] begin[:]
variable[out] assign[=] name[out].T
if compare[name[out] is constant[None]] begin[:]
variable[out_shape] assign[=] call[name[list], parameter[name[arr].shape]]
call[name[out_shape]][name[axis]] assign[=] call[name[len], parameter[name[indexer]]]
variable[out_shape] assign[=] call[name[tuple], parameter[name[out_shape]]]
if <ast.BoolOp object at 0x7da18ede4550> begin[:]
variable[out] assign[=] call[name[np].empty, parameter[name[out_shape]]]
variable[func] assign[=] call[name[_get_take_nd_function], parameter[name[arr].ndim, name[arr].dtype, name[out].dtype]]
call[name[func], parameter[name[arr], name[indexer], name[out], name[fill_value]]]
if name[flip_order] begin[:]
variable[out] assign[=] name[out].T
return[name[out]] | keyword[def] identifier[take_nd] ( identifier[arr] , identifier[indexer] , identifier[axis] = literal[int] , identifier[out] = keyword[None] , identifier[fill_value] = identifier[np] . identifier[nan] , identifier[mask_info] = keyword[None] ,
identifier[allow_fill] = keyword[True] ):
literal[string]
keyword[if] identifier[is_extension_array_dtype] ( identifier[arr] ):
keyword[return] identifier[arr] . identifier[take] ( identifier[indexer] , identifier[fill_value] = identifier[fill_value] , identifier[allow_fill] = identifier[allow_fill] )
keyword[elif] identifier[is_datetime64tz_dtype] ( identifier[arr] ):
keyword[return] identifier[arr] . identifier[take] ( identifier[indexer] , identifier[fill_value] = identifier[fill_value] , identifier[allow_fill] = identifier[allow_fill] )
keyword[elif] identifier[is_interval_dtype] ( identifier[arr] ):
keyword[return] identifier[arr] . identifier[take] ( identifier[indexer] , identifier[fill_value] = identifier[fill_value] , identifier[allow_fill] = identifier[allow_fill] )
keyword[if] identifier[is_sparse] ( identifier[arr] ):
identifier[arr] = identifier[arr] . identifier[get_values] ()
keyword[elif] identifier[isinstance] ( identifier[arr] ,( identifier[ABCIndexClass] , identifier[ABCSeries] )):
identifier[arr] = identifier[arr] . identifier[values]
identifier[arr] = identifier[np] . identifier[asarray] ( identifier[arr] )
keyword[if] identifier[indexer] keyword[is] keyword[None] :
identifier[indexer] = identifier[np] . identifier[arange] ( identifier[arr] . identifier[shape] [ identifier[axis] ], identifier[dtype] = identifier[np] . identifier[int64] )
identifier[dtype] , identifier[fill_value] = identifier[arr] . identifier[dtype] , identifier[arr] . identifier[dtype] . identifier[type] ()
keyword[else] :
identifier[indexer] = identifier[ensure_int64] ( identifier[indexer] , identifier[copy] = keyword[False] )
keyword[if] keyword[not] identifier[allow_fill] :
identifier[dtype] , identifier[fill_value] = identifier[arr] . identifier[dtype] , identifier[arr] . identifier[dtype] . identifier[type] ()
identifier[mask_info] = keyword[None] , keyword[False]
keyword[else] :
identifier[dtype] , identifier[fill_value] = identifier[maybe_promote] ( identifier[arr] . identifier[dtype] , identifier[fill_value] )
keyword[if] identifier[dtype] != identifier[arr] . identifier[dtype] keyword[and] ( identifier[out] keyword[is] keyword[None] keyword[or] identifier[out] . identifier[dtype] != identifier[dtype] ):
keyword[if] identifier[mask_info] keyword[is] keyword[not] keyword[None] :
identifier[mask] , identifier[needs_masking] = identifier[mask_info]
keyword[else] :
identifier[mask] = identifier[indexer] ==- literal[int]
identifier[needs_masking] = identifier[mask] . identifier[any] ()
identifier[mask_info] = identifier[mask] , identifier[needs_masking]
keyword[if] identifier[needs_masking] :
keyword[if] identifier[out] keyword[is] keyword[not] keyword[None] keyword[and] identifier[out] . identifier[dtype] != identifier[dtype] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[else] :
identifier[dtype] , identifier[fill_value] = identifier[arr] . identifier[dtype] , identifier[arr] . identifier[dtype] . identifier[type] ()
identifier[flip_order] = keyword[False]
keyword[if] identifier[arr] . identifier[ndim] == literal[int] :
keyword[if] identifier[arr] . identifier[flags] . identifier[f_contiguous] :
identifier[flip_order] = keyword[True]
keyword[if] identifier[flip_order] :
identifier[arr] = identifier[arr] . identifier[T]
identifier[axis] = identifier[arr] . identifier[ndim] - identifier[axis] - literal[int]
keyword[if] identifier[out] keyword[is] keyword[not] keyword[None] :
identifier[out] = identifier[out] . identifier[T]
keyword[if] identifier[out] keyword[is] keyword[None] :
identifier[out_shape] = identifier[list] ( identifier[arr] . identifier[shape] )
identifier[out_shape] [ identifier[axis] ]= identifier[len] ( identifier[indexer] )
identifier[out_shape] = identifier[tuple] ( identifier[out_shape] )
keyword[if] identifier[arr] . identifier[flags] . identifier[f_contiguous] keyword[and] identifier[axis] == identifier[arr] . identifier[ndim] - literal[int] :
identifier[out] = identifier[np] . identifier[empty] ( identifier[out_shape] , identifier[dtype] = identifier[dtype] , identifier[order] = literal[string] )
keyword[else] :
identifier[out] = identifier[np] . identifier[empty] ( identifier[out_shape] , identifier[dtype] = identifier[dtype] )
identifier[func] = identifier[_get_take_nd_function] ( identifier[arr] . identifier[ndim] , identifier[arr] . identifier[dtype] , identifier[out] . identifier[dtype] , identifier[axis] = identifier[axis] ,
identifier[mask_info] = identifier[mask_info] )
identifier[func] ( identifier[arr] , identifier[indexer] , identifier[out] , identifier[fill_value] )
keyword[if] identifier[flip_order] :
identifier[out] = identifier[out] . identifier[T]
keyword[return] identifier[out] | def take_nd(arr, indexer, axis=0, out=None, fill_value=np.nan, mask_info=None, allow_fill=True):
"""
Specialized Cython take which sets NaN values in one pass
This dispatches to ``take`` defined on ExtensionArrays. It does not
currently dispatch to ``SparseArray.take`` for sparse ``arr``.
Parameters
----------
arr : array-like
Input array.
indexer : ndarray
1-D array of indices to take, subarrays corresponding to -1 value
indices are filed with fill_value
axis : int, default 0
Axis to take from
out : ndarray or None, default None
Optional output array, must be appropriate type to hold input and
fill_value together, if indexer has any -1 value entries; call
_maybe_promote to determine this type for any fill_value
fill_value : any, default np.nan
Fill value to replace -1 values with
mask_info : tuple of (ndarray, boolean)
If provided, value should correspond to:
(indexer != -1, (indexer != -1).any())
If not provided, it will be computed internally if necessary
allow_fill : boolean, default True
If False, indexer is assumed to contain no -1 values so no filling
will be done. This short-circuits computation of a mask. Result is
undefined if allow_fill == False and -1 is present in indexer.
Returns
-------
subarray : array-like
May be the same type as the input, or cast to an ndarray.
"""
# TODO(EA): Remove these if / elifs as datetimeTZ, interval, become EAs
# dispatch to internal type takes
if is_extension_array_dtype(arr):
return arr.take(indexer, fill_value=fill_value, allow_fill=allow_fill) # depends on [control=['if'], data=[]]
elif is_datetime64tz_dtype(arr):
return arr.take(indexer, fill_value=fill_value, allow_fill=allow_fill) # depends on [control=['if'], data=[]]
elif is_interval_dtype(arr):
return arr.take(indexer, fill_value=fill_value, allow_fill=allow_fill) # depends on [control=['if'], data=[]]
if is_sparse(arr):
arr = arr.get_values() # depends on [control=['if'], data=[]]
elif isinstance(arr, (ABCIndexClass, ABCSeries)):
arr = arr.values # depends on [control=['if'], data=[]]
arr = np.asarray(arr)
if indexer is None:
indexer = np.arange(arr.shape[axis], dtype=np.int64)
(dtype, fill_value) = (arr.dtype, arr.dtype.type()) # depends on [control=['if'], data=['indexer']]
else:
indexer = ensure_int64(indexer, copy=False)
if not allow_fill:
(dtype, fill_value) = (arr.dtype, arr.dtype.type())
mask_info = (None, False) # depends on [control=['if'], data=[]]
else:
# check for promotion based on types only (do this first because
# it's faster than computing a mask)
(dtype, fill_value) = maybe_promote(arr.dtype, fill_value)
if dtype != arr.dtype and (out is None or out.dtype != dtype):
# check if promotion is actually required based on indexer
if mask_info is not None:
(mask, needs_masking) = mask_info # depends on [control=['if'], data=['mask_info']]
else:
mask = indexer == -1
needs_masking = mask.any()
mask_info = (mask, needs_masking)
if needs_masking:
if out is not None and out.dtype != dtype:
raise TypeError('Incompatible type for fill_value') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# if not, then depromote, set fill_value to dummy
# (it won't be used but we don't want the cython code
# to crash when trying to cast it to dtype)
(dtype, fill_value) = (arr.dtype, arr.dtype.type()) # depends on [control=['if'], data=[]]
flip_order = False
if arr.ndim == 2:
if arr.flags.f_contiguous:
flip_order = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if flip_order:
arr = arr.T
axis = arr.ndim - axis - 1
if out is not None:
out = out.T # depends on [control=['if'], data=['out']] # depends on [control=['if'], data=[]]
# at this point, it's guaranteed that dtype can hold both the arr values
# and the fill_value
if out is None:
out_shape = list(arr.shape)
out_shape[axis] = len(indexer)
out_shape = tuple(out_shape)
if arr.flags.f_contiguous and axis == arr.ndim - 1:
# minor tweak that can make an order-of-magnitude difference
# for dataframes initialized directly from 2-d ndarrays
# (s.t. df.values is c-contiguous and df._data.blocks[0] is its
# f-contiguous transpose)
out = np.empty(out_shape, dtype=dtype, order='F') # depends on [control=['if'], data=[]]
else:
out = np.empty(out_shape, dtype=dtype) # depends on [control=['if'], data=['out']]
func = _get_take_nd_function(arr.ndim, arr.dtype, out.dtype, axis=axis, mask_info=mask_info)
func(arr, indexer, out, fill_value)
if flip_order:
out = out.T # depends on [control=['if'], data=[]]
return out |
def get_qcos_client(self, app_uri):
"""获得资源管理客户端
缓存,但不是线程安全的
"""
client = self.qcos_clients.get(app_uri)
if (client is None):
client = self.create_qcos_client(app_uri)
self.qcos_clients[app_uri] = client
return client | def function[get_qcos_client, parameter[self, app_uri]]:
constant[获得资源管理客户端
缓存,但不是线程安全的
]
variable[client] assign[=] call[name[self].qcos_clients.get, parameter[name[app_uri]]]
if compare[name[client] is constant[None]] begin[:]
variable[client] assign[=] call[name[self].create_qcos_client, parameter[name[app_uri]]]
call[name[self].qcos_clients][name[app_uri]] assign[=] name[client]
return[name[client]] | keyword[def] identifier[get_qcos_client] ( identifier[self] , identifier[app_uri] ):
literal[string]
identifier[client] = identifier[self] . identifier[qcos_clients] . identifier[get] ( identifier[app_uri] )
keyword[if] ( identifier[client] keyword[is] keyword[None] ):
identifier[client] = identifier[self] . identifier[create_qcos_client] ( identifier[app_uri] )
identifier[self] . identifier[qcos_clients] [ identifier[app_uri] ]= identifier[client]
keyword[return] identifier[client] | def get_qcos_client(self, app_uri):
"""获得资源管理客户端
缓存,但不是线程安全的
"""
client = self.qcos_clients.get(app_uri)
if client is None:
client = self.create_qcos_client(app_uri)
self.qcos_clients[app_uri] = client # depends on [control=['if'], data=['client']]
return client |
def list_jobs(self, argument_filters=None):
'''
a method to list jobs in the scheduler
:param argument_filters: list of query criteria dictionaries for class argument keys
:return: list of jobs (which satisfy the filters)
NOTE: query criteria architecture
each item in the argument filters list must be a dictionary
which is composed of one or more key names which represent the
dotpath to a key in the job record to be queried with a value
that is a dictionary of conditional operators used to test the
value in the corresponding key in each record in the list of jobs.
eg. argument_filters = [ { '.function': { 'must_contain': [ 'debug' ] } } ]
this example filter looks in the function key of each job for a
value which contains the characters 'debug'.
NOTE: the filter method uses a query filters list structure to represent
the disjunctive normal form of a logical expression. a record is
added to the results list if any query criteria dictionary in the
list evaluates to true. within each query criteria dictionary, all
declared conditional operators must evaluate to true.
in this way, the argument_filters represents a boolean OR operator and
each criteria dictionary inside the list represents a boolean AND
operator between all keys in the dictionary.
NOTE: each query_criteria uses the architecture of query declaration in
the jsonModel.query method
the list of keys in each query_criteria is the same as the arguments for
adding a job to the scheduler
query_criteria = {
'.id': {},
'.function': {},
'.name': {},
'.dt': {},
'.interval': {},
'.month': {},
'.day': {},
'.weekday': {},
'.hour': {},
'.minute': {},
'.second': {},
'.start_date': {},
'.end_date': {}
}
conditional operators for '.id', '.function', '.name':
"byte_data": false,
"discrete_values": [ "" ],
"excluded_values": [ "" ],
"greater_than": "",
"less_than": "",
"max_length": 0,
"max_value": "",
"min_length": 0,
"min_value": "",
"must_contain": [ "" ],
"must_not_contain": [ "" ],
"contains_either": [ "" ]
conditional operators for '.dt', 'start', 'end':
"discrete_values": [ 0.0 ],
"excluded_values": [ 0.0 ],
"greater_than": 0.0,
"less_than": 0.0,
"max_value": 0.0,
"min_value": 0.0
operators for '.interval', '.month', '.day', '.weekday', '.hour', '.minute', '.second':
"discrete_values": [ 0 ],
"excluded_values": [ 0 ],
"greater_than": 0,
"less_than": 0,
"max_value": 0,
"min_value": 0
'''
title = '%s.list_jobs' % self.__class__.__name__
# validate inputs
if argument_filters:
self.fields.validate(argument_filters, '.argument_filters')
# send request to get jobs
url = '%s/scheduler/jobs' % self.url
job_list = self._get_request(url)
# construct filter function
def query_function(**kwargs):
job_details = {}
for key, value in kwargs.items():
if key in self.job_model.schema.keys():
job_details[key] = value
for query_criteria in argument_filters:
if self.job_model.query(query_criteria, job_details):
return True
return False
# construct empty list
results_list = []
# add refactored jobs to results list
for job in job_list:
job_details = self._construct_details(job)
if argument_filters:
if query_function(**job_details):
results_list.append(job_details)
else:
results_list.append(job_details)
return results_list | def function[list_jobs, parameter[self, argument_filters]]:
constant[
a method to list jobs in the scheduler
:param argument_filters: list of query criteria dictionaries for class argument keys
:return: list of jobs (which satisfy the filters)
NOTE: query criteria architecture
each item in the argument filters list must be a dictionary
which is composed of one or more key names which represent the
dotpath to a key in the job record to be queried with a value
that is a dictionary of conditional operators used to test the
value in the corresponding key in each record in the list of jobs.
eg. argument_filters = [ { '.function': { 'must_contain': [ 'debug' ] } } ]
this example filter looks in the function key of each job for a
value which contains the characters 'debug'.
NOTE: the filter method uses a query filters list structure to represent
the disjunctive normal form of a logical expression. a record is
added to the results list if any query criteria dictionary in the
list evaluates to true. within each query criteria dictionary, all
declared conditional operators must evaluate to true.
in this way, the argument_filters represents a boolean OR operator and
each criteria dictionary inside the list represents a boolean AND
operator between all keys in the dictionary.
NOTE: each query_criteria uses the architecture of query declaration in
the jsonModel.query method
the list of keys in each query_criteria is the same as the arguments for
adding a job to the scheduler
query_criteria = {
'.id': {},
'.function': {},
'.name': {},
'.dt': {},
'.interval': {},
'.month': {},
'.day': {},
'.weekday': {},
'.hour': {},
'.minute': {},
'.second': {},
'.start_date': {},
'.end_date': {}
}
conditional operators for '.id', '.function', '.name':
"byte_data": false,
"discrete_values": [ "" ],
"excluded_values": [ "" ],
"greater_than": "",
"less_than": "",
"max_length": 0,
"max_value": "",
"min_length": 0,
"min_value": "",
"must_contain": [ "" ],
"must_not_contain": [ "" ],
"contains_either": [ "" ]
conditional operators for '.dt', 'start', 'end':
"discrete_values": [ 0.0 ],
"excluded_values": [ 0.0 ],
"greater_than": 0.0,
"less_than": 0.0,
"max_value": 0.0,
"min_value": 0.0
operators for '.interval', '.month', '.day', '.weekday', '.hour', '.minute', '.second':
"discrete_values": [ 0 ],
"excluded_values": [ 0 ],
"greater_than": 0,
"less_than": 0,
"max_value": 0,
"min_value": 0
]
variable[title] assign[=] binary_operation[constant[%s.list_jobs] <ast.Mod object at 0x7da2590d6920> name[self].__class__.__name__]
if name[argument_filters] begin[:]
call[name[self].fields.validate, parameter[name[argument_filters], constant[.argument_filters]]]
variable[url] assign[=] binary_operation[constant[%s/scheduler/jobs] <ast.Mod object at 0x7da2590d6920> name[self].url]
variable[job_list] assign[=] call[name[self]._get_request, parameter[name[url]]]
def function[query_function, parameter[]]:
variable[job_details] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b14d2740>, <ast.Name object at 0x7da1b14d22f0>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:]
if compare[name[key] in call[name[self].job_model.schema.keys, parameter[]]] begin[:]
call[name[job_details]][name[key]] assign[=] name[value]
for taget[name[query_criteria]] in starred[name[argument_filters]] begin[:]
if call[name[self].job_model.query, parameter[name[query_criteria], name[job_details]]] begin[:]
return[constant[True]]
return[constant[False]]
variable[results_list] assign[=] list[[]]
for taget[name[job]] in starred[name[job_list]] begin[:]
variable[job_details] assign[=] call[name[self]._construct_details, parameter[name[job]]]
if name[argument_filters] begin[:]
if call[name[query_function], parameter[]] begin[:]
call[name[results_list].append, parameter[name[job_details]]]
return[name[results_list]] | keyword[def] identifier[list_jobs] ( identifier[self] , identifier[argument_filters] = keyword[None] ):
literal[string]
identifier[title] = literal[string] % identifier[self] . identifier[__class__] . identifier[__name__]
keyword[if] identifier[argument_filters] :
identifier[self] . identifier[fields] . identifier[validate] ( identifier[argument_filters] , literal[string] )
identifier[url] = literal[string] % identifier[self] . identifier[url]
identifier[job_list] = identifier[self] . identifier[_get_request] ( identifier[url] )
keyword[def] identifier[query_function] (** identifier[kwargs] ):
identifier[job_details] ={}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[kwargs] . identifier[items] ():
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[job_model] . identifier[schema] . identifier[keys] ():
identifier[job_details] [ identifier[key] ]= identifier[value]
keyword[for] identifier[query_criteria] keyword[in] identifier[argument_filters] :
keyword[if] identifier[self] . identifier[job_model] . identifier[query] ( identifier[query_criteria] , identifier[job_details] ):
keyword[return] keyword[True]
keyword[return] keyword[False]
identifier[results_list] =[]
keyword[for] identifier[job] keyword[in] identifier[job_list] :
identifier[job_details] = identifier[self] . identifier[_construct_details] ( identifier[job] )
keyword[if] identifier[argument_filters] :
keyword[if] identifier[query_function] (** identifier[job_details] ):
identifier[results_list] . identifier[append] ( identifier[job_details] )
keyword[else] :
identifier[results_list] . identifier[append] ( identifier[job_details] )
keyword[return] identifier[results_list] | def list_jobs(self, argument_filters=None):
"""
a method to list jobs in the scheduler
:param argument_filters: list of query criteria dictionaries for class argument keys
:return: list of jobs (which satisfy the filters)
NOTE: query criteria architecture
each item in the argument filters list must be a dictionary
which is composed of one or more key names which represent the
dotpath to a key in the job record to be queried with a value
that is a dictionary of conditional operators used to test the
value in the corresponding key in each record in the list of jobs.
eg. argument_filters = [ { '.function': { 'must_contain': [ 'debug' ] } } ]
this example filter looks in the function key of each job for a
value which contains the characters 'debug'.
NOTE: the filter method uses a query filters list structure to represent
the disjunctive normal form of a logical expression. a record is
added to the results list if any query criteria dictionary in the
list evaluates to true. within each query criteria dictionary, all
declared conditional operators must evaluate to true.
in this way, the argument_filters represents a boolean OR operator and
each criteria dictionary inside the list represents a boolean AND
operator between all keys in the dictionary.
NOTE: each query_criteria uses the architecture of query declaration in
the jsonModel.query method
the list of keys in each query_criteria is the same as the arguments for
adding a job to the scheduler
query_criteria = {
'.id': {},
'.function': {},
'.name': {},
'.dt': {},
'.interval': {},
'.month': {},
'.day': {},
'.weekday': {},
'.hour': {},
'.minute': {},
'.second': {},
'.start_date': {},
'.end_date': {}
}
conditional operators for '.id', '.function', '.name':
"byte_data": false,
"discrete_values": [ "" ],
"excluded_values": [ "" ],
"greater_than": "",
"less_than": "",
"max_length": 0,
"max_value": "",
"min_length": 0,
"min_value": "",
"must_contain": [ "" ],
"must_not_contain": [ "" ],
"contains_either": [ "" ]
conditional operators for '.dt', 'start', 'end':
"discrete_values": [ 0.0 ],
"excluded_values": [ 0.0 ],
"greater_than": 0.0,
"less_than": 0.0,
"max_value": 0.0,
"min_value": 0.0
operators for '.interval', '.month', '.day', '.weekday', '.hour', '.minute', '.second':
"discrete_values": [ 0 ],
"excluded_values": [ 0 ],
"greater_than": 0,
"less_than": 0,
"max_value": 0,
"min_value": 0
"""
title = '%s.list_jobs' % self.__class__.__name__ # validate inputs
if argument_filters:
self.fields.validate(argument_filters, '.argument_filters') # depends on [control=['if'], data=[]] # send request to get jobs
url = '%s/scheduler/jobs' % self.url
job_list = self._get_request(url) # construct filter function
def query_function(**kwargs):
job_details = {}
for (key, value) in kwargs.items():
if key in self.job_model.schema.keys():
job_details[key] = value # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=[]]
for query_criteria in argument_filters:
if self.job_model.query(query_criteria, job_details):
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['query_criteria']]
return False # construct empty list
results_list = [] # add refactored jobs to results list
for job in job_list:
job_details = self._construct_details(job)
if argument_filters:
if query_function(**job_details):
results_list.append(job_details) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
results_list.append(job_details) # depends on [control=['for'], data=['job']]
return results_list |
def add_arguments(self):
"""
Add the label argument by default, no need to specify it in args.
"""
super(LabelCommand, self).add_arguments()
self.parser.add_argument('labels', metavar=self.label, nargs="+") | def function[add_arguments, parameter[self]]:
constant[
Add the label argument by default, no need to specify it in args.
]
call[call[name[super], parameter[name[LabelCommand], name[self]]].add_arguments, parameter[]]
call[name[self].parser.add_argument, parameter[constant[labels]]] | keyword[def] identifier[add_arguments] ( identifier[self] ):
literal[string]
identifier[super] ( identifier[LabelCommand] , identifier[self] ). identifier[add_arguments] ()
identifier[self] . identifier[parser] . identifier[add_argument] ( literal[string] , identifier[metavar] = identifier[self] . identifier[label] , identifier[nargs] = literal[string] ) | def add_arguments(self):
"""
Add the label argument by default, no need to specify it in args.
"""
super(LabelCommand, self).add_arguments()
self.parser.add_argument('labels', metavar=self.label, nargs='+') |
def _extract_table_root(d, current, pc):
"""
Extract data from the root level of a paleoData table.
:param dict d: paleoData table
:param dict current: Current root data
:param str pc: paleoData or chronData
:return dict current: Current root data
"""
logger_ts.info("enter extract_table_root")
try:
for k, v in d.items():
if isinstance(v, str):
current[pc + '_' + k] = v
except Exception as e:
logger_ts.error("extract_table_root: {}".format(e))
return current | def function[_extract_table_root, parameter[d, current, pc]]:
constant[
Extract data from the root level of a paleoData table.
:param dict d: paleoData table
:param dict current: Current root data
:param str pc: paleoData or chronData
:return dict current: Current root data
]
call[name[logger_ts].info, parameter[constant[enter extract_table_root]]]
<ast.Try object at 0x7da20c7cb3d0>
return[name[current]] | keyword[def] identifier[_extract_table_root] ( identifier[d] , identifier[current] , identifier[pc] ):
literal[string]
identifier[logger_ts] . identifier[info] ( literal[string] )
keyword[try] :
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[d] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[v] , identifier[str] ):
identifier[current] [ identifier[pc] + literal[string] + identifier[k] ]= identifier[v]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger_ts] . identifier[error] ( literal[string] . identifier[format] ( identifier[e] ))
keyword[return] identifier[current] | def _extract_table_root(d, current, pc):
"""
Extract data from the root level of a paleoData table.
:param dict d: paleoData table
:param dict current: Current root data
:param str pc: paleoData or chronData
:return dict current: Current root data
"""
logger_ts.info('enter extract_table_root')
try:
for (k, v) in d.items():
if isinstance(v, str):
current[pc + '_' + k] = v # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]]
except Exception as e:
logger_ts.error('extract_table_root: {}'.format(e)) # depends on [control=['except'], data=['e']]
return current |
def get_ticker(api_code=None):
"""Call the 'ticker' method and return a dictionary
of :class:`Currency` objects.
:param str api_code: Blockchain.info API code (optional)
:return: a dictionary in the format of ccy_symbol(str):currency(:class:`Currency`)
"""
response = util.call_api('ticker' if api_code is None else 'ticker?api_code=' + api_code)
json_response = json.loads(response)
ticker = {}
for key in json_response:
json_ccy = json_response[key]
ccy = Currency(json_ccy['last'],
json_ccy['buy'],
json_ccy['sell'],
json_ccy['symbol'],
json_ccy['15m'])
ticker[key] = ccy
return ticker | def function[get_ticker, parameter[api_code]]:
constant[Call the 'ticker' method and return a dictionary
of :class:`Currency` objects.
:param str api_code: Blockchain.info API code (optional)
:return: a dictionary in the format of ccy_symbol(str):currency(:class:`Currency`)
]
variable[response] assign[=] call[name[util].call_api, parameter[<ast.IfExp object at 0x7da1b1e67e20>]]
variable[json_response] assign[=] call[name[json].loads, parameter[name[response]]]
variable[ticker] assign[=] dictionary[[], []]
for taget[name[key]] in starred[name[json_response]] begin[:]
variable[json_ccy] assign[=] call[name[json_response]][name[key]]
variable[ccy] assign[=] call[name[Currency], parameter[call[name[json_ccy]][constant[last]], call[name[json_ccy]][constant[buy]], call[name[json_ccy]][constant[sell]], call[name[json_ccy]][constant[symbol]], call[name[json_ccy]][constant[15m]]]]
call[name[ticker]][name[key]] assign[=] name[ccy]
return[name[ticker]] | keyword[def] identifier[get_ticker] ( identifier[api_code] = keyword[None] ):
literal[string]
identifier[response] = identifier[util] . identifier[call_api] ( literal[string] keyword[if] identifier[api_code] keyword[is] keyword[None] keyword[else] literal[string] + identifier[api_code] )
identifier[json_response] = identifier[json] . identifier[loads] ( identifier[response] )
identifier[ticker] ={}
keyword[for] identifier[key] keyword[in] identifier[json_response] :
identifier[json_ccy] = identifier[json_response] [ identifier[key] ]
identifier[ccy] = identifier[Currency] ( identifier[json_ccy] [ literal[string] ],
identifier[json_ccy] [ literal[string] ],
identifier[json_ccy] [ literal[string] ],
identifier[json_ccy] [ literal[string] ],
identifier[json_ccy] [ literal[string] ])
identifier[ticker] [ identifier[key] ]= identifier[ccy]
keyword[return] identifier[ticker] | def get_ticker(api_code=None):
"""Call the 'ticker' method and return a dictionary
of :class:`Currency` objects.
:param str api_code: Blockchain.info API code (optional)
:return: a dictionary in the format of ccy_symbol(str):currency(:class:`Currency`)
"""
response = util.call_api('ticker' if api_code is None else 'ticker?api_code=' + api_code)
json_response = json.loads(response)
ticker = {}
for key in json_response:
json_ccy = json_response[key]
ccy = Currency(json_ccy['last'], json_ccy['buy'], json_ccy['sell'], json_ccy['symbol'], json_ccy['15m'])
ticker[key] = ccy # depends on [control=['for'], data=['key']]
return ticker |
def dict_diff(dicts):
"""
Subset dictionaries to keys which map to multiple values
"""
diff_keys = set()
for k in union(set(d.keys()) for d in dicts):
values = []
for d in dicts:
if k not in d:
diff_keys.add(k)
break
else:
values.append(d[k])
if nunique(values) > 1:
diff_keys.add(k)
break
return [dict_subset(d, diff_keys) for d in dicts] | def function[dict_diff, parameter[dicts]]:
constant[
Subset dictionaries to keys which map to multiple values
]
variable[diff_keys] assign[=] call[name[set], parameter[]]
for taget[name[k]] in starred[call[name[union], parameter[<ast.GeneratorExp object at 0x7da1b23ed4b0>]]] begin[:]
variable[values] assign[=] list[[]]
for taget[name[d]] in starred[name[dicts]] begin[:]
if compare[name[k] <ast.NotIn object at 0x7da2590d7190> name[d]] begin[:]
call[name[diff_keys].add, parameter[name[k]]]
break
return[<ast.ListComp object at 0x7da1b24e2e60>] | keyword[def] identifier[dict_diff] ( identifier[dicts] ):
literal[string]
identifier[diff_keys] = identifier[set] ()
keyword[for] identifier[k] keyword[in] identifier[union] ( identifier[set] ( identifier[d] . identifier[keys] ()) keyword[for] identifier[d] keyword[in] identifier[dicts] ):
identifier[values] =[]
keyword[for] identifier[d] keyword[in] identifier[dicts] :
keyword[if] identifier[k] keyword[not] keyword[in] identifier[d] :
identifier[diff_keys] . identifier[add] ( identifier[k] )
keyword[break]
keyword[else] :
identifier[values] . identifier[append] ( identifier[d] [ identifier[k] ])
keyword[if] identifier[nunique] ( identifier[values] )> literal[int] :
identifier[diff_keys] . identifier[add] ( identifier[k] )
keyword[break]
keyword[return] [ identifier[dict_subset] ( identifier[d] , identifier[diff_keys] ) keyword[for] identifier[d] keyword[in] identifier[dicts] ] | def dict_diff(dicts):
"""
Subset dictionaries to keys which map to multiple values
"""
diff_keys = set()
for k in union((set(d.keys()) for d in dicts)):
values = []
for d in dicts:
if k not in d:
diff_keys.add(k)
break # depends on [control=['if'], data=['k']]
else:
values.append(d[k])
if nunique(values) > 1:
diff_keys.add(k)
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']] # depends on [control=['for'], data=['k']]
return [dict_subset(d, diff_keys) for d in dicts] |
def formatter_factory(show_defaults=True):
"""Formatter factory"""
def get_help_string(self, action):
lhelp = action.help
if isinstance(show_defaults, (list, tuple)):
if "-" + action.dest in show_defaults:
return lhelp
if '%(default)' not in action.help:
if action.default is not argparse.SUPPRESS:
defaulting_nargs = [argparse.OPTIONAL, argparse.ZERO_OR_MORE]
if action.option_strings or action.nargs in defaulting_nargs:
lhelp += ' (default: %(default)s)'
return lhelp
def default_help_string(self, action):
return action.help
if show_defaults is True:
ARPIFormatter._get_help_string = classmethod(get_help_string)
else:
ARPIFormatter._get_help_string = classmethod(default_help_string)
return ARPIFormatter | def function[formatter_factory, parameter[show_defaults]]:
constant[Formatter factory]
def function[get_help_string, parameter[self, action]]:
variable[lhelp] assign[=] name[action].help
if call[name[isinstance], parameter[name[show_defaults], tuple[[<ast.Name object at 0x7da18bc70a90>, <ast.Name object at 0x7da18bc71600>]]]] begin[:]
if compare[binary_operation[constant[-] + name[action].dest] in name[show_defaults]] begin[:]
return[name[lhelp]]
if compare[constant[%(default)] <ast.NotIn object at 0x7da2590d7190> name[action].help] begin[:]
if compare[name[action].default is_not name[argparse].SUPPRESS] begin[:]
variable[defaulting_nargs] assign[=] list[[<ast.Attribute object at 0x7da18bc72b00>, <ast.Attribute object at 0x7da18bc71330>]]
if <ast.BoolOp object at 0x7da18bc73790> begin[:]
<ast.AugAssign object at 0x7da18bc72890>
return[name[lhelp]]
def function[default_help_string, parameter[self, action]]:
return[name[action].help]
if compare[name[show_defaults] is constant[True]] begin[:]
name[ARPIFormatter]._get_help_string assign[=] call[name[classmethod], parameter[name[get_help_string]]]
return[name[ARPIFormatter]] | keyword[def] identifier[formatter_factory] ( identifier[show_defaults] = keyword[True] ):
literal[string]
keyword[def] identifier[get_help_string] ( identifier[self] , identifier[action] ):
identifier[lhelp] = identifier[action] . identifier[help]
keyword[if] identifier[isinstance] ( identifier[show_defaults] ,( identifier[list] , identifier[tuple] )):
keyword[if] literal[string] + identifier[action] . identifier[dest] keyword[in] identifier[show_defaults] :
keyword[return] identifier[lhelp]
keyword[if] literal[string] keyword[not] keyword[in] identifier[action] . identifier[help] :
keyword[if] identifier[action] . identifier[default] keyword[is] keyword[not] identifier[argparse] . identifier[SUPPRESS] :
identifier[defaulting_nargs] =[ identifier[argparse] . identifier[OPTIONAL] , identifier[argparse] . identifier[ZERO_OR_MORE] ]
keyword[if] identifier[action] . identifier[option_strings] keyword[or] identifier[action] . identifier[nargs] keyword[in] identifier[defaulting_nargs] :
identifier[lhelp] += literal[string]
keyword[return] identifier[lhelp]
keyword[def] identifier[default_help_string] ( identifier[self] , identifier[action] ):
keyword[return] identifier[action] . identifier[help]
keyword[if] identifier[show_defaults] keyword[is] keyword[True] :
identifier[ARPIFormatter] . identifier[_get_help_string] = identifier[classmethod] ( identifier[get_help_string] )
keyword[else] :
identifier[ARPIFormatter] . identifier[_get_help_string] = identifier[classmethod] ( identifier[default_help_string] )
keyword[return] identifier[ARPIFormatter] | def formatter_factory(show_defaults=True):
"""Formatter factory"""
def get_help_string(self, action):
lhelp = action.help
if isinstance(show_defaults, (list, tuple)):
if '-' + action.dest in show_defaults:
return lhelp # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if '%(default)' not in action.help:
if action.default is not argparse.SUPPRESS:
defaulting_nargs = [argparse.OPTIONAL, argparse.ZERO_OR_MORE]
if action.option_strings or action.nargs in defaulting_nargs:
lhelp += ' (default: %(default)s)' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return lhelp
def default_help_string(self, action):
return action.help
if show_defaults is True:
ARPIFormatter._get_help_string = classmethod(get_help_string) # depends on [control=['if'], data=[]]
else:
ARPIFormatter._get_help_string = classmethod(default_help_string)
return ARPIFormatter |
def binary_connect_convolution(inp, outmaps, kernel,
pad=None, stride=None, dilation=None, group=1,
quantize_zero_to=1.0,
w_init=None, wb_init=None, b_init=None,
base_axis=1, fix_parameters=False, rng=None,
with_bias=True):
"""Binary Connect Convolution, multiplier-less inner-product.
Binary Connect Convolution is the convolution function,
except the definition of the inner product is modified.
The input-output relation of this function is as follows:
.. math::
y_{n, a, b} = \sum_{m} \sum_{i} \sum_{j} sign(w_{n, m, i, j}) x_{m, a + i, b + j}.
Therefore :math:`sign(w_i)` is either :math:`1` or :math:`-1` and the inner product
simplifies to addition.
This function should be used together with BatchNormalization.
References:
M. Courbariaux, Y. Bengio, and J.-P. David. "BinaryConnect:
Training Deep Neural Networks with binary weights during propagations."
Advances in Neural Information Processing Systems. 2015.
.. note::
1) if you would like to share weights between some layers, please
make sure to share the standard, floating value weights (`weight`)
and not the binarized weights (`binary_weight`)
2) The weights and the binary weights become synced only after :func:`~nnabla._variable.Variable.forward` is called,
and not after a call to :func:`~nnabla._variable.Variable.backward`.
To access the parameters of the network, remember to call :func:`~nnabla._variable.Variable.forward` once before doing so, otherwise the
float weights and the binary weights will not be in sync.
3) Quantized values are stored as floating point number for `binary_weight`,
since this function is only for simulation purposes.
Args:
inp (~nnabla.Variable): N-D array.
outmaps (int): Number of convolution kernels (which is equal to the number of output channels). For example, to apply convolution on an input with 16 types of filters, specify 16.
kernel (:obj:`tuple` of :obj:`int`): Convolution kernel size. For example, to apply convolution on an image with a 3 (height) by 5 (width) two-dimensional kernel, specify (3,5).
pad (:obj:`tuple` of :obj:`int`): Padding sizes for dimensions.
stride (:obj:`tuple` of :obj:`int`): Stride sizes for dimensions.
dilation (:obj:`tuple` of :obj:`int`): Dilation sizes for dimensions.
group (int): Number of groups of channels. This makes connections across channels sparser by grouping connections along map direction.
quantize_zero_to (float): Input value at zero is quantized to this value.
w_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for weight. By default, it is initialized with :obj:`nnabla.initializer.UniformInitializer` within the range determined by :obj:`nnabla.initializer.calc_uniform_lim_glorot`.
wb_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for binary weight. By default, it is initialized with :obj:`nnabla.initializer.UniformInitializer` within the range determined by :obj:`nnabla.initializer.calc_uniform_lim_glorot`.
b_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for bias. By default, it is initialized with zeros if `with_bias` is `True`.
base_axis (int): Dimensions up to `base_axis` are treated as the sample dimensions.
fix_parameters (bool): When set to `True`, the weights and biases will not be updated.
rng (numpy.random.RandomState): Random generator for Initializer.
with_bias (bool): Specify whether to include the bias term.
Returns:
:class:`~nnabla.Variable`
"""
if w_init is None:
w_init = UniformInitializer(
calc_uniform_lim_glorot(inp.shape[base_axis], outmaps, tuple(kernel)), rng=rng)
if wb_init is None:
wb_init = UniformInitializer(
calc_uniform_lim_glorot(inp.shape[base_axis], outmaps, tuple(kernel)), rng=rng)
if b_init is None:
b_init = ConstantInitializer()
w = get_parameter_or_create(
"W", (outmaps, inp.shape[base_axis]) + tuple(kernel),
w_init, True, not fix_parameters)
wb = get_parameter_or_create(
"Wb", (outmaps, inp.shape[base_axis]) + tuple(kernel),
wb_init, False)
b = None
if with_bias:
b = get_parameter_or_create(
"b", (outmaps,), b_init, True, not fix_parameters)
return F.binary_connect_convolution(inp, w, wb, b, base_axis, pad, stride, dilation, group, quantize_zero_to) | def function[binary_connect_convolution, parameter[inp, outmaps, kernel, pad, stride, dilation, group, quantize_zero_to, w_init, wb_init, b_init, base_axis, fix_parameters, rng, with_bias]]:
constant[Binary Connect Convolution, multiplier-less inner-product.
Binary Connect Convolution is the convolution function,
except the definition of the inner product is modified.
The input-output relation of this function is as follows:
.. math::
y_{n, a, b} = \sum_{m} \sum_{i} \sum_{j} sign(w_{n, m, i, j}) x_{m, a + i, b + j}.
Therefore :math:`sign(w_i)` is either :math:`1` or :math:`-1` and the inner product
simplifies to addition.
This function should be used together with BatchNormalization.
References:
M. Courbariaux, Y. Bengio, and J.-P. David. "BinaryConnect:
Training Deep Neural Networks with binary weights during propagations."
Advances in Neural Information Processing Systems. 2015.
.. note::
1) if you would like to share weights between some layers, please
make sure to share the standard, floating value weights (`weight`)
and not the binarized weights (`binary_weight`)
2) The weights and the binary weights become synced only after :func:`~nnabla._variable.Variable.forward` is called,
and not after a call to :func:`~nnabla._variable.Variable.backward`.
To access the parameters of the network, remember to call :func:`~nnabla._variable.Variable.forward` once before doing so, otherwise the
float weights and the binary weights will not be in sync.
3) Quantized values are stored as floating point number for `binary_weight`,
since this function is only for simulation purposes.
Args:
inp (~nnabla.Variable): N-D array.
outmaps (int): Number of convolution kernels (which is equal to the number of output channels). For example, to apply convolution on an input with 16 types of filters, specify 16.
kernel (:obj:`tuple` of :obj:`int`): Convolution kernel size. For example, to apply convolution on an image with a 3 (height) by 5 (width) two-dimensional kernel, specify (3,5).
pad (:obj:`tuple` of :obj:`int`): Padding sizes for dimensions.
stride (:obj:`tuple` of :obj:`int`): Stride sizes for dimensions.
dilation (:obj:`tuple` of :obj:`int`): Dilation sizes for dimensions.
group (int): Number of groups of channels. This makes connections across channels sparser by grouping connections along map direction.
quantize_zero_to (float): Input value at zero is quantized to this value.
w_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for weight. By default, it is initialized with :obj:`nnabla.initializer.UniformInitializer` within the range determined by :obj:`nnabla.initializer.calc_uniform_lim_glorot`.
wb_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for binary weight. By default, it is initialized with :obj:`nnabla.initializer.UniformInitializer` within the range determined by :obj:`nnabla.initializer.calc_uniform_lim_glorot`.
b_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for bias. By default, it is initialized with zeros if `with_bias` is `True`.
base_axis (int): Dimensions up to `base_axis` are treated as the sample dimensions.
fix_parameters (bool): When set to `True`, the weights and biases will not be updated.
rng (numpy.random.RandomState): Random generator for Initializer.
with_bias (bool): Specify whether to include the bias term.
Returns:
:class:`~nnabla.Variable`
]
if compare[name[w_init] is constant[None]] begin[:]
variable[w_init] assign[=] call[name[UniformInitializer], parameter[call[name[calc_uniform_lim_glorot], parameter[call[name[inp].shape][name[base_axis]], name[outmaps], call[name[tuple], parameter[name[kernel]]]]]]]
if compare[name[wb_init] is constant[None]] begin[:]
variable[wb_init] assign[=] call[name[UniformInitializer], parameter[call[name[calc_uniform_lim_glorot], parameter[call[name[inp].shape][name[base_axis]], name[outmaps], call[name[tuple], parameter[name[kernel]]]]]]]
if compare[name[b_init] is constant[None]] begin[:]
variable[b_init] assign[=] call[name[ConstantInitializer], parameter[]]
variable[w] assign[=] call[name[get_parameter_or_create], parameter[constant[W], binary_operation[tuple[[<ast.Name object at 0x7da18bccbee0>, <ast.Subscript object at 0x7da18bccbb80>]] + call[name[tuple], parameter[name[kernel]]]], name[w_init], constant[True], <ast.UnaryOp object at 0x7da18bcc8b50>]]
variable[wb] assign[=] call[name[get_parameter_or_create], parameter[constant[Wb], binary_operation[tuple[[<ast.Name object at 0x7da18bcca9e0>, <ast.Subscript object at 0x7da18bcc9450>]] + call[name[tuple], parameter[name[kernel]]]], name[wb_init], constant[False]]]
variable[b] assign[=] constant[None]
if name[with_bias] begin[:]
variable[b] assign[=] call[name[get_parameter_or_create], parameter[constant[b], tuple[[<ast.Name object at 0x7da18bcc9ae0>]], name[b_init], constant[True], <ast.UnaryOp object at 0x7da18bcc9210>]]
return[call[name[F].binary_connect_convolution, parameter[name[inp], name[w], name[wb], name[b], name[base_axis], name[pad], name[stride], name[dilation], name[group], name[quantize_zero_to]]]] | keyword[def] identifier[binary_connect_convolution] ( identifier[inp] , identifier[outmaps] , identifier[kernel] ,
identifier[pad] = keyword[None] , identifier[stride] = keyword[None] , identifier[dilation] = keyword[None] , identifier[group] = literal[int] ,
identifier[quantize_zero_to] = literal[int] ,
identifier[w_init] = keyword[None] , identifier[wb_init] = keyword[None] , identifier[b_init] = keyword[None] ,
identifier[base_axis] = literal[int] , identifier[fix_parameters] = keyword[False] , identifier[rng] = keyword[None] ,
identifier[with_bias] = keyword[True] ):
literal[string]
keyword[if] identifier[w_init] keyword[is] keyword[None] :
identifier[w_init] = identifier[UniformInitializer] (
identifier[calc_uniform_lim_glorot] ( identifier[inp] . identifier[shape] [ identifier[base_axis] ], identifier[outmaps] , identifier[tuple] ( identifier[kernel] )), identifier[rng] = identifier[rng] )
keyword[if] identifier[wb_init] keyword[is] keyword[None] :
identifier[wb_init] = identifier[UniformInitializer] (
identifier[calc_uniform_lim_glorot] ( identifier[inp] . identifier[shape] [ identifier[base_axis] ], identifier[outmaps] , identifier[tuple] ( identifier[kernel] )), identifier[rng] = identifier[rng] )
keyword[if] identifier[b_init] keyword[is] keyword[None] :
identifier[b_init] = identifier[ConstantInitializer] ()
identifier[w] = identifier[get_parameter_or_create] (
literal[string] ,( identifier[outmaps] , identifier[inp] . identifier[shape] [ identifier[base_axis] ])+ identifier[tuple] ( identifier[kernel] ),
identifier[w_init] , keyword[True] , keyword[not] identifier[fix_parameters] )
identifier[wb] = identifier[get_parameter_or_create] (
literal[string] ,( identifier[outmaps] , identifier[inp] . identifier[shape] [ identifier[base_axis] ])+ identifier[tuple] ( identifier[kernel] ),
identifier[wb_init] , keyword[False] )
identifier[b] = keyword[None]
keyword[if] identifier[with_bias] :
identifier[b] = identifier[get_parameter_or_create] (
literal[string] ,( identifier[outmaps] ,), identifier[b_init] , keyword[True] , keyword[not] identifier[fix_parameters] )
keyword[return] identifier[F] . identifier[binary_connect_convolution] ( identifier[inp] , identifier[w] , identifier[wb] , identifier[b] , identifier[base_axis] , identifier[pad] , identifier[stride] , identifier[dilation] , identifier[group] , identifier[quantize_zero_to] ) | def binary_connect_convolution(inp, outmaps, kernel, pad=None, stride=None, dilation=None, group=1, quantize_zero_to=1.0, w_init=None, wb_init=None, b_init=None, base_axis=1, fix_parameters=False, rng=None, with_bias=True):
"""Binary Connect Convolution, multiplier-less inner-product.
Binary Connect Convolution is the convolution function,
except the definition of the inner product is modified.
The input-output relation of this function is as follows:
.. math::
y_{n, a, b} = \\sum_{m} \\sum_{i} \\sum_{j} sign(w_{n, m, i, j}) x_{m, a + i, b + j}.
Therefore :math:`sign(w_i)` is either :math:`1` or :math:`-1` and the inner product
simplifies to addition.
This function should be used together with BatchNormalization.
References:
M. Courbariaux, Y. Bengio, and J.-P. David. "BinaryConnect:
Training Deep Neural Networks with binary weights during propagations."
Advances in Neural Information Processing Systems. 2015.
.. note::
1) if you would like to share weights between some layers, please
make sure to share the standard, floating value weights (`weight`)
and not the binarized weights (`binary_weight`)
2) The weights and the binary weights become synced only after :func:`~nnabla._variable.Variable.forward` is called,
and not after a call to :func:`~nnabla._variable.Variable.backward`.
To access the parameters of the network, remember to call :func:`~nnabla._variable.Variable.forward` once before doing so, otherwise the
float weights and the binary weights will not be in sync.
3) Quantized values are stored as floating point number for `binary_weight`,
since this function is only for simulation purposes.
Args:
inp (~nnabla.Variable): N-D array.
outmaps (int): Number of convolution kernels (which is equal to the number of output channels). For example, to apply convolution on an input with 16 types of filters, specify 16.
kernel (:obj:`tuple` of :obj:`int`): Convolution kernel size. For example, to apply convolution on an image with a 3 (height) by 5 (width) two-dimensional kernel, specify (3,5).
pad (:obj:`tuple` of :obj:`int`): Padding sizes for dimensions.
stride (:obj:`tuple` of :obj:`int`): Stride sizes for dimensions.
dilation (:obj:`tuple` of :obj:`int`): Dilation sizes for dimensions.
group (int): Number of groups of channels. This makes connections across channels sparser by grouping connections along map direction.
quantize_zero_to (float): Input value at zero is quantized to this value.
w_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for weight. By default, it is initialized with :obj:`nnabla.initializer.UniformInitializer` within the range determined by :obj:`nnabla.initializer.calc_uniform_lim_glorot`.
wb_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for binary weight. By default, it is initialized with :obj:`nnabla.initializer.UniformInitializer` within the range determined by :obj:`nnabla.initializer.calc_uniform_lim_glorot`.
b_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for bias. By default, it is initialized with zeros if `with_bias` is `True`.
base_axis (int): Dimensions up to `base_axis` are treated as the sample dimensions.
fix_parameters (bool): When set to `True`, the weights and biases will not be updated.
rng (numpy.random.RandomState): Random generator for Initializer.
with_bias (bool): Specify whether to include the bias term.
Returns:
:class:`~nnabla.Variable`
"""
if w_init is None:
w_init = UniformInitializer(calc_uniform_lim_glorot(inp.shape[base_axis], outmaps, tuple(kernel)), rng=rng) # depends on [control=['if'], data=['w_init']]
if wb_init is None:
wb_init = UniformInitializer(calc_uniform_lim_glorot(inp.shape[base_axis], outmaps, tuple(kernel)), rng=rng) # depends on [control=['if'], data=['wb_init']]
if b_init is None:
b_init = ConstantInitializer() # depends on [control=['if'], data=['b_init']]
w = get_parameter_or_create('W', (outmaps, inp.shape[base_axis]) + tuple(kernel), w_init, True, not fix_parameters)
wb = get_parameter_or_create('Wb', (outmaps, inp.shape[base_axis]) + tuple(kernel), wb_init, False)
b = None
if with_bias:
b = get_parameter_or_create('b', (outmaps,), b_init, True, not fix_parameters) # depends on [control=['if'], data=[]]
return F.binary_connect_convolution(inp, w, wb, b, base_axis, pad, stride, dilation, group, quantize_zero_to) |
def read_private_key_file(pkey_file,
pkey_password=None,
key_type=None,
logger=None):
"""
Get SSH Public key from a private key file, given an optional password
Arguments:
pkey_file (str):
File containing a private key (RSA, DSS or ECDSA)
Keyword Arguments:
pkey_password (Optional[str]):
Password to decrypt the private key
logger (Optional[logging.Logger])
Return:
paramiko.Pkey
"""
ssh_pkey = None
for pkey_class in (key_type,) if key_type else (
paramiko.RSAKey,
paramiko.DSSKey,
paramiko.ECDSAKey,
paramiko.Ed25519Key
):
try:
ssh_pkey = pkey_class.from_private_key_file(
pkey_file,
password=pkey_password
)
if logger:
logger.debug('Private key file ({0}, {1}) successfully '
'loaded'.format(pkey_file, pkey_class))
break
except paramiko.PasswordRequiredException:
if logger:
logger.error('Password is required for key {0}'
.format(pkey_file))
break
except paramiko.SSHException:
if logger:
logger.debug('Private key file ({0}) could not be loaded '
'as type {1} or bad password'
.format(pkey_file, pkey_class))
return ssh_pkey | def function[read_private_key_file, parameter[pkey_file, pkey_password, key_type, logger]]:
constant[
Get SSH Public key from a private key file, given an optional password
Arguments:
pkey_file (str):
File containing a private key (RSA, DSS or ECDSA)
Keyword Arguments:
pkey_password (Optional[str]):
Password to decrypt the private key
logger (Optional[logging.Logger])
Return:
paramiko.Pkey
]
variable[ssh_pkey] assign[=] constant[None]
for taget[name[pkey_class]] in starred[<ast.IfExp object at 0x7da1b13922c0>] begin[:]
<ast.Try object at 0x7da1b1393220>
return[name[ssh_pkey]] | keyword[def] identifier[read_private_key_file] ( identifier[pkey_file] ,
identifier[pkey_password] = keyword[None] ,
identifier[key_type] = keyword[None] ,
identifier[logger] = keyword[None] ):
literal[string]
identifier[ssh_pkey] = keyword[None]
keyword[for] identifier[pkey_class] keyword[in] ( identifier[key_type] ,) keyword[if] identifier[key_type] keyword[else] (
identifier[paramiko] . identifier[RSAKey] ,
identifier[paramiko] . identifier[DSSKey] ,
identifier[paramiko] . identifier[ECDSAKey] ,
identifier[paramiko] . identifier[Ed25519Key]
):
keyword[try] :
identifier[ssh_pkey] = identifier[pkey_class] . identifier[from_private_key_file] (
identifier[pkey_file] ,
identifier[password] = identifier[pkey_password]
)
keyword[if] identifier[logger] :
identifier[logger] . identifier[debug] ( literal[string]
literal[string] . identifier[format] ( identifier[pkey_file] , identifier[pkey_class] ))
keyword[break]
keyword[except] identifier[paramiko] . identifier[PasswordRequiredException] :
keyword[if] identifier[logger] :
identifier[logger] . identifier[error] ( literal[string]
. identifier[format] ( identifier[pkey_file] ))
keyword[break]
keyword[except] identifier[paramiko] . identifier[SSHException] :
keyword[if] identifier[logger] :
identifier[logger] . identifier[debug] ( literal[string]
literal[string]
. identifier[format] ( identifier[pkey_file] , identifier[pkey_class] ))
keyword[return] identifier[ssh_pkey] | def read_private_key_file(pkey_file, pkey_password=None, key_type=None, logger=None):
"""
Get SSH Public key from a private key file, given an optional password
Arguments:
pkey_file (str):
File containing a private key (RSA, DSS or ECDSA)
Keyword Arguments:
pkey_password (Optional[str]):
Password to decrypt the private key
logger (Optional[logging.Logger])
Return:
paramiko.Pkey
"""
ssh_pkey = None
for pkey_class in (key_type,) if key_type else (paramiko.RSAKey, paramiko.DSSKey, paramiko.ECDSAKey, paramiko.Ed25519Key):
try:
ssh_pkey = pkey_class.from_private_key_file(pkey_file, password=pkey_password)
if logger:
logger.debug('Private key file ({0}, {1}) successfully loaded'.format(pkey_file, pkey_class)) # depends on [control=['if'], data=[]]
break # depends on [control=['try'], data=[]]
except paramiko.PasswordRequiredException:
if logger:
logger.error('Password is required for key {0}'.format(pkey_file)) # depends on [control=['if'], data=[]]
break # depends on [control=['except'], data=[]]
except paramiko.SSHException:
if logger:
logger.debug('Private key file ({0}) could not be loaded as type {1} or bad password'.format(pkey_file, pkey_class)) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['pkey_class']]
return ssh_pkey |
def _refresh_show(self, dt):
""" While the toast.show is true, keep calling .show() until the
duration `dt` expires.
Parameters
------------
dt: int
Time left to keep showing
"""
d = self.declaration
if dt <= 0:
#: Done, hide
d.show = False
elif d.show:
#: If user didn't cancel it, keep it alive
self.toast.show()
t = min(1000, dt)
app = self.get_context()
app.timed_call(t, self._refresh_show, dt-t) | def function[_refresh_show, parameter[self, dt]]:
constant[ While the toast.show is true, keep calling .show() until the
duration `dt` expires.
Parameters
------------
dt: int
Time left to keep showing
]
variable[d] assign[=] name[self].declaration
if compare[name[dt] less_or_equal[<=] constant[0]] begin[:]
name[d].show assign[=] constant[False] | keyword[def] identifier[_refresh_show] ( identifier[self] , identifier[dt] ):
literal[string]
identifier[d] = identifier[self] . identifier[declaration]
keyword[if] identifier[dt] <= literal[int] :
identifier[d] . identifier[show] = keyword[False]
keyword[elif] identifier[d] . identifier[show] :
identifier[self] . identifier[toast] . identifier[show] ()
identifier[t] = identifier[min] ( literal[int] , identifier[dt] )
identifier[app] = identifier[self] . identifier[get_context] ()
identifier[app] . identifier[timed_call] ( identifier[t] , identifier[self] . identifier[_refresh_show] , identifier[dt] - identifier[t] ) | def _refresh_show(self, dt):
""" While the toast.show is true, keep calling .show() until the
duration `dt` expires.
Parameters
------------
dt: int
Time left to keep showing
"""
d = self.declaration
if dt <= 0:
#: Done, hide
d.show = False # depends on [control=['if'], data=[]]
elif d.show:
#: If user didn't cancel it, keep it alive
self.toast.show()
t = min(1000, dt)
app = self.get_context()
app.timed_call(t, self._refresh_show, dt - t) # depends on [control=['if'], data=[]] |
def _report_evaluation(self):
"""make the global evaluation report"""
# check with at least check 1 statements (usually 0 when there is a
# syntax error preventing pylint from further processing)
previous_stats = config.load_results(self.file_state.base_name)
if self.stats["statement"] == 0:
return
# get a global note for the code
evaluation = self.config.evaluation
try:
note = eval(evaluation, {}, self.stats) # pylint: disable=eval-used
except Exception as ex:
msg = "An exception occurred while rating: %s" % ex
else:
self.stats["global_note"] = note
msg = "Your code has been rated at %.2f/10" % note
pnote = previous_stats.get("global_note")
if pnote is not None:
msg += " (previous run: %.2f/10, %+.2f)" % (pnote, note - pnote)
if self.config.score:
sect = report_nodes.EvaluationSection(msg)
self.reporter.display_reports(sect) | def function[_report_evaluation, parameter[self]]:
constant[make the global evaluation report]
variable[previous_stats] assign[=] call[name[config].load_results, parameter[name[self].file_state.base_name]]
if compare[call[name[self].stats][constant[statement]] equal[==] constant[0]] begin[:]
return[None]
variable[evaluation] assign[=] name[self].config.evaluation
<ast.Try object at 0x7da1b02f1000>
if name[self].config.score begin[:]
variable[sect] assign[=] call[name[report_nodes].EvaluationSection, parameter[name[msg]]]
call[name[self].reporter.display_reports, parameter[name[sect]]] | keyword[def] identifier[_report_evaluation] ( identifier[self] ):
literal[string]
identifier[previous_stats] = identifier[config] . identifier[load_results] ( identifier[self] . identifier[file_state] . identifier[base_name] )
keyword[if] identifier[self] . identifier[stats] [ literal[string] ]== literal[int] :
keyword[return]
identifier[evaluation] = identifier[self] . identifier[config] . identifier[evaluation]
keyword[try] :
identifier[note] = identifier[eval] ( identifier[evaluation] ,{}, identifier[self] . identifier[stats] )
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[msg] = literal[string] % identifier[ex]
keyword[else] :
identifier[self] . identifier[stats] [ literal[string] ]= identifier[note]
identifier[msg] = literal[string] % identifier[note]
identifier[pnote] = identifier[previous_stats] . identifier[get] ( literal[string] )
keyword[if] identifier[pnote] keyword[is] keyword[not] keyword[None] :
identifier[msg] += literal[string] %( identifier[pnote] , identifier[note] - identifier[pnote] )
keyword[if] identifier[self] . identifier[config] . identifier[score] :
identifier[sect] = identifier[report_nodes] . identifier[EvaluationSection] ( identifier[msg] )
identifier[self] . identifier[reporter] . identifier[display_reports] ( identifier[sect] ) | def _report_evaluation(self):
"""make the global evaluation report"""
# check with at least check 1 statements (usually 0 when there is a
# syntax error preventing pylint from further processing)
previous_stats = config.load_results(self.file_state.base_name)
if self.stats['statement'] == 0:
return # depends on [control=['if'], data=[]]
# get a global note for the code
evaluation = self.config.evaluation
try:
note = eval(evaluation, {}, self.stats) # pylint: disable=eval-used # depends on [control=['try'], data=[]]
except Exception as ex:
msg = 'An exception occurred while rating: %s' % ex # depends on [control=['except'], data=['ex']]
else:
self.stats['global_note'] = note
msg = 'Your code has been rated at %.2f/10' % note
pnote = previous_stats.get('global_note')
if pnote is not None:
msg += ' (previous run: %.2f/10, %+.2f)' % (pnote, note - pnote) # depends on [control=['if'], data=['pnote']]
if self.config.score:
sect = report_nodes.EvaluationSection(msg)
self.reporter.display_reports(sect) # depends on [control=['if'], data=[]] |
def kernel_modules(attrs=None, where=None):
'''
Return kernel_modules information from osquery
CLI Example:
.. code-block:: bash
salt '*' osquery.kernel_modules
'''
if __grains__['os_family'] in ['RedHat', 'Debian']:
return _osquery_cmd(table='kernel_modules', attrs=attrs, where=where)
return {'result': False, 'comment': 'Only available on Red Hat or Debian based systems.'} | def function[kernel_modules, parameter[attrs, where]]:
constant[
Return kernel_modules information from osquery
CLI Example:
.. code-block:: bash
salt '*' osquery.kernel_modules
]
if compare[call[name[__grains__]][constant[os_family]] in list[[<ast.Constant object at 0x7da1b2008790>, <ast.Constant object at 0x7da1b2008760>]]] begin[:]
return[call[name[_osquery_cmd], parameter[]]]
return[dictionary[[<ast.Constant object at 0x7da1b20092d0>, <ast.Constant object at 0x7da1b20093c0>], [<ast.Constant object at 0x7da1b2008820>, <ast.Constant object at 0x7da1b20089a0>]]] | keyword[def] identifier[kernel_modules] ( identifier[attrs] = keyword[None] , identifier[where] = keyword[None] ):
literal[string]
keyword[if] identifier[__grains__] [ literal[string] ] keyword[in] [ literal[string] , literal[string] ]:
keyword[return] identifier[_osquery_cmd] ( identifier[table] = literal[string] , identifier[attrs] = identifier[attrs] , identifier[where] = identifier[where] )
keyword[return] { literal[string] : keyword[False] , literal[string] : literal[string] } | def kernel_modules(attrs=None, where=None):
"""
Return kernel_modules information from osquery
CLI Example:
.. code-block:: bash
salt '*' osquery.kernel_modules
"""
if __grains__['os_family'] in ['RedHat', 'Debian']:
return _osquery_cmd(table='kernel_modules', attrs=attrs, where=where) # depends on [control=['if'], data=[]]
return {'result': False, 'comment': 'Only available on Red Hat or Debian based systems.'} |
def get_synset_1000(self):
"""
Returns:
dict: {cls_number: synset_id}
"""
fname = os.path.join(self.dir, 'synsets.txt')
assert os.path.isfile(fname)
lines = [x.strip() for x in open(fname).readlines()]
return dict(enumerate(lines)) | def function[get_synset_1000, parameter[self]]:
constant[
Returns:
dict: {cls_number: synset_id}
]
variable[fname] assign[=] call[name[os].path.join, parameter[name[self].dir, constant[synsets.txt]]]
assert[call[name[os].path.isfile, parameter[name[fname]]]]
variable[lines] assign[=] <ast.ListComp object at 0x7da18f09fdf0>
return[call[name[dict], parameter[call[name[enumerate], parameter[name[lines]]]]]] | keyword[def] identifier[get_synset_1000] ( identifier[self] ):
literal[string]
identifier[fname] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[dir] , literal[string] )
keyword[assert] identifier[os] . identifier[path] . identifier[isfile] ( identifier[fname] )
identifier[lines] =[ identifier[x] . identifier[strip] () keyword[for] identifier[x] keyword[in] identifier[open] ( identifier[fname] ). identifier[readlines] ()]
keyword[return] identifier[dict] ( identifier[enumerate] ( identifier[lines] )) | def get_synset_1000(self):
"""
Returns:
dict: {cls_number: synset_id}
"""
fname = os.path.join(self.dir, 'synsets.txt')
assert os.path.isfile(fname)
lines = [x.strip() for x in open(fname).readlines()]
return dict(enumerate(lines)) |
def expand_short_options(self, argv):
"""Convert grouped short options like `-abc` to `-a, -b, -c`.
This is necessary because we set ``allow_abbrev=False`` on the
``ArgumentParser`` in :prop:`self.arg_parser`. The argparse docs
say ``allow_abbrev`` applies only to long options, but it also
affects whether short options grouped behind a single dash will
be parsed into multiple short options.
"""
new_argv = []
for arg in argv:
result = self.parse_multi_short_option(arg)
new_argv.extend(result)
return new_argv | def function[expand_short_options, parameter[self, argv]]:
constant[Convert grouped short options like `-abc` to `-a, -b, -c`.
This is necessary because we set ``allow_abbrev=False`` on the
``ArgumentParser`` in :prop:`self.arg_parser`. The argparse docs
say ``allow_abbrev`` applies only to long options, but it also
affects whether short options grouped behind a single dash will
be parsed into multiple short options.
]
variable[new_argv] assign[=] list[[]]
for taget[name[arg]] in starred[name[argv]] begin[:]
variable[result] assign[=] call[name[self].parse_multi_short_option, parameter[name[arg]]]
call[name[new_argv].extend, parameter[name[result]]]
return[name[new_argv]] | keyword[def] identifier[expand_short_options] ( identifier[self] , identifier[argv] ):
literal[string]
identifier[new_argv] =[]
keyword[for] identifier[arg] keyword[in] identifier[argv] :
identifier[result] = identifier[self] . identifier[parse_multi_short_option] ( identifier[arg] )
identifier[new_argv] . identifier[extend] ( identifier[result] )
keyword[return] identifier[new_argv] | def expand_short_options(self, argv):
"""Convert grouped short options like `-abc` to `-a, -b, -c`.
This is necessary because we set ``allow_abbrev=False`` on the
``ArgumentParser`` in :prop:`self.arg_parser`. The argparse docs
say ``allow_abbrev`` applies only to long options, but it also
affects whether short options grouped behind a single dash will
be parsed into multiple short options.
"""
new_argv = []
for arg in argv:
result = self.parse_multi_short_option(arg)
new_argv.extend(result) # depends on [control=['for'], data=['arg']]
return new_argv |
def connect(self, interactive=False):
"""
Open SSH connection to droplet
Parameters
----------
interactive: bool, default False
If True then SSH client will prompt for password when necessary
and also print output to console
"""
from poseidon.ssh import SSHClient
rs = SSHClient(self.ip_address, interactive=interactive)
return rs | def function[connect, parameter[self, interactive]]:
constant[
Open SSH connection to droplet
Parameters
----------
interactive: bool, default False
If True then SSH client will prompt for password when necessary
and also print output to console
]
from relative_module[poseidon.ssh] import module[SSHClient]
variable[rs] assign[=] call[name[SSHClient], parameter[name[self].ip_address]]
return[name[rs]] | keyword[def] identifier[connect] ( identifier[self] , identifier[interactive] = keyword[False] ):
literal[string]
keyword[from] identifier[poseidon] . identifier[ssh] keyword[import] identifier[SSHClient]
identifier[rs] = identifier[SSHClient] ( identifier[self] . identifier[ip_address] , identifier[interactive] = identifier[interactive] )
keyword[return] identifier[rs] | def connect(self, interactive=False):
"""
Open SSH connection to droplet
Parameters
----------
interactive: bool, default False
If True then SSH client will prompt for password when necessary
and also print output to console
"""
from poseidon.ssh import SSHClient
rs = SSHClient(self.ip_address, interactive=interactive)
return rs |
def write_atom(dest, entries, author, title, address, updated=None, link=None,
language="en"):
"""
Write an atom feed to a file.
Parameters
----------
dest : str
Destination file path, or a file-like object
entries : list of FeedEntry
Feed entries.
author : str
Author of the feed.
title : str
Title for the feed.
address : str
Address (domain name or email) to be used in building unique IDs.
updated : datetime, optional
Time stamp for the feed. If not given, take from the newest entry.
link : str, optional
Link for the feed.
language : str, optional
Language of the feed. Default is 'en'.
"""
if updated is None:
if entries:
updated = max(entry.updated for entry in entries)
else:
updated = datetime.datetime.utcnow()
root = etree.Element(ATOM_NS + 'feed')
# id (obligatory)
el = etree.Element(ATOM_NS + 'id')
el.text = _get_id(address, None, ["feed", author, title])
root.append(el)
# author (obligatory)
el = etree.Element(ATOM_NS + 'author')
el2 = etree.Element(ATOM_NS + 'name')
el2.text = author
el.append(el2)
root.append(el)
# title (obligatory)
el = etree.Element(ATOM_NS + 'title')
el.attrib[XML_NS + 'lang'] = language
el.text = title
root.append(el)
# updated (obligatory)
el = etree.Element(ATOM_NS + 'updated')
el.text = updated.strftime('%Y-%m-%dT%H:%M:%SZ')
root.append(el)
# link
if link is not None:
el = etree.Element(ATOM_NS + 'link')
el.attrib[ATOM_NS + 'href'] = link
root.append(el)
# entries
for entry in entries:
root.append(entry.get_atom(address, language))
tree = etree.ElementTree(root)
def write(f):
if sys.version_info[:2] < (2, 7):
_etree_py26_write(f, tree)
else:
tree.write(f, xml_declaration=True, default_namespace=ATOM_NS[1:-1],
encoding=str('utf-8'))
if hasattr(dest, 'write'):
write(dest)
else:
with util.long_path_open(dest, 'wb') as f:
write(f) | def function[write_atom, parameter[dest, entries, author, title, address, updated, link, language]]:
constant[
Write an atom feed to a file.
Parameters
----------
dest : str
Destination file path, or a file-like object
entries : list of FeedEntry
Feed entries.
author : str
Author of the feed.
title : str
Title for the feed.
address : str
Address (domain name or email) to be used in building unique IDs.
updated : datetime, optional
Time stamp for the feed. If not given, take from the newest entry.
link : str, optional
Link for the feed.
language : str, optional
Language of the feed. Default is 'en'.
]
if compare[name[updated] is constant[None]] begin[:]
if name[entries] begin[:]
variable[updated] assign[=] call[name[max], parameter[<ast.GeneratorExp object at 0x7da18eb57b80>]]
variable[root] assign[=] call[name[etree].Element, parameter[binary_operation[name[ATOM_NS] + constant[feed]]]]
variable[el] assign[=] call[name[etree].Element, parameter[binary_operation[name[ATOM_NS] + constant[id]]]]
name[el].text assign[=] call[name[_get_id], parameter[name[address], constant[None], list[[<ast.Constant object at 0x7da20e961630>, <ast.Name object at 0x7da20e961f60>, <ast.Name object at 0x7da20e961390>]]]]
call[name[root].append, parameter[name[el]]]
variable[el] assign[=] call[name[etree].Element, parameter[binary_operation[name[ATOM_NS] + constant[author]]]]
variable[el2] assign[=] call[name[etree].Element, parameter[binary_operation[name[ATOM_NS] + constant[name]]]]
name[el2].text assign[=] name[author]
call[name[el].append, parameter[name[el2]]]
call[name[root].append, parameter[name[el]]]
variable[el] assign[=] call[name[etree].Element, parameter[binary_operation[name[ATOM_NS] + constant[title]]]]
call[name[el].attrib][binary_operation[name[XML_NS] + constant[lang]]] assign[=] name[language]
name[el].text assign[=] name[title]
call[name[root].append, parameter[name[el]]]
variable[el] assign[=] call[name[etree].Element, parameter[binary_operation[name[ATOM_NS] + constant[updated]]]]
name[el].text assign[=] call[name[updated].strftime, parameter[constant[%Y-%m-%dT%H:%M:%SZ]]]
call[name[root].append, parameter[name[el]]]
if compare[name[link] is_not constant[None]] begin[:]
variable[el] assign[=] call[name[etree].Element, parameter[binary_operation[name[ATOM_NS] + constant[link]]]]
call[name[el].attrib][binary_operation[name[ATOM_NS] + constant[href]]] assign[=] name[link]
call[name[root].append, parameter[name[el]]]
for taget[name[entry]] in starred[name[entries]] begin[:]
call[name[root].append, parameter[call[name[entry].get_atom, parameter[name[address], name[language]]]]]
variable[tree] assign[=] call[name[etree].ElementTree, parameter[name[root]]]
def function[write, parameter[f]]:
if compare[call[name[sys].version_info][<ast.Slice object at 0x7da204567910>] less[<] tuple[[<ast.Constant object at 0x7da204564f40>, <ast.Constant object at 0x7da204566590>]]] begin[:]
call[name[_etree_py26_write], parameter[name[f], name[tree]]]
if call[name[hasattr], parameter[name[dest], constant[write]]] begin[:]
call[name[write], parameter[name[dest]]] | keyword[def] identifier[write_atom] ( identifier[dest] , identifier[entries] , identifier[author] , identifier[title] , identifier[address] , identifier[updated] = keyword[None] , identifier[link] = keyword[None] ,
identifier[language] = literal[string] ):
literal[string]
keyword[if] identifier[updated] keyword[is] keyword[None] :
keyword[if] identifier[entries] :
identifier[updated] = identifier[max] ( identifier[entry] . identifier[updated] keyword[for] identifier[entry] keyword[in] identifier[entries] )
keyword[else] :
identifier[updated] = identifier[datetime] . identifier[datetime] . identifier[utcnow] ()
identifier[root] = identifier[etree] . identifier[Element] ( identifier[ATOM_NS] + literal[string] )
identifier[el] = identifier[etree] . identifier[Element] ( identifier[ATOM_NS] + literal[string] )
identifier[el] . identifier[text] = identifier[_get_id] ( identifier[address] , keyword[None] ,[ literal[string] , identifier[author] , identifier[title] ])
identifier[root] . identifier[append] ( identifier[el] )
identifier[el] = identifier[etree] . identifier[Element] ( identifier[ATOM_NS] + literal[string] )
identifier[el2] = identifier[etree] . identifier[Element] ( identifier[ATOM_NS] + literal[string] )
identifier[el2] . identifier[text] = identifier[author]
identifier[el] . identifier[append] ( identifier[el2] )
identifier[root] . identifier[append] ( identifier[el] )
identifier[el] = identifier[etree] . identifier[Element] ( identifier[ATOM_NS] + literal[string] )
identifier[el] . identifier[attrib] [ identifier[XML_NS] + literal[string] ]= identifier[language]
identifier[el] . identifier[text] = identifier[title]
identifier[root] . identifier[append] ( identifier[el] )
identifier[el] = identifier[etree] . identifier[Element] ( identifier[ATOM_NS] + literal[string] )
identifier[el] . identifier[text] = identifier[updated] . identifier[strftime] ( literal[string] )
identifier[root] . identifier[append] ( identifier[el] )
keyword[if] identifier[link] keyword[is] keyword[not] keyword[None] :
identifier[el] = identifier[etree] . identifier[Element] ( identifier[ATOM_NS] + literal[string] )
identifier[el] . identifier[attrib] [ identifier[ATOM_NS] + literal[string] ]= identifier[link]
identifier[root] . identifier[append] ( identifier[el] )
keyword[for] identifier[entry] keyword[in] identifier[entries] :
identifier[root] . identifier[append] ( identifier[entry] . identifier[get_atom] ( identifier[address] , identifier[language] ))
identifier[tree] = identifier[etree] . identifier[ElementTree] ( identifier[root] )
keyword[def] identifier[write] ( identifier[f] ):
keyword[if] identifier[sys] . identifier[version_info] [: literal[int] ]<( literal[int] , literal[int] ):
identifier[_etree_py26_write] ( identifier[f] , identifier[tree] )
keyword[else] :
identifier[tree] . identifier[write] ( identifier[f] , identifier[xml_declaration] = keyword[True] , identifier[default_namespace] = identifier[ATOM_NS] [ literal[int] :- literal[int] ],
identifier[encoding] = identifier[str] ( literal[string] ))
keyword[if] identifier[hasattr] ( identifier[dest] , literal[string] ):
identifier[write] ( identifier[dest] )
keyword[else] :
keyword[with] identifier[util] . identifier[long_path_open] ( identifier[dest] , literal[string] ) keyword[as] identifier[f] :
identifier[write] ( identifier[f] ) | def write_atom(dest, entries, author, title, address, updated=None, link=None, language='en'):
"""
Write an atom feed to a file.
Parameters
----------
dest : str
Destination file path, or a file-like object
entries : list of FeedEntry
Feed entries.
author : str
Author of the feed.
title : str
Title for the feed.
address : str
Address (domain name or email) to be used in building unique IDs.
updated : datetime, optional
Time stamp for the feed. If not given, take from the newest entry.
link : str, optional
Link for the feed.
language : str, optional
Language of the feed. Default is 'en'.
"""
if updated is None:
if entries:
updated = max((entry.updated for entry in entries)) # depends on [control=['if'], data=[]]
else:
updated = datetime.datetime.utcnow() # depends on [control=['if'], data=['updated']]
root = etree.Element(ATOM_NS + 'feed')
# id (obligatory)
el = etree.Element(ATOM_NS + 'id')
el.text = _get_id(address, None, ['feed', author, title])
root.append(el)
# author (obligatory)
el = etree.Element(ATOM_NS + 'author')
el2 = etree.Element(ATOM_NS + 'name')
el2.text = author
el.append(el2)
root.append(el)
# title (obligatory)
el = etree.Element(ATOM_NS + 'title')
el.attrib[XML_NS + 'lang'] = language
el.text = title
root.append(el)
# updated (obligatory)
el = etree.Element(ATOM_NS + 'updated')
el.text = updated.strftime('%Y-%m-%dT%H:%M:%SZ')
root.append(el)
# link
if link is not None:
el = etree.Element(ATOM_NS + 'link')
el.attrib[ATOM_NS + 'href'] = link
root.append(el) # depends on [control=['if'], data=['link']]
# entries
for entry in entries:
root.append(entry.get_atom(address, language)) # depends on [control=['for'], data=['entry']]
tree = etree.ElementTree(root)
def write(f):
if sys.version_info[:2] < (2, 7):
_etree_py26_write(f, tree) # depends on [control=['if'], data=[]]
else:
tree.write(f, xml_declaration=True, default_namespace=ATOM_NS[1:-1], encoding=str('utf-8'))
if hasattr(dest, 'write'):
write(dest) # depends on [control=['if'], data=[]]
else:
with util.long_path_open(dest, 'wb') as f:
write(f) # depends on [control=['with'], data=['f']] |
def moderators(self, limit=None):
"""GETs moderators for this subreddit. Calls :meth:`narwal.Reddit.moderators`.
:param limit: max number of items to return
"""
return self._reddit.moderators(self.display_name, limit=limit) | def function[moderators, parameter[self, limit]]:
constant[GETs moderators for this subreddit. Calls :meth:`narwal.Reddit.moderators`.
:param limit: max number of items to return
]
return[call[name[self]._reddit.moderators, parameter[name[self].display_name]]] | keyword[def] identifier[moderators] ( identifier[self] , identifier[limit] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_reddit] . identifier[moderators] ( identifier[self] . identifier[display_name] , identifier[limit] = identifier[limit] ) | def moderators(self, limit=None):
"""GETs moderators for this subreddit. Calls :meth:`narwal.Reddit.moderators`.
:param limit: max number of items to return
"""
return self._reddit.moderators(self.display_name, limit=limit) |
def empty_like(self, shape):
"""
Make an empty LabelArray with the same categories as ``self``, filled
with ``self.missing_value``.
"""
return type(self).from_codes_and_metadata(
codes=np.full(
shape,
self.reverse_categories[self.missing_value],
dtype=unsigned_int_dtype_with_size_in_bytes(self.itemsize),
),
categories=self.categories,
reverse_categories=self.reverse_categories,
missing_value=self.missing_value,
) | def function[empty_like, parameter[self, shape]]:
constant[
Make an empty LabelArray with the same categories as ``self``, filled
with ``self.missing_value``.
]
return[call[call[name[type], parameter[name[self]]].from_codes_and_metadata, parameter[]]] | keyword[def] identifier[empty_like] ( identifier[self] , identifier[shape] ):
literal[string]
keyword[return] identifier[type] ( identifier[self] ). identifier[from_codes_and_metadata] (
identifier[codes] = identifier[np] . identifier[full] (
identifier[shape] ,
identifier[self] . identifier[reverse_categories] [ identifier[self] . identifier[missing_value] ],
identifier[dtype] = identifier[unsigned_int_dtype_with_size_in_bytes] ( identifier[self] . identifier[itemsize] ),
),
identifier[categories] = identifier[self] . identifier[categories] ,
identifier[reverse_categories] = identifier[self] . identifier[reverse_categories] ,
identifier[missing_value] = identifier[self] . identifier[missing_value] ,
) | def empty_like(self, shape):
"""
Make an empty LabelArray with the same categories as ``self``, filled
with ``self.missing_value``.
"""
return type(self).from_codes_and_metadata(codes=np.full(shape, self.reverse_categories[self.missing_value], dtype=unsigned_int_dtype_with_size_in_bytes(self.itemsize)), categories=self.categories, reverse_categories=self.reverse_categories, missing_value=self.missing_value) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.